diff options
149 files changed, 2229 insertions, 1624 deletions
diff --git a/.gitignore b/.gitignore index 84c048a73c..e60505f663 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,43 @@ +# +# Are you tempted to edit this file? +# +# First consider if the changes make sense for all, +# or if they are specific to your workflow/system. +# If it is the latter, you can augment this list with +# entries in .git/info/excludes +# +# see also test/files/.gitignore +# + +*.jar +*~ + +build.properties + +# target directories for ant build /build/ +/dists/ + +# other +/out/ +/bin/ +/sandbox/ + +# eclipse, intellij +/.classpath +/.project +/src/intellij/*.iml +/src/intellij/*.ipr +/src/intellij/*.iws +/.cache +/.idea +/.settings + +# bak files produced by ./cleanup-commit +*.bak + +# Standard symbolic link to build/quick/bin +qbin + +# Mac specific, but that is common enough a dev platform to warrant inclusion. +.DS_Store diff --git a/README.md b/README.md new file mode 100644 index 0000000000..a273cfb544 --- /dev/null +++ b/README.md @@ -0,0 +1,8 @@ +This is the repository for the [Scala Programming Language](http://www.scala-lang.org). + + - [Report an issue](https://issues.scala-lang.org); + - [Read about the development of the compiler and the standard library](http://docs.scala-lang.org/scala/); + - [Check our Jenkins status](https://scala-webapps.epfl.ch/jenkins/); + - [Download the latest nightly](https://scala-webapps.epfl.ch/jenkins/job/scala-nightly-main-master/ws/dists/latest/*zip*/latest.zip); + - ... and contribute right here! Please, first read our [policy](http://docs.scala-lang.org/scala/pull-request-policy.html), +and [sign the contributor's license agreement](http://typesafe.com/contribute/cla/scala). 
diff --git a/README.rst b/README.rst deleted file mode 100644 index 004d7b63d0..0000000000 --- a/README.rst +++ /dev/null @@ -1,206 +0,0 @@ -################################################################################ - THE SCALA REPOSITORY -################################################################################ - -This document describes the Scala core (core library and compiler) repository -and how to build it. For information about Scala as a language, you can visit -the web site http://www.scala-lang.org/ - -Part I. The repository layout --------------------------------------------------------------------------------- - -Follows the file layout of the Scala repository. Files marked with a † are not -part of the repository but are either automatically generated by the -build script or user-created if needed. This is not a complete listing. :: - scala/ - +--build/ Build products output directory for ant. - +--build.xml The main Ant build script. - +--dist/ The destination folder for Scala distributions. - +--docs/ Documentation and sample code. - +--lib/ Pre-compiled libraries for the build. - | +--scala-compiler.jar The stable reference ('starr') compiler jar - | +--scala-library.jar The stable reference ('starr') library jar - | +--scala-library-src.jar A snapshot of the source used to build starr. - | ---ant/ Support libraries for ant. - +--pull-binary-libs.sh Pulls binary artifacts from remote repository. - +--push-binary-libs.sh Pushes new binary artifacts and creates sha. - +--README.rst The file you are currently reading. - +--src/ All the source files of Scala. - | +--actors/ The sources of the Actor library. - | +--compiler/ The sources of the Scala compiler. - | +--library/ The sources of the core Scala library. - | ---swing/ The sources of the Swing library. - +--target/ † Build products output directory for sbt. - +--test/ The Scala test suite. - ---tools/ Developer utilities. - - - -Part II. 
Building Scala with SABBUS --------------------------------------------------------------------------------- - -SABBUS is the name of the Ant build script used to compile Scala. It is mostly -automated and takes care of managing the dependencies. - -^^^^^^^^^^^^^^^^^^^^^^^^ - LAYERS: -^^^^^^^^^^^^^^^^^^^^^^^^ -In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds -Scala in layers. Each layer is a complete compiled Scala compiler and library. -A superior layer is always compiled by the layer just below it. Here is a short -description of the four layers that SABBUS uses, from bottom to top: - -- ``starr``: the stable reference Scala release which is shared by all the - developers. It is found in the repository as 'lib/scala-compiler.jar' and - 'lib/scala-library.jar'. Any committable source code must be compiled directly - by starr to guarantee the bootstrapping of the compiler. - -- ``locker``: the local reference which is compiled by starr and is the work - compiler in a typical development cycle. When it has been built once, it is - “frozen” in this state. Updating it to fit the current source code must be - explicitly requested (see below). - -- ``quick``: the layer which is incrementally built when testing changes in the - compiler or library. This is considered an actual new version when locker is - up-to-date in relation to the source code. - -- ``strap``: a test layer used to check stability of the build. - -^^^^^^^^^^^^^^^^^^^^^^^^ - DEPENDENT CHANGES: -^^^^^^^^^^^^^^^^^^^^^^^^ -SABBUS compiles, for each layer, the Scala library first and the compiler next. -That means that any changes in the library can immediately be used in the -compiler without an intermediate build. On the other hand, if building the -library requires changes in the compiler, a new locker must be built if -bootstrapping is still possible, or a new starr if it is not. 
- - -^^^^^^^^^^^^^^^^^^^^^^^^ -REQUIREMENTS FOR SABBUS: -^^^^^^^^^^^^^^^^^^^^^^^^ -The Scala build system is based on Apache Ant. Most required pre-compiled -libraries are part of the repository (in 'lib/'). The following however is -assumed to be installed on the build machine: - -- A Java runtime environment (JRE) or SDK 1.6 or above. -- Apache Ant version 1.7.0 or above. -- bash (via cygwin for windows) -- curl - - -Part III. Common use-cases --------------------------------------------------------------------------------- -- ``./pull-binary-libs.sh`` - - Downloads all binary artifacts associated with this commit. This requires - internet access to http://typesafe.artifactoryonline.com/typesafe. - -- ``ant -p`` - - Prints out information about the commonly used ant targets. The interested - developer can find the rest in the XML files. - -- ``ant`` or ``ant build`` - - A quick compilation (to quick) of your changes using the locker compiler. - - - This will rebuild all quick if locker changed. - - This will also rebuild locker if starr changed. - -- ``ln -s build/quick/bin qbin`` (once): -- ``ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test`` - - Incrementally builds quick, and then uses it to compile and run the file - ``sandbox/test.scala``. This is a typical debug cycle. - -- ``ant replacelocker`` - - "unfreezes" locker by updating it to match the current source code. - - - This will delete quick so as not to mix classes compiled with different - versions of locker. - -- ``ant test`` - - Tests that your code is working and fit to be committed. - - - Runs the test suite and bootstrapping test on quick. - - You can run the suite only (skipping strap) with 'ant test.suite'. - -- ``ant docs`` - Generates the HTML documentation for the library from the sources using the - scaladoc tool in quick. Note: on most machines this requires more heap than - is allocate by default. You can adjust the parameters with ANT_OPTS. 
- Example command line:: - ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs - -- ``ant dist`` - - Builds a distribution. - - - Rebuilds locker from scratch (to make sure it bootstraps). - - Builds everything twice more and compares bit-to-bit the two builds (to - make sure it is stable). - - Runs the test suite (and refuses to build a distribution if it fails). - - Creates a local distribution in 'dists/latest'. - -- ``ant clean`` - - Removes all temporary build files (locker is preserved). - -- ``ant locker.clean`` - - Removes all build files. - -- ``ant all.clean`` - - Removes all build files (including locker) and all distributions. - -Many of these targets offer a variant which runs with -optimise enabled. -Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt, -replacestarr-opt, replacelocker-opt, and distpack-opt. - -Part IV. Contributing to Scala --------------------------------------------------------------------------------- - -If you wish to contribute, you can find all of the necessary information on -the official Scala website: www.scala-lang.org. - -Specifically, you can subscribe to the Scala mailing lists, read all of the -available documentation, and browse the live github repository. You can contact -the Scala team by sending us a message on one of the mailing lists, or by using -the available contact form. - -In detail: - -- Scala website (links to everything else): - http://www.scala-lang.org - -- Scala documentation: - http://docs.scala-lang.org - -- Scala mailing lists: - http://www.scala-lang.org/node/199 - -- Scala bug and issue tracker: - https://issues.scala-lang.org - -- Scala live git source tree: - http://github.com/scala/scala - -If you are interested in contributing code, we ask you to sign the -[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala), -which allows us to ensure that all code submitted to the project is -unencumbered by copyrights or patents. 
- -Before submitting a pull-request, please make sure you have followed the guidelines -outlined in our `Pull Request Policy <https://github.com/scala/scala/wiki/Pull-Request-Policy>`_. - ------------------- - - - -Thank you! - -The Scala Team @@ -380,7 +380,7 @@ TODO: </then></if> <!-- Allow this to be overridden simply --> - <property name="sbt.latest.version" value="0.12.2"/> + <property name="sbt.latest.version" value="0.12.4"/> <property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/> <property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/> @@ -1554,7 +1554,7 @@ TODO: <target name="test.junit" depends="test.junit.comp"> <stopwatch name="test.junit.timer"/> <mkdir dir="${test.junit.classes}"/> - <junit fork="yes" haltonfailure="yes" showoutput="yes" printsummary="on"> + <junit fork="yes" haltonfailure="yes" printsummary="on"> <classpath refid="test.junit.compiler.build.path"/> <batchtest fork="yes" todir="${build-junit.dir}"> <fileset dir="${test.junit.classes}"> diff --git a/gitignore.SAMPLE b/gitignore.SAMPLE deleted file mode 100644 index 7114225a4a..0000000000 --- a/gitignore.SAMPLE +++ /dev/null @@ -1,28 +0,0 @@ -# see also test/files/.gitignore -/.gitignore -/test/files/.gitignore - -*.jar -*~ - -# target directories for ant build -/build/ -/dists/ - -# other -/out/ -/bin/ -/sandbox/ - -# eclipse, intellij -/.classpath -/.project -/src/intellij/*.iml -/src/intellij/*.ipr -/src/intellij/*.iws -/.cache -/.idea -/.settings - -# bak files produced by ./cleanup-commit -*.bak diff --git a/lib/.gitignore b/lib/.gitignore deleted file mode 100644 index 0c507490be..0000000000 --- a/lib/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -ant-contrib.jar -ant-dotnet-1.0.jar -ant.jar -fjbg.jar -forkjoin.jar -jline.jar -maven-ant-tasks-2.1.1.jar -msil.jar -scala-compiler.jar -scala-compiler-src.jar -scala-library.jar -scala-library-src.jar -scala-reflect.jar -scala-reflect-src.jar -vizant.jar diff --git 
a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1 index debbce2d7e..e3fa18b018 100644 --- a/lib/scala-compiler-src.jar.desired.sha1 +++ b/lib/scala-compiler-src.jar.desired.sha1 @@ -1 +1 @@ -19d04510ac6f25d088da82527d8435b68c00153d ?scala-compiler-src.jar +d62bc132cba37b17c8d5ac65feb20193a3f8cc28 ?scala-compiler-src.jar diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1 index 4ec9610bae..68ac8d7e5c 100644 --- a/lib/scala-compiler.jar.desired.sha1 +++ b/lib/scala-compiler.jar.desired.sha1 @@ -1 +1 @@ -3585351c6a62186097be55fff88bee88a985f5c0 ?scala-compiler.jar +d049885894b93e12f034d4d871c38bfc4d026525 ?scala-compiler.jar diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1 index 6e97551fc7..5f74d96c66 100644 --- a/lib/scala-library-src.jar.desired.sha1 +++ b/lib/scala-library-src.jar.desired.sha1 @@ -1 +1 @@ -e606934dc00ced6bfac715bbdba427f9c2c18bc7 ?scala-library-src.jar +58db8f554695791217de332aa6500a7aa240e480 ?scala-library-src.jar diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1 index 36aedb2ad7..b7befab52a 100644 --- a/lib/scala-library.jar.desired.sha1 +++ b/lib/scala-library.jar.desired.sha1 @@ -1 +1 @@ -36456c52b0395fc1e6e367291e45bd503fa019c5 ?scala-library.jar +12007d1b1b913b563093b22e947e6c05fe40f3de ?scala-library.jar diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1 index ebd6dcbf5a..566111fd51 100644 --- a/lib/scala-reflect-src.jar.desired.sha1 +++ b/lib/scala-reflect-src.jar.desired.sha1 @@ -1 +1 @@ -51787a41cae5b0ec6910c5a1a6af392e17550856 ?scala-reflect-src.jar +c842d370d814515f15159cefa4b9c495d99bb1a9 ?scala-reflect-src.jar diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1 index 4378fec9d6..a6cdbc18ad 100644 --- a/lib/scala-reflect.jar.desired.sha1 +++ b/lib/scala-reflect.jar.desired.sha1 @@ -1 +1 @@ 
-d4a4c0aab882412461fbd9d39cf47da5a619855e ?scala-reflect.jar +a6595b3d7589085f683d4ad5a6072a057ab15ef9 ?scala-reflect.jar diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala index a60a2c2306..6ec111cf7c 100644 --- a/src/compiler/scala/reflect/macros/compiler/Errors.scala +++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala @@ -53,7 +53,7 @@ trait Errors extends Traces { // not exactly an error generator, but very related // and I dearly wanted to push it away from Macros.scala private def checkConforms(slot: String, rtpe: Type, atpe: Type) = { - val verbose = macroDebugVerbose || settings.explaintypes.value + val verbose = macroDebugVerbose def check(rtpe: Type, atpe: Type): Boolean = { def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" + EOL + "true"); true } @@ -70,9 +70,12 @@ trait Errors extends Traces { if (verbose) withTypesExplained(check(rtpe, atpe)) else check(rtpe, atpe) if (!ok) { - if (!macroDebugVerbose) - explainTypes(rtpe, atpe) - compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString))) + if (!verbose) explainTypes(rtpe, atpe) + val msg = { + val ss = Seq(rtpe, atpe) map (this abbreviateCoreAliases _.toString) + s"type mismatch for $slot: ${ss(0)} does not conform to ${ss(1)}" + } + compatibilityError(msg) } } @@ -106,8 +109,8 @@ trait Errors extends Traces { def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) = compatibilityError(NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value)) - def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) = - compatibilityError( - "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+ - ex.getMessage) -}
\ No newline at end of file + def MacroImplTparamInstantiationError(atparams: List[Symbol], e: NoInstance) = { + val badps = atparams map (_.defString) mkString ", " + compatibilityError(f"type parameters $badps cannot be instantiated%n${e.getMessage}") + } +} diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala index 5a454e1e07..bd60faf4cd 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala @@ -45,6 +45,8 @@ trait GenAnnotationInfos { mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*)) case NestedAnnotArg(ann) => mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann)) + case _ => + sys.error(s"Don't know what to do with $arg") } // if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 5f53f558b4..7578def687 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -91,20 +91,20 @@ trait Reshape { private def undoMacroExpansion(tree: Tree): Tree = tree.attachments.get[analyzer.MacroExpansionAttachment] match { case Some(analyzer.MacroExpansionAttachment(original, _)) => + def mkImplicitly(tp: Type) = atPos(tree.pos)( + gen.mkNullaryCall(Predef_implicitly, List(tp)) + ) + val sym = original.symbol original match { // this hack is necessary until I fix implicit macros // so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar // hence we cannot reify references to them, because noone will be able to see them later // when implicit macros are fixed, these sneaky macros will move to corresponding companion objects // 
of, say, ClassTag or TypeTag - case Apply(TypeApply(_, List(tt)), _) if original.symbol == materializeClassTag => - gen.mkNullaryCall(Predef_implicitly, List(appliedType(ClassTagClass, tt.tpe))) - case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeWeakTypeTag => - gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe)))) - case Apply(TypeApply(_, List(tt)), List(pre)) if original.symbol == materializeTypeTag => - gen.mkNullaryCall(Predef_implicitly, List(typeRef(pre.tpe, TypeTagClass, List(tt.tpe)))) - case _ => - original + case Apply(TypeApply(_, List(tt)), _) if sym == materializeClassTag => mkImplicitly(appliedType(ClassTagClass, tt.tpe)) + case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeWeakTypeTag => mkImplicitly(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe))) + case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeTypeTag => mkImplicitly(typeRef(pre.tpe, TypeTagClass, List(tt.tpe))) + case _ => original } case _ => tree } @@ -231,13 +231,10 @@ trait Reshape { val args = if (ann.assocs.isEmpty) { ann.args } else { - def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match { - case LiteralAnnotArg(const) => - Literal(const) - case ArrayAnnotArg(arr) => - Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation) - case NestedAnnotArg(ann) => - toPreTyperAnnotation(ann) + def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = (jann: @unchecked) match { + case LiteralAnnotArg(const) => Literal(const) + case ArrayAnnotArg(arr) => Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation) + case NestedAnnotArg(ann) => toPreTyperAnnotation(ann) } ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) } diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl index bd6cf561b9..a3a95ffd37 100644 --- 
a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl @@ -89,4 +89,4 @@ goto :eof @@endlocal REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu -@@%COMSPEC% /C exit %errorlevel% >nul +@@"%COMSPEC%" /C exit %errorlevel% >nul diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index ea6543bb71..a6c69091c5 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -51,7 +51,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) class GlobalMirror extends Roots(NoSymbol) { val universe: self.type = self - def rootLoader: LazyType = platform.rootLoader + def rootLoader: LazyType = new loaders.PackageLoader(classPath) override def toString = "compiler mirror" } @@ -70,6 +70,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override def settings = currentSettings + /** Switch to turn on detailed type logs */ + var printTypings = settings.Ytyperdebug.value + def this(reporter: Reporter) = this(new Settings(err => reporter.error(null, err)), reporter) @@ -80,12 +83,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // platform specific elements - type ThisPlatform = Platform { val global: Global.this.type } + protected class GlobalPlatform extends { + val global: Global.this.type = Global.this + val settings: Settings = Global.this.settings + } with JavaPlatform - lazy val platform: ThisPlatform = - new { val global: Global.this.type = Global.this } with JavaPlatform + type ThisPlatform = JavaPlatform { val global: Global.this.type } + lazy val platform: ThisPlatform = new GlobalPlatform - type PlatformClassPath = ClassPath[platform.BinaryRepr] + type PlatformClassPath = ClassPath[AbstractFile] type OptClassPath = Option[PlatformClassPath] def classPath: 
PlatformClassPath = platform.classPath @@ -265,12 +271,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) log("!!! " + msg) // such warnings always at least logged } - private def elapsedMessage(msg: String, start: Long) = - msg + " in " + (currentTime - start) + "ms" - def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) - def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]") - def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) def logError(msg: String, t: Throwable): Unit = () @@ -354,9 +355,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) getSourceFile(f) } - lazy val loaders = new SymbolLoaders { + lazy val loaders = new { val global: Global.this.type = Global.this - } + val platform: Global.this.platform.type = Global.this.platform + } with GlobalSymbolLoaders /** Returns the mirror that loaded given symbol */ def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror @@ -417,10 +419,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - /** Switch to turn on detailed type logs */ - val printTypings = settings.Ytyperdebug.value - val printInfers = settings.Yinferdebug.value - // phaseName = "parser" lazy val syntaxAnalyzer = new { val global: Global.this.type = Global.this @@ -915,7 +913,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) { ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries")) - val getName: ClassPath[platform.BinaryRepr] => String = (_.name) + val getName: ClassPath[AbstractFile] => String = (_.name) def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty def invalidateOrRemove(root: ClassSymbol) = { allEntries match { diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala new file 
mode 100644 index 0000000000..6921548230 --- /dev/null +++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala @@ -0,0 +1,30 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala +package tools +package nsc + +/** + * Symbol loaders implementation that wires dependencies using Global. + */ +abstract class GlobalSymbolLoaders extends symtab.SymbolLoaders { + val global: Global + val symbolTable: global.type = global + val platform: symbolTable.platform.type + import global._ + def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = { + def lookup = sym.info.member(name) + // if loading during initialization of `definitions` typerPhase is not yet set. + // in that case we simply load the member at the current phase + if (currentRun.typerPhase eq null) + lookup + else + enteringTyper { lookup } + } + + protected def compileLate(srcfile: io.AbstractFile): Unit = + currentRun.compileLate(srcfile) +} diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala deleted file mode 100644 index e379afce9b..0000000000 --- a/src/compiler/scala/tools/nsc/Phases.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc - -import scala.reflect.internal.util.TableDef -import scala.language.postfixOps - -@deprecated("Scheduled for removal as being a dead-code in the compiler.", "2.10.1") -object Phases { - val MaxPhases = 64 - - /** A class for tracking something about each phase. 
- */ - class Model[T] { - case class Cell(ph: Phase, value: T) { - def name = ph.name - def id = ph.id - } - val values = new Array[Cell](MaxPhases + 1) - def results = values filterNot (_ == null) - def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value) - } - /** A class for recording the elapsed time of each phase in the - * interests of generating a classy and informative table. - */ - class TimingModel extends Model[Long] { - var total: Long = 0 - def table() = { - total = results map (_.value) sum; - new Format.Table(results sortBy (-_.value)) - } - object Format extends TableDef[Cell] { - >> ("phase" -> (_.name)) >+ " " - << ("id" -> (_.id)) >+ " " - >> ("ms" -> (_.value)) >+ " " - << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f")) - } - } -} - diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 03cdead472..2a8412b105 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -952,9 +952,8 @@ trait Scanners extends ScannersCommon { } if (value > limit) syntaxError("floating point number too large") - if (isDeprecatedForm) { - deprecationWarning("This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.") - } + if (isDeprecatedForm) + syntaxError("floating point number is missing digit after dot") if (negated) -value else value } catch { @@ -1010,10 +1009,8 @@ trait Scanners extends ScannersCommon { val lookahead = lookaheadReader val c = lookahead.getc() - /* As of scala 2.11, it isn't a number unless c here is a digit, so - * settings.future.value excludes the rest of the logic. - */ - if (settings.future && !isDigit(c)) + /* Prohibit 1. 
*/ + if (!isDigit(c)) return setStrVal() val isDefinitelyNumber = (c: @switch) match { diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index c5fc12e3ec..32b5a98b98 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -11,33 +11,22 @@ import util.{ClassPath,MergedClassPath,DeltaClassPath} import scala.tools.util.PathResolver trait JavaPlatform extends Platform { + val global: Global + override val symbolTable: global.type = global import global._ import definitions._ - type BinaryRepr = AbstractFile + private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None - private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None - - def classPath: ClassPath[BinaryRepr] = { + def classPath: ClassPath[AbstractFile] = { if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) currentClassPath.get } /** Update classpath with a substituted subentry */ - def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) = + def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) = currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst)) - def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]]) - // [Martin] Why do we need a cast here? - // The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform. - // So we cannot infer that global.platform.BinaryRepr is AbstractFile. - // Ideally, we should be able to write at the top of the JavaPlatform trait: - // val global: Global { val platform: JavaPlatform } - // import global._ - // Right now, this does nothing because the concrete definition of platform in Global - // replaces the tighter abstract definition here. 
If we had DOT typing rules, the two - // types would be conjoined and everything would work out. Yet another reason to push for DOT. - private def classEmitPhase = if (settings.isBCodeActive) genBCode else genASM @@ -66,10 +55,7 @@ trait JavaPlatform extends Platform { (sym isNonBottomSubClass BoxedBooleanClass) } - def newClassLoader(bin: AbstractFile): loaders.SymbolLoader = - new loaders.ClassfileLoader(bin) - - def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = true + def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true def needCompile(bin: AbstractFile, src: AbstractFile) = src.lastModified >= bin.lastModified diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index e2b22c06d7..3bca16635b 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -12,20 +12,18 @@ import io.AbstractFile /** The platform dependent pieces of Global. */ trait Platform { - val global: Global - import global._ + val symbolTable: symtab.SymbolTable + import symbolTable._ /** The binary classfile representation type */ - type BinaryRepr + @deprecated("BinaryRepr is not an abstract type anymore. It's an alias that points at AbstractFile. It'll be removed before Scala 2.11 is released.", "2.11.0-M5") + type BinaryRepr = AbstractFile /** The compiler classpath. */ - def classPath: ClassPath[BinaryRepr] - - /** The root symbol loader. */ - def rootLoader: LazyType + def classPath: ClassPath[AbstractFile] /** Update classpath with a substitution that maps entries to entries */ - def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) + def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) /** Any platform-specific phases. */ def platformPhases: List[SubComponent] @@ -36,16 +34,13 @@ trait Platform { /** The various ways a boxed primitive might materialize at runtime. 
*/ def isMaybeBoxed(sym: Symbol): Boolean - /** Create a new class loader to load class file `bin` */ - def newClassLoader(bin: BinaryRepr): loaders.SymbolLoader - /** * Tells whether a class should be loaded and entered into the package * scope. On .NET, this method returns `false` for all synthetic classes * (anonymous classes, implementation classes, module classes), their * symtab is encoded in the pickle of another class. */ - def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean + def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean /** * Tells whether a class with both a binary and a source representation @@ -53,6 +48,6 @@ trait Platform { * on the JVM similar to javac, i.e. if the source file is newer than the classfile, * a re-compile is triggered. On .NET by contrast classfiles always take precedence. */ - def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean + def needCompile(bin: AbstractFile, src: AbstractFile): Boolean } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index e6f21fc1e3..410d451316 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1479,26 +1479,18 @@ abstract class GenICode extends SubComponent { if (mustUseAnyComparator) { // when -optimise is on we call the @inline-version of equals, found in ScalaRunTime - val equalsMethod = + val equalsMethod: Symbol = { if (!settings.optimise) { - def default = platform.externalEquals - platform match { - case x: JavaPlatform => - import x._ - if (l.tpe <:< BoxedNumberClass.tpe) { - if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum - else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar - else externalEqualsNumObject - } - else default - - case _ => default - } - } - else { + if (l.tpe <:< BoxedNumberClass.tpe) { + if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum + else if 
(r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar + else platform.externalEqualsNumObject + } else platform.externalEquals + } else { ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule)) getMember(ScalaRunTimeModule, nme.inlinedEquals) } + } val ctx1 = genLoad(l, ctx, ObjectReference) val ctx2 = genLoad(r, ctx1, ObjectReference) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index 3f2141782a..b9eb8f8aac 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -10,6 +10,7 @@ package icode import java.io.PrintWriter import analysis.{ Liveness, ReachingDefinitions } import scala.tools.nsc.symtab.classfile.ICodeReader +import scala.reflect.io.AbstractFile /** Glue together ICode parts. * @@ -108,6 +109,12 @@ abstract class ICodes extends AnyRef object icodeReader extends ICodeReader { lazy val global: ICodes.this.global.type = ICodes.this.global + import global._ + def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = + global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name) + lazy val symbolTable: global.type = global + lazy val loaders: global.loaders.type = global.loaders + def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath } /** A phase which works on icode. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index a7f43eefed..683f35e41f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1187,22 +1187,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } if (mustUseAnyComparator) { - val equalsMethod = { - - def default = platform.externalEquals - - platform match { - case x: JavaPlatform => - import x._ - if (l.tpe <:< BoxedNumberClass.tpe) { - if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum - else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar - else externalEqualsNumObject - } - else default - - case _ => default - } + val equalsMethod: Symbol = { + if (l.tpe <:< BoxedNumberClass.tpe) { + if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum + else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar + else platform.externalEqualsNumObject + } else platform.externalEquals } genLoad(l, ObjectReference) genLoad(r, ObjectReference) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 62270b7c0a..182209dfe6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -654,7 +654,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters { def emitArgument(av: asm.AnnotationVisitor, name: String, arg: ClassfileAnnotArg) { - arg match { + (arg: @unchecked) match { case LiteralAnnotArg(const) => if (const.isNonUnitAnyVal) { av.visit(name, const.value) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index ea2cbbe3d3..4cb2f514ec 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ 
b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -932,7 +932,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def emitArgument(av: asm.AnnotationVisitor, name: String, arg: ClassfileAnnotArg) { - arg match { + (arg: @unchecked) match { case LiteralAnnotArg(const) => if(const.isNonUnitAnyVal) { av.visit(name, const.value) } diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala deleted file mode 100644 index 81d64421b3..0000000000 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ /dev/null @@ -1,221 +0,0 @@ -package scala.tools.nsc -package dependencies - -import symtab.Flags - -import scala.collection._ - -/** A component that describes the possible changes between successive - * compilations of a class. - */ -abstract class Changes { - - /** A compiler instance used to compile files on demand. */ - val compiler: Global - - import compiler._ - import symtab.Flags._ - - sealed abstract class Change - - private lazy val annotationsChecked = - List(definitions.SpecializedClass) // Any others that should be checked? - - private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED | - OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD | - MODULE | INTERFACE | PARAM | BYNAMEPARAM | CONTRAVARIANT | - DEFAULTPARAM | ACCESSOR | LAZY | SPECIALIZED - - /** Are the new modifiers more restrictive than the old ones? */ - private def moreRestrictive(from: Long, to: Long): Boolean = - ((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L) - || (((to & PROTECTED) != 0L) && (from & PROTECTED) == 0L)) - - /** Check if flags have changed **/ - private def modifiedFlags(from: Long, to: Long): Boolean = - (from & IMPLICIT) != (to & IMPLICIT) - - /** An entity in source code, either a class or a member definition. - * Name is fully-qualified. 
- */ - sealed abstract class Entity - case class Class(name: String) extends Entity - case class Definition(name: String) extends Entity - - case class Added(e: Entity) extends Change - case class Removed(e: Entity) extends Change - case class Changed(e: Entity)(implicit val reason: String) extends Change { - override def toString = "Changed(" + e + ")[" + reason + "]" - } - case class ParentChanged(e: Entity) extends Change - - private val changedTypeParams = new mutable.HashSet[String] - - private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean = - sameSymbol(sym1, sym2, simple = true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140 - private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean = - if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName - private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean = - (sym1.flags & flagsToCheck) == (sym2.flags & flagsToCheck) - private def sameAnnotations(sym1: Symbol, sym2: Symbol): Boolean = - annotationsChecked.forall(a => - (sym1.hasAnnotation(a) == sym2.hasAnnotation(a))) - - private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = sameType0(tp1, tp2) - - private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match { - /*case (ErrorType, _) => false - case (WildcardType, _) => false - case (_, ErrorType) => false - case (_, WildcardType) => false - */ - case (NoType, _) => false - case (NoPrefix, NoPrefix) => true - case (_, NoType) => false - case (_, NoPrefix) => false - - case (ThisType(sym1), ThisType(sym2)) - if sameSymbol(sym1, sym2) => true - - case (SingleType(pre1, sym1), SingleType(pre2, sym2)) - if sameType(pre1, pre2) && sameSymbol(sym1, sym2) => true - case (ConstantType(value1), ConstantType(value2)) => - value1 == value2 - case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) => - val testSymbols = - if (!sameSymbol(sym1, sym2)) 
{ - val v = (!strict && sym1.isType && sym2.isType && sameType(sym1.info, sym2.info)) - if (v) changedTypeParams += sym1.fullName - v - } else - !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName) - - // @M! normalize reduces higher-kinded case to PolyType's - testSymbols && sameType(pre1, pre2) && - (sym1.variance == sym2.variance) && - ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || - sameTypes(args1, args2)) - - case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) => - def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { - sym2 => - var e1 = s1.lookupEntry(sym2.name) - (e1 ne null) && { - var isEqual = false - while (!isEqual && (e1 ne null)) { - isEqual = sameType(e1.sym.info, sym2.info) - e1 = s1.lookupNextEntry(e1) - } - isEqual - } - } - sameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1) - case (mt1 @ MethodType(params1, res1), mt2 @ MethodType(params2, res2)) => - // new dependent types: probably fix this, use substSym as done for PolyType - sameTypes(tp1.paramTypes, tp2.paramTypes) && - (tp1.params corresponds tp2.params)((t1, t2) => sameParameterSymbolNames(t1, t2) && sameFlags(t1, t2)) && - sameType(res1, res2) && - mt1.isImplicit == mt2.isImplicit - case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => - sameTypeParams(tparams1, tparams2) && sameType(res1, res2) - case (NullaryMethodType(res1), NullaryMethodType(res2)) => - sameType(res1, res2) - case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) => - sameTypeParams(tparams1, tparams2)(strict = false) && sameType(res1, res2)(strict = false) - case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => - sameType(lo1, lo2) && sameType(hi1, hi2) - case (BoundedWildcardType(bounds), _) => - bounds containsType tp2 - case (_, BoundedWildcardType(bounds)) => - bounds containsType tp1 - case (AnnotatedType(_,_,_), _) => - annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && 
- sameType(tp1.withoutAnnotations, tp2.withoutAnnotations) - case (_, AnnotatedType(_,_,_)) => - annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && - sameType(tp1.withoutAnnotations, tp2.withoutAnnotations) - case (_: SingletonType, _: SingletonType) => - var origin1 = tp1 - while (origin1.underlying.isInstanceOf[SingletonType]) { - assert(origin1 ne origin1.underlying, origin1) - origin1 = origin1.underlying - } - var origin2 = tp2 - while (origin2.underlying.isInstanceOf[SingletonType]) { - assert(origin2 ne origin2.underlying, origin2) - origin2 = origin2.underlying - } - ((origin1 ne tp1) || (origin2 ne tp2)) && sameType(origin1, origin2) - case _ => - false - }) || { - val tp1n = normalizePlus(tp1) - val tp2n = normalizePlus(tp2) - ((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n) - } - - private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol])(implicit strict: Boolean) = - sameTypes(tparams1 map (_.info), tparams2 map (_.info)) && - sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe)) && - (tparams1 corresponds tparams2)((t1, t2) => sameAnnotations(t1, t2)) - - private def sameTypes(tps1: List[Type], tps2: List[Type])(implicit strict: Boolean) = - (tps1 corresponds tps2)(sameType(_, _)) - - /** Return the list of changes between 'from' and 'toSym.info'. 
- */ - def changeSet(from: Type, toSym: Symbol): List[Change] = { - implicit val defaultStrictTypeRefTest = true - - val to = toSym.info - changedTypeParams.clear() - def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC) - val cs = new mutable.ListBuffer[Change] - - if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) }) - cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString) - if (!sameTypeParams(from.typeParams, to.typeParams)(strict = false)) - cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams)) - - // new members not yet visited - val newMembers = mutable.HashSet[Symbol]() - newMembers ++= to.decls.iterator filter omitSymbols - - for (o <- from.decls.iterator filter omitSymbols) { - val n = to.decl(o.name) - newMembers -= n - - if (o.isClass) - cs ++= changeSet(o.info, n) - else if (n == NoSymbol) - cs += Removed(toEntity(o)) - else { - val newSym = - o match { - case _:TypeSymbol if o.isAliasType => - n.suchThat(ov => sameType(ov.info, o.info)) - case _ => - n.suchThat(ov => sameType(ov.tpe, o.tpe)) - } - if (newSym == NoSymbol || moreRestrictive(o.flags, newSym.flags) || modifiedFlags(o.flags, newSym.flags)) - cs += Changed(toEntity(o))(n + " changed from " + o.tpe + " to " + n.tpe + " flags: " + Flags.flagsToString(o.flags)) - else if (newSym.isGetter && (o.accessed(from).hasFlag(MUTABLE) != newSym.accessed.hasFlag(MUTABLE))) - // o.owner is already updated to newSym.owner - // so o.accessed will return the accessed for the new owner - cs += Changed(toEntity(o))(o.accessed(from) + " changed to " + newSym.accessed) - else - newMembers -= newSym - } - }: Unit // Give the type explicitly until #2281 is fixed - - cs ++= (newMembers map (Added compose toEntity)) - cs.toList - } - def removeChangeSet(sym: Symbol): Change = Removed(toEntity(sym)) - def changeChangeSet(sym: Symbol, msg: String): Change = Changed(toEntity(sym))(msg) - def parentChangeSet(sym: 
Symbol): Change = ParentChanged(toEntity(sym)) - - private def toEntity(sym: Symbol): Entity = - if (sym.isClass) Class(sym.fullName) - else Definition(sym.fullName) -} diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala index c9718f711a..8b897b83b2 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala @@ -33,11 +33,4 @@ trait AbsScalaSettings { def PhasesSetting(name: String, descr: String, default: String): PhasesSetting def StringSetting(name: String, helpArg: String, descr: String, default: String): StringSetting def PrefixSetting(name: String, prefix: String, descr: String): PrefixSetting - - /** **/ - abstract class SettingGroup(val prefix: String) extends AbsSetting { - def name = prefix - def helpDescription: String = sys.error("todo") - def unparse: List[String] = List(name) - } } diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index d173aaa848..37dfafb01c 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -35,8 +35,6 @@ trait StandardScalaSettings { val feature = BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly.") val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars") val help = BooleanSetting ("-help", "Print a synopsis of standard options") - val make = ChoiceSetting ("-make", "policy", "Recompilation detection policy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all") - . withDeprecationMessage ("this option is unmaintained. 
Use sbt or an IDE for selective recompilation.") val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.") val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index 4e4efef607..4b9e056df3 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -12,9 +12,10 @@ import scala.tools.nsc.io.AbstractFile * This class should be used whenever file dependencies and recompile sets * are managed automatically. */ -abstract class BrowsingLoaders extends SymbolLoaders { - import global._ +abstract class BrowsingLoaders extends GlobalSymbolLoaders { + val global: Global + import global._ import syntaxAnalyzer.{OutlineParser, MalformedInput} /** In browse mode, it can happen that an encountered symbol is already diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index fd85bbb169..6f27eb8660 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -20,8 +20,23 @@ import scala.reflect.io.{ AbstractFile, NoAbstractFile } * @version 1.0 */ abstract class SymbolLoaders { - val global: Global - import global._ + val symbolTable: symtab.SymbolTable { + def settings: Settings + } + val platform: backend.Platform { + val symbolTable: SymbolLoaders.this.symbolTable.type + } + import symbolTable._ + /** + * Required by ClassfileParser. Check documentation in that class for details. + */ + def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol + /** + * Should forward to `Run.compileLate`. 
The more principled fix would be to + * determine why this functionality is needed and extract it into a separate + * interface. + */ + protected def compileLate(srcfile: AbstractFile): Unit import SymbolLoadersStats._ protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = { @@ -75,14 +90,14 @@ abstract class SymbolLoaders { name+"\none of them needs to be removed from classpath" ) else if (settings.termConflict.value == "package") { - global.warning( + warning( "Resolving package/object name conflict in favor of package " + preExisting.fullName + ". The object will be inaccessible." ) root.info.decls.unlink(preExisting) } else { - global.warning( + warning( "Resolving package/object name conflict in favor of object " + preExisting.fullName + ". The package will be inaccessible." ) @@ -139,17 +154,17 @@ abstract class SymbolLoaders { /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep` */ - def initializeFromClassPath(owner: Symbol, classRep: ClassPath[platform.BinaryRepr]#ClassRep) { + def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) { ((classRep.binary, classRep.source) : @unchecked) match { case (Some(bin), Some(src)) if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) => if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path) - global.loaders.enterToplevelsFromSource(owner, classRep.name, src) + enterToplevelsFromSource(owner, classRep.name, src) case (None, Some(src)) => if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path) - global.loaders.enterToplevelsFromSource(owner, classRep.name, src) + enterToplevelsFromSource(owner, classRep.name, src) case (Some(bin), _) => - global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin)) + enterClassAndModule(owner, classRep.name, new ClassfileLoader(bin)) } } @@ -221,7 +236,7 @@ 
abstract class SymbolLoaders { /** * Load contents of a package */ - class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter { + class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter { protected def description = "package loader "+ classpath.name protected def doComplete(root: Symbol) { @@ -245,8 +260,24 @@ abstract class SymbolLoaders { class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter { private object classfileParser extends { - val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global - } with ClassfileParser + val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable + } with ClassfileParser { + override protected type ThisConstantPool = ConstantPool + override protected def newConstantPool: ThisConstantPool = new ConstantPool + override protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = + SymbolLoaders.this.lookupMemberAtTyperPhaseIfPossible(sym, name) + /* + * The type alias and the cast (where the alias is used) is needed due to problem described + * in SI-7585. In this particular case, the problem is that we need to make sure that symbol + * table used by symbol loaders is exactly the same as they one used by classfileParser. + * If you look at the path-dependent types we have here everything should work out ok but + * due to issue described in SI-7585 type-checker cannot tie the knot here. 
+ * + */ + private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type } + val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined] + val classPath = platform.classPath + } protected def description = "class file "+ classfile.toString @@ -272,7 +303,7 @@ abstract class SymbolLoaders { protected def description = "source file "+ srcfile.toString override def fromSource = true override def sourcefile = Some(srcfile) - protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile) + protected def doComplete(root: Symbol): Unit = compileLate(srcfile) } object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 4c0c16690f..c6ea6b23e5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -17,6 +17,7 @@ import scala.reflect.internal.{ JavaAccFlags } import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs} import scala.tools.nsc.io.AbstractFile +import util.ClassPath /** This abstract class implements a class file parser. * @@ -24,18 +25,40 @@ import scala.tools.nsc.io.AbstractFile * @version 1.0 */ abstract class ClassfileParser { - val global: Global - import global._ + val symbolTable: SymbolTable { + def settings: Settings + } + val loaders: SymbolLoaders { + val symbolTable: ClassfileParser.this.symbolTable.type + } + + import symbolTable._ + /** + * If typer phase is defined then perform member lookup of a symbol + * `sym` at typer phase. This method results from refactoring. The + * original author of the logic that uses typer phase didn't explain + * why we need to force infos at that phase specifically. It only mentioned + * that ClassfileParse can be called late (e.g. 
at flatten phase) and + we need to make sure we handle such a situation properly. + */ + protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol + + /** The compiler classpath. */ + def classPath: ClassPath[AbstractFile] + import definitions._ import scala.reflect.internal.ClassfileConstants._ import Flags._ + protected type ThisConstantPool <: ConstantPool + protected def newConstantPool: ThisConstantPool + protected var in: AbstractFileReader = _ // the class file reader protected var clazz: Symbol = _ // the class symbol containing dynamic members protected var staticModule: Symbol = _ // the module symbol containing static members protected var instanceScope: Scope = _ // the scope of all instance definitions protected var staticScope: Scope = _ // the scope of all static definitions - protected var pool: ConstantPool = _ // the classfile's constant pool + protected var pool: ThisConstantPool = _ // the classfile's constant pool protected var isScala: Boolean = _ // does class file describe a scala class? protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation? protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info @@ -50,7 +73,7 @@ abstract class ClassfileParser { def srcfile = srcfile0 - private def optimized = global.settings.optimise.value + private def optimized = settings.optimise.value private def currentIsTopLevel = !(currentClass.decodedName containsChar '$') // u1, u2, and u4 are what these data types are called in the JVM spec. 
@@ -70,7 +93,7 @@ abstract class ClassfileParser { private def readType() = pool getType u2 private object unpickler extends scala.reflect.internal.pickling.UnPickler { - val global: ClassfileParser.this.global.type = ClassfileParser.this.global + val symbolTable: ClassfileParser.this.symbolTable.type = ClassfileParser.this.symbolTable } private def handleMissing(e: MissingRequirementError) = { @@ -119,7 +142,7 @@ abstract class ClassfileParser { this.isScala = false parseHeader() - this.pool = new ConstantPool + this.pool = newConstantPool parseClass() } } @@ -134,11 +157,14 @@ abstract class ClassfileParser { abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") } - class ConstantPool { - private val len = u2 - private val starts = new Array[Int](len) - private val values = new Array[AnyRef](len) - private val internalized = new Array[Name](len) + /** + * Constructor of this class should not be called directly, use `newConstantPool` instead. + */ + protected class ConstantPool { + protected val len = u2 + protected val starts = new Array[Int](len) + protected val values = new Array[AnyRef](len) + protected val internalized = new Array[Name](len) { var i = 1 while (i < starts.length) { @@ -212,76 +238,13 @@ abstract class ClassfileParser { getExternalName((in getChar start).toInt) } - /** Return the symbol of the class member at `index`. - * The following special cases exist: - * - If the member refers to special `MODULE$` static field, return - * the symbol of the corresponding module. - * - If the member is a field, and is not found with the given name, - * another try is made by appending `nme.LOCAL_SUFFIX_STRING` - * - If no symbol is found in the right tpe, a new try is made in the - * companion class, in case the owner is an implementation class. 
- */ - def getMemberSymbol(index: Int, static: Boolean): Symbol = { - if (index <= 0 || len <= index) errorBadIndex(index) - var f = values(index).asInstanceOf[Symbol] - if (f eq null) { - val start = starts(index) - val first = in.buf(start).toInt - if (first != CONSTANT_FIELDREF && - first != CONSTANT_METHODREF && - first != CONSTANT_INTFMETHODREF) errorBadTag(start) - val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt) - debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName) - val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe) - debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0) - - forceMangledName(tpe0.typeSymbol.name, module = false) - val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe) - if (name == nme.MODULE_INSTANCE_FIELD) { - val index = in.getChar(start + 1).toInt - val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt) - //assert(name.endsWith("$"), "Not a module class: " + name) - f = forceMangledName(name dropRight 1, module = true) - if (f == NoSymbol) - f = rootMirror.getModuleByName(name dropRight 1) - } else { - val origName = nme.unexpandedName(name) - val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol - f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe) - if (f == NoSymbol) - f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) - if (f == NoSymbol) { - // if it's an impl class, try to find it's static member inside the class - if (ownerTpe.typeSymbol.isImplClass) { - f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) - } else { - log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe) - f = tpe match { - case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos) - case _ => 
owner.newVariable(name.toTermName, owner.pos) - } - f setInfo tpe - log("created fake member " + f.fullName) - } - } - } - assert(f != NoSymbol, - s"could not find $name: $tpe in $ownerTpe" + ( - if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else "" - ) - ) - values(index) = f - } - f - } - /** Return a name and a type at the given index. If the type is a method * type, a dummy symbol is created in `ownerTpe`, which is used as the * owner of its value parameters. This might lead to inconsistencies, * if a symbol of the given name already exists, and has a different * type. */ - private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = { + protected def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = { if (index <= 0 || len <= index) errorBadIndex(index) values(index) match { case p: ((Name @unchecked, Type @unchecked)) => p @@ -381,37 +344,16 @@ abstract class ClassfileParser { } /** Throws an exception signaling a bad constant index. */ - private def errorBadIndex(index: Int) = + protected def errorBadIndex(index: Int) = abort(s"bad constant pool index: $index at pos: ${in.bp}") /** Throws an exception signaling a bad tag at given address. */ - private def errorBadTag(start: Int) = + protected def errorBadTag(start: Int) = abort("bad constant pool tag ${in.buf(start)} at byte $start") } - /** Try to force the chain of enclosing classes for the given name. Otherwise - * flatten would not lift classes that were not referenced in the source code. 
- */ - def forceMangledName(name: Name, module: Boolean): Symbol = { - val parts = name.decode.toString.split(Array('.', '$')) - var sym: Symbol = rootMirror.RootClass - - // was "at flatten.prev" - enteringFlatten { - for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) { - val sym1 = enteringIcode { - sym.linkedClassOfClass.info - sym.info.decl(part.encode) - }//.suchThat(module == _.isModule) - - sym = sym1 orElse sym.info.decl(part.encode.toTypeName) - } - } - sym - } - private def loadClassSymbol(name: Name): Symbol = { - val file = global.classPath findSourceFile ("" +name) getOrElse { + val file = classPath findSourceFile ("" +name) getOrElse { // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects // that are not in their correct place (see bug for details) @@ -419,7 +361,7 @@ abstract class ClassfileParser { warning(s"Class $name not found - continuing with a stub.") return NoSymbol.newClass(name.toTypeName) } - val completer = new global.loaders.ClassfileLoader(file) + val completer = new loaders.ClassfileLoader(file) var owner: Symbol = rootMirror.RootClass var sym: Symbol = NoSymbol var ss: Name = null @@ -607,9 +549,8 @@ abstract class ClassfileParser { if (name == nme.CONSTRUCTOR) sawPrivateConstructor = true in.skip(2); skipAttributes() - } - else { - if ((sflags & PRIVATE) != 0L && optimized) { + } else { + if ((sflags & PRIVATE) != 0L && optimized) { // TODO this should be !optimized, no? See c4181f656d. 
in.skip(4); skipAttributes() } else { val name = readName() @@ -619,7 +560,7 @@ abstract class ClassfileParser { info match { case MethodType(params, restpe) => // if this is a non-static inner class, remove the explicit outer parameter - val newParams = innerClasses getEntry currentClass match { + val paramsNoOuter = innerClasses getEntry currentClass match { case Some(entry) if !isScalaRaw && !entry.jflags.isStatic => /* About `clazz.owner.isPackage` below: SI-5957 * For every nested java class A$B, there are two symbols in the scala compiler. @@ -633,6 +574,15 @@ abstract class ClassfileParser { case _ => params } + val newParams = paramsNoOuter match { + case (init :+ tail) if jflags.isSynthetic => + // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which + // are added when an inner class needs to access a private constructor. + init + case _ => + paramsNoOuter + } + info = MethodType(newParams, clazz.tpe) } // Note: the info may be overrwritten later with a generic signature @@ -1057,7 +1007,7 @@ abstract class ClassfileParser { def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) { def jflags = entry.jflags - val completer = new global.loaders.ClassfileLoader(file) + val completer = new loaders.ClassfileLoader(file) val name = entry.originalName val sflags = jflags.toScalaFlags val owner = ownerForFlags(jflags) @@ -1065,7 +1015,7 @@ abstract class ClassfileParser { val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer - innerModule.moduleClass setInfo global.loaders.moduleClassLoader + innerModule.moduleClass setInfo loaders.moduleClassLoader List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file) scope enter innerClass @@ -1086,7 +1036,7 @@ abstract class ClassfileParser { for (entry <- innerClasses.entries) { // create a new class member for immediate inner 
classes if (entry.outerName == currentClass) { - val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse { + val file = classPath.findSourceFile(entry.externalName.toString) getOrElse { throw new AssertionError(entry.externalName) } enterClassAndModule(entry, file) @@ -1171,19 +1121,15 @@ abstract class ClassfileParser { case Some(entry) => innerSymbol(entry) case _ => NoSymbol } - // if loading during initialization of `definitions` typerPhase is not yet set. - // in that case we simply load the member at the current phase - @inline private def enteringTyperIfPossible(body: => Symbol): Symbol = - if (currentRun.typerPhase eq null) body else enteringTyper(body) private def innerSymbol(entry: InnerClassEntry): Symbol = { val name = entry.originalName.toTypeName val enclosing = entry.enclosing def getMember = ( if (enclosing == clazz) entry.scope lookup name - else enclosing.info member name + else lookupMemberAtTyperPhaseIfPossible(enclosing, name) ) - enteringTyperIfPossible(getMember) + getMember /* There used to be an assertion that this result is not NoSymbol; changing it to an error * revealed it had been going off all the time, but has been swallowed by a catch t: Throwable * in Repository.scala. 
Since it has been accomplishing nothing except misleading anyone who diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 01a117895f..f704d8ac89 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -20,6 +20,8 @@ import scala.reflect.internal.JavaAccFlags */ abstract class ICodeReader extends ClassfileParser { val global: Global + val symbolTable: global.type + val loaders: global.loaders.type import global._ import icodes._ @@ -28,6 +30,95 @@ abstract class ICodeReader extends ClassfileParser { var method: IMethod = NoIMethod // the current IMethod var isScalaModule = false + override protected type ThisConstantPool = ICodeConstantPool + override protected def newConstantPool = new ICodeConstantPool + + /** Try to force the chain of enclosing classes for the given name. Otherwise + * flatten would not lift classes that were not referenced in the source code. + */ + def forceMangledName(name: Name, module: Boolean): Symbol = { + val parts = name.decode.toString.split(Array('.', '$')) + var sym: Symbol = rootMirror.RootClass + + // was "at flatten.prev" + enteringFlatten { + for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) { + val sym1 = enteringIcode { + sym.linkedClassOfClass.info + sym.info.decl(part.encode) + }//.suchThat(module == _.isModule) + + sym = sym1 orElse sym.info.decl(part.encode.toTypeName) + } + } + sym + } + + protected class ICodeConstantPool extends ConstantPool { + /** Return the symbol of the class member at `index`. + * The following special cases exist: + * - If the member refers to special `MODULE$` static field, return + * the symbol of the corresponding module. 
+ * - If the member is a field, and is not found with the given name, + * another try is made by appending `nme.LOCAL_SUFFIX_STRING` + * - If no symbol is found in the right tpe, a new try is made in the + * companion class, in case the owner is an implementation class. + */ + def getMemberSymbol(index: Int, static: Boolean): Symbol = { + if (index <= 0 || len <= index) errorBadIndex(index) + var f = values(index).asInstanceOf[Symbol] + if (f eq null) { + val start = starts(index) + val first = in.buf(start).toInt + if (first != CONSTANT_FIELDREF && + first != CONSTANT_METHODREF && + first != CONSTANT_INTFMETHODREF) errorBadTag(start) + val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt) + debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName) + val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe) + debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0) + + forceMangledName(tpe0.typeSymbol.name, module = false) + val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe) + if (name == nme.MODULE_INSTANCE_FIELD) { + val index = in.getChar(start + 1).toInt + val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt) + //assert(name.endsWith("$"), "Not a module class: " + name) + f = forceMangledName(name dropRight 1, module = true) + if (f == NoSymbol) + f = rootMirror.getModuleByName(name dropRight 1) + } else { + val origName = nme.unexpandedName(name) + val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol + f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe) + if (f == NoSymbol) + f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) + if (f == NoSymbol) { + // if it's an impl class, try to find its static member inside the class + if (ownerTpe.typeSymbol.isImplClass) { + f =
ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) + } else { + log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe) + f = tpe match { + case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos) + case _ => owner.newVariable(name.toTermName, owner.pos) + } + f setInfo tpe + log("created fake member " + f.fullName) + } + } + } + assert(f != NoSymbol, + s"could not find $name: $tpe in $ownerTpe" + ( + if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else "" + ) + ) + values(index) = f + } + f + } + } + /** Read back bytecode for the given class symbol. It returns * two IClass objects, one for static members and one * for non-static members. diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 3265af9f5b..9bad29097c 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -486,14 +486,10 @@ abstract class Pickler extends SubComponent { } } def putClassfileAnnotArg(carg: ClassfileAnnotArg) { - carg match { - case LiteralAnnotArg(const) => - putConstant(const) - case ArrayAnnotArg(args) => - if (putEntry(carg)) - args foreach putClassfileAnnotArg - case NestedAnnotArg(annInfo) => - putAnnotation(annInfo) + (carg: @unchecked) match { + case LiteralAnnotArg(const) => putConstant(const) + case ArrayAnnotArg(args) => if (putEntry(carg)) args foreach putClassfileAnnotArg + case NestedAnnotArg(annInfo) => putAnnotation(annInfo) } } val AnnotationInfo(tpe, args, assocs) = annot @@ -559,13 +555,10 @@ abstract class Pickler extends SubComponent { /** Write a ClassfileAnnotArg (argument to classfile annotation) */ def writeClassfileAnnotArg(carg: ClassfileAnnotArg) { - carg match { - case LiteralAnnotArg(const) => - writeRef(const) - case ArrayAnnotArg(args) => - writeRef(carg) - case NestedAnnotArg(annInfo) => - 
writeRef(annInfo) + (carg: @unchecked) match { + case LiteralAnnotArg(const) => writeRef(const) + case ArrayAnnotArg(args) => writeRef(carg) + case NestedAnnotArg(annInfo) => writeRef(annInfo) } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index f527c30b8a..f089c8f5a5 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -488,8 +488,13 @@ trait MatchAnalysis extends MatchApproximation { object CounterExample { def prune(examples: List[CounterExample]): List[CounterExample] = { - val distinct = examples.filterNot(_ == NoExample).toSet - distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList + // SI-7669 Warning: we don't use examples.distinct here any more as + // we can have A != B && A.coveredBy(B) && B.coveredBy(A) + // with Nil and List(). + val result = mutable.Buffer[CounterExample]() + for (example <- examples if (!result.exists(example coveredBy _))) + result += example + result.toList } } @@ -591,7 +596,7 @@ trait MatchAnalysis extends MatchApproximation { private def unique(variable: Var): VariableAssignment = uniques.getOrElseUpdate(variable, { val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO - VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty) + VariableAssignment(variable, eqTo.toList, neqTo.toList) }) def apply(variable: Var): VariableAssignment = { @@ -605,7 +610,7 @@ trait MatchAnalysis extends MatchApproximation { else { findVar(pre) foreach { preVar => val outerCtor = this(preVar) - outerCtor.fields(field) = newCtor + outerCtor.addField(field, newCtor) } newCtor } @@ -613,7 +618,8 @@ trait MatchAnalysis extends MatchApproximation { } // node in the tree that describes how to construct a counter-example - case class VariableAssignment(variable: Var,
equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) { + case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const]) { + private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty // need to prune since the model now incorporates all super types of a constant (needed for reachability) private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp))) private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp) @@ -622,6 +628,11 @@ trait MatchAnalysis extends MatchApproximation { private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors + def addField(symbol: Symbol, assign: VariableAssignment) { + // SI-7669 Only register this field if this class contains it. + val shouldConstrainField = !symbol.isCaseAccessor || caseFieldAccs.contains(symbol) + if (shouldConstrainField) fields(symbol) = assign + } def allFieldAssignmentsLegal: Boolean = (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 1f8f13ae02..60641d6752 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -267,10 +267,6 @@ trait Contexts { self: Analyzer => /** Saved type bounds for type parameters which are narrowed in a GADT.
*/ var savedTypeBounds: List[(Symbol, Type)] = List() - /** Indentation level, in columns, for output under -Ytyper-debug */ - var typingIndentLevel: Int = 0 - def typingIndent = " " * typingIndentLevel - /** The next enclosing context (potentially `this`) that is owned by a class or method */ def enclClassOrMethod: Context = if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this @@ -282,6 +278,11 @@ trait Contexts { self: Analyzer => /** ...or an Apply. */ def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply]) + def siteString = { + def what_s = if (owner.isConstructor) "" else owner.kindString + def where_s = if (owner.isClass) "" else "in " + enclClass.owner.decodedName + List(what_s, owner.decodedName, where_s) filterNot (_ == "") mkString " " + } // // Tracking undetermined type parameters for type argument inference. // @@ -445,7 +446,6 @@ trait Contexts { self: Analyzer => // Fields that are directly propagated c.variance = variance c.diagnostic = diagnostic - c.typingIndentLevel = typingIndentLevel c.openImplicits = openImplicits c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below. 
c._reportBuffer = reportBuffer diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8e79b56814..100112fec1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -31,8 +31,8 @@ trait Implicits { import global._ import definitions._ import ImplicitsStats._ - import typeDebug.{ ptBlock, ptLine } - import global.typer.{ printTyping, deindentTyping, indentTyping, printInference } + import typingStack.{ printTyping } + import typeDebug._ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos) @@ -60,25 +60,13 @@ trait Implicits { * @return A search result */ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { - printInference("[infer %s] %s with pt=%s in %s".format( - if (isView) "view" else "implicit", - tree, pt, context.owner.enclClass) - ) - printTyping( - ptBlock("infer implicit" + (if (isView) " view" else ""), - "tree" -> tree, - "pt" -> pt, - "undetparams" -> context.outer.undetparams - ) - ) - indentTyping() - + val shouldPrint = printTypings && !context.undetparams.isEmpty val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null val start = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null - if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty) - printTyping("typing implicit: %s %s".format(tree, context.undetparamsString)) + if (shouldPrint) + typingStack.printTyping(tree, 
"typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) { @@ -88,15 +76,13 @@ trait Implicits { }) debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors) } - printInference("[infer implicit] inferred " + result) context.undetparams = context.undetparams filterNot result.subst.from.contains if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start) if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart) if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart) if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart) - deindentTyping() - printTyping("Implicit search yielded: "+ result) + result } @@ -143,6 +129,7 @@ trait Implicits { private val implicitsCache = new LinkedHashMap[Type, Infoss] private val infoMapCache = new LinkedHashMap[Symbol, InfoMap] private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]() + private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 } private def isInvalidConversionTarget(tpe: Type): Boolean = tpe match { case Function1(_, out) => AnyRefClass.tpe <:< out @@ -325,18 +312,23 @@ trait Implicits { * (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument) * If it's set to NoPosition, then position-based services will use `tree.pos` */ - class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) - extends Typer(context0) with ImplicitsContextErrors { - printTyping( - ptBlock("new ImplicitSearch", - "tree" -> tree, - "pt" -> pt, - "isView" -> isView, - "context0" -> context0, - "undetparams" -> context.outer.undetparams - ) - ) -// assert(tree.isEmpty || tree.pos.isDefined, tree) + 
class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors { + val searchId = implicitSearchId() + private def typingLog(what: String, msg: String) = + typingStack.printTyping(tree, f"[search #$searchId] $what $msg") + + import infer._ + if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount) + + /** The type parameters to instantiate */ + val undetParams = if (isView) Nil else context.outer.undetparams + val wildPt = approximate(pt) + + def undet_s = if (undetParams.isEmpty) "" else undetParams.mkString(" inferring ", ", ", "") + def tree_s = typeDebug ptTree tree + def ctx_s = fullSiteString(context) + typingLog("start", s"`$tree_s`$undet_s, searching for adaptation to pt=$pt $ctx_s") + def pos = if (pos0 != NoPosition) pos0 else tree.pos def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = { @@ -344,8 +336,6 @@ trait Implicits { reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason) SearchFailure } - - import infer._ /** Is implicit info `info1` better than implicit info `info2`? */ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { @@ -418,14 +408,8 @@ trait Implicits { overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1)) } - if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount) - - /** The type parameters to instantiate */ - val undetParams = if (isView) List() else context.outer.undetparams - /** The expected type with all undetermined type parameters replaced with wildcards. */ def approximate(tp: Type) = deriveTypeWithWildcards(undetParams)(tp) - val wildPt = approximate(pt) /** Try to construct a typed tree from given implicit info with given * expected type. 
@@ -582,22 +566,12 @@ trait Implicits { private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = { if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits) - printTyping ( - ptBlock("typedImplicit0", - "info.name" -> info.name, - "ptChecked" -> ptChecked, - "pt" -> wildPt, - "orig" -> ptBlock("info", - "undetParams" -> undetParams, - "info.pre" -> info.pre - ).replaceAll("\\n", "\n ") - ) - ) - - if (ptChecked || matchesPt(info)) - typedImplicit1(info, isLocal) - else - SearchFailure + val ok = ptChecked || matchesPt(info) && { + def word = if (isLocal) "local " else "" + typingLog("match", s"$word$info") + true + } + if (ok) typedImplicit1(info, isLocal) else SearchFailure } private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = { @@ -618,9 +592,7 @@ trait Implicits { Select(gen.mkAttributedQualifier(info.pre), implicitMemberName) } } - printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format( - typeDebug.ptTree(itree), wildPt, info.name, info.tpe) - ) + typingLog("considering", typeDebug.ptTree(itree)) def fail(reason: String): SearchResult = failure(itree, reason) def fallback = typed1(itree, EXPRmode, wildPt) @@ -643,13 +615,10 @@ trait Implicits { if (Statistics.canEnable) Statistics.incCounter(typedImplicits) - printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt)) val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun } else adapt(itree1, EXPRmode, wildPt) - printTyping("adapted implicit %s:%s to %s".format( - itree1.symbol, itree2.tpe, wildPt) - ) + typingStack.showAdapt(itree, itree2, pt, context) def hasMatchingSymbol(tree: Tree): Boolean = (tree.symbol == info.sym) || { tree match { @@ -669,15 +638,9 @@ trait Implicits { val tvars = undetParams map freshVar def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars) - printInference("[search] considering %s (pt contains %s) trying %s against 
pt=%s".format( - if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "), - typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") }, - itree2.tpe, pt - )) - if (matchesPt(itree2.tpe, ptInstantiated, undetParams)) { if (tvars.nonEmpty) - printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr))) + typingLog("solve", ptLine("tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr))) val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), upper = false, lubDepth(List(itree2.tpe, pt))) @@ -729,7 +692,7 @@ trait Implicits { case None => val result = new SearchResult(itree2, subst) if (Statistics.canEnable) Statistics.incCounter(foundImplicits) - printInference("[success] found %s for pt %s".format(result, ptInstantiated)) + typingLog("success", s"inferred value of type $ptInstantiated is $result") result } } @@ -868,10 +831,7 @@ trait Implicits { matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) } if (eligible.nonEmpty) - printInference("[search%s] %s with pt=%s in %s, eligible:\n %s".format( - if (isView) " view" else "", - tree, pt, context.owner.enclClass, eligible.mkString("\n ")) - ) + printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") /** Faster implicit search. 
Overall idea: * - prune aggressively @@ -898,10 +858,7 @@ trait Implicits { try improves(i, alt) catch { case e: CyclicReference => - if (printInfers) { - println(i+" discarded because cyclic reference occurred") - e.printStackTrace() - } + debugwarn(s"Discarding $i during implicit search due to cyclic reference") true } }) @@ -1044,9 +1001,7 @@ trait Implicits { tp match { case TypeRef(pre, sym, args) => if (sym.isClass) { - if (!((sym.name == tpnme.REFINE_CLASS_NAME) || - (sym.name startsWith tpnme.ANON_CLASS_NAME) || - (sym.name == tpnme.ROOT))) { + if (!sym.isAnonOrRefinementClass && !sym.isRoot) { if (sym.isStatic && !(pending contains sym)) infoMap ++= { infoMapCache get sym match { @@ -1060,7 +1015,7 @@ trait Implicits { } else getClassParts(tp) - args foreach (getParts(_)) + args foreach getParts } } else if (sym.isAliasType) { getParts(tp.normalize) // SI-7180 Normalize needed to expand HK type refs @@ -1088,9 +1043,9 @@ trait Implicits { val infoMap = new InfoMap getParts(tp)(infoMap, new mutable.HashSet(), Set()) - printInference( - ptBlock("companionImplicitMap " + tp, infoMap.toSeq.map({ case (k, v) => ("" + k, v.mkString(", ")) }): _*) - ) + if (infoMap.nonEmpty) + printTyping(tree, infoMap.size + " implicits in companion scope") + infoMap } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 961ef484d8..06892053fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -20,8 +20,8 @@ trait Infer extends Checkable { import global._ import definitions._ - import typer.printInference import typeDebug.ptBlock + import typingStack.{ printTyping } /** The formal parameter types corresponding to `formals`. 
* If `formals` has a repeated last parameter, a list of @@ -216,8 +216,10 @@ trait Infer extends Checkable { def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Int): List[Type] = { - if (tvars.nonEmpty) - printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", ")) + if (tvars.nonEmpty) { + def tp_s = (tparams, tvars).zipped map { case (tp, tv) => s"${tp.name}/$tv" } mkString "," + printTyping(s"solving for $tp_s") + } if (!solve(tvars, tparams, variances, upper, depth)) { // no panic, it's good enough to just guess a solution, we'll find out @@ -987,21 +989,13 @@ trait Infer extends Checkable { * attempts fail, an error is produced. */ def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) { - printInference( - ptBlock("inferArgumentInstance", - "tree" -> tree, - "tree.tpe" -> tree.tpe, - "undetparams" -> undetparams, - "strictPt" -> strictPt, - "lenientPt" -> lenientPt - ) - ) + printTyping(tree, s"inferring arg instance based on pt0=$strictPt, pt1=$lenientPt") var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false) if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt)) targs = exprTypeArgs(undetparams, tree.tpe, lenientPt, useWeaklyCompatible = false) substExpr(tree, undetparams, targs, lenientPt) - printInference("[inferArgumentInstance] finished, targs = " + targs) + printTyping(tree, s"infer arg instance from pt0=$strictPt, pt1=$lenientPt; targs=$targs") } /** Infer type arguments `targs` for `tparams` of polymorphic expression in `tree`, given prototype `pt`. 
@@ -1013,29 +1007,20 @@ trait Infer extends Checkable { val treeTp = if (treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0 val tvars = tparams map freshVar val targs = exprTypeArgs(tvars, tparams, treeTp, pt, useWeaklyCompatible) - printInference( - ptBlock("inferExprInstance", - "tree" -> tree, - "tree.tpe"-> tree.tpe, - "tparams" -> tparams, - "pt" -> pt, - "targs" -> targs, - "tvars" -> tvars - ) - ) + def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString "," + printTyping(tree, s"infer expr instance from pt=$pt, $infer_s") if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226 substExpr(tree, tparams, targs, pt) List() } else { val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targs) - printInference( - ptBlock("inferExprInstance/AdjustedTypeArgs", - "okParams" -> okParams, - "okArgs" -> okArgs, - "leftUndet" -> leftUndet - ) - ) + def solved_s = map2(okParams, okArgs)((p, a) => s"$p=$a") mkString "," + def undet_s = leftUndet match { + case Nil => "" + case ps => ps.mkString(", undet=", ",", "") + } + printTyping(tree, s"infer solved $solved_s$undet_s") substExpr(tree, okParams, okArgs, pt) leftUndet } @@ -1077,14 +1062,6 @@ trait Infer extends Checkable { val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt) - printInference("[infer method] solving for %s in %s based on (%s)%s (%s)".format( - undetparams.map(_.name).mkString(", "), - fn.tpe, - argtpes.mkString(", "), - restpe, - (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "") - )) - if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) { val treeSubst = new TreeTypeSubstituter(okparams, okargs) treeSubst traverseTrees fn :: args diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala 
b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 1282cfb416..cac6bd2ef2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -35,7 +35,8 @@ trait Namers extends MethodSynthesis { } def apply(tree: Tree) = { val r = transform(tree) - if (r.exists(_.isEmpty)) TypeTree() + if (r exists { case tt: TypeTree => tt.isEmpty case _ => false }) + TypeTree() else r } } @@ -1084,6 +1085,9 @@ trait Namers extends MethodSynthesis { overriddenTp = overriddenTp.resultType } + // SI-7668 Substitute parameters from the parent method with those of the overriding method. + overriddenTp = overriddenTp.substSym(overridden.paramss.flatten, vparamss.flatten.map(_.symbol)) + overriddenTp match { case NullaryMethodType(rtpe) => overriddenTp = rtpe case MethodType(List(), rtpe) => overriddenTp = rtpe diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index c9849eebb5..1b6963b598 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -249,7 +249,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans * 1.8.1 M's type is a subtype of O's type, or * 1.8.2 M is of type []S, O is of type ()T and S <: T, or * 1.8.3 M is of type ()S, O is of type []T and S <: T, or - * 1.9. If M is a macro def, O cannot be deferred. + * 1.9. If M is a macro def, O cannot be deferred unless there's a concrete method overriding O. * 1.10. If M is not a macro def, O cannot be a macro def. * 2. Check that only abstract classes have deferred members * 3. 
Check that concrete classes do not have deferred definitions @@ -440,7 +440,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred && member.isValue && !member.isLazy) { overrideError("must be declared lazy to override a concrete lazy value") - } else if (other.isDeferred && member.isTermMacro) { // (1.9) + } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9) overrideError("cannot be used here - term macros cannot override abstract methods") } else if (other.isTermMacro && !member.isTermMacro) { // (1.10) overrideError("cannot be used here - only term macros can override term macros") diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index a93baabc51..b4a37f9943 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -167,10 +167,10 @@ trait TypeDiagnostics { def explainAlias(tp: Type) = { // Don't automatically normalize standard aliases; they still will be // expanded if necessary to disambiguate simple identifiers. - if ((tp eq tp.normalize) || tp.typeSymbolDirect.isInDefaultNamespace) "" - else { + val deepDealias = DealiasedType(tp) + if (tp eq deepDealias) "" else { // A sanity check against expansion being identical to original. 
- val s = "" + DealiasedType(tp) + val s = "" + deepDealias if (s == "" + tp) "" else "\n (which expands to) " + s } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1a9a30c2ad..d2ff47626d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -26,7 +26,7 @@ import Mode._ * @author Martin Odersky * @version 1.0 */ -trait Typers extends Adaptations with Tags { +trait Typers extends Adaptations with Tags with TypersTracking { self: Analyzer => import global._ @@ -90,12 +90,6 @@ trait Typers extends Adaptations with Tags { private final val InterpolatorCodeRegex = """\$\{.*?\}""".r private final val InterpolatorIdentRegex = """\$\w+""".r - // To enable decent error messages when the typer crashes. - // TODO - this only catches trees which go through def typed, - // but there are all kinds of back ways - typedClassDef, etc. etc. - // Funnel everything through one doorway. - var lastTreeToTyper: Tree = EmptyTree - // when true: // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope) // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction @@ -104,7 +98,7 @@ trait Typers extends Adaptations with Tags { abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors { import context0.unit - import typeDebug.{ ptTree, ptBlock, ptLine } + import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed } import TyperErrorGen._ val infer = new Inferencer(context0) { @@ -1703,14 +1697,14 @@ trait Typers extends Adaptations with Tags { * So we strip the duplicates before typer. 
*/ private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match { - case Nil => Nil - case x :: xs => - val sym = x.symbol + case Nil => Nil + case x :: xs => + val sym = x.symbol x :: fixDuplicateSyntheticParents( - if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym) - else xs - ) - } + if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym) + else xs + ) + } def typedParentTypes(templ: Template): List[Tree] = templ.parents match { case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe))) @@ -1730,17 +1724,18 @@ trait Typers extends Adaptations with Tags { typedPrimaryConstrBody(templ)(EmptyTree) supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt)) - } catch { - case ex: TypeError => - // fallback in case of cyclic errors - // @H none of the tests enter here but I couldn't rule it out + } + catch { + case ex: TypeError => + // fallback in case of cyclic errors + // @H none of the tests enter here but I couldn't rule it out // upd. @E when a definition inherits itself, we end up here // because `typedParentType` triggers `initialize` for parent types symbols - log("Type error calculating parents in template " + templ) - log("Error: " + ex) - ParentTypesError(templ, ex) - List(TypeTree(AnyRefTpe)) - } + log("Type error calculating parents in template " + templ) + log("Error: " + ex) + ParentTypesError(templ, ex) + List(TypeTree(AnyRefTpe)) + } } /** <p>Check that</p> @@ -2108,6 +2103,8 @@ trait Typers extends Adaptations with Tags { // !!! This method is redundant with other, less buggy ones. def decompose(call: Tree): (Tree, List[Tree]) = call match { + case _ if call.isErrorTyped => // e.g. SI-7636 + (call, Nil) case Apply(fn, args) => // an object cannot be allowed to pass a reference to itself to a superconstructor // because of initialization issues; SI-473, SI-3913, SI-6928. 
@@ -3510,7 +3507,7 @@ trait Typers extends Adaptations with Tags { else None case _ => None - } + } } /** @@ -3698,8 +3695,7 @@ trait Typers extends Adaptations with Tags { val Function(arg :: Nil, rhs) = typed(func, mode, funcType) rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil) - } - + } def annInfo(t: Tree): AnnotationInfo = t match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos) @@ -3725,21 +3721,22 @@ trait Typers extends Adaptations with Tags { if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation else annInfo(typedAnn) - }) } + ) + } /** Compute an existential type from raw hidden symbols `syms` and type `tp` */ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner) def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = ( - ctx.owner.isTerm && - (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || - { - var ctx1 = ctx.outer - while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer - (ctx1 != NoContext) && isReferencedFrom(ctx1, sym) - } + ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || { + var ctx1 = ctx.outer + while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) + ctx1 = ctx1.outer + + (ctx1 != NoContext) && isReferencedFrom(ctx1, sym) + } ) def isCapturedExistential(sym: Symbol) = ( @@ -4017,26 +4014,14 @@ trait Typers extends Adaptations with Tags { findSelection(cxTree) match { case Some((opName, treeInfo.Applied(_, targs, _))) => val fun = gen.mkTypeApply(Select(qual, opName), targs) + if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // SI-7617 atPos(qual.pos)(Apply(fun, Literal(Constant(name.decode)) :: Nil)) case _ => setError(tree) } } } - - def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = - silent(typeTree) orElse (err => DynamicRewriteError(tree, err)) - } - - final 
def deindentTyping() = context.typingIndentLevel -= 2 - final def indentTyping() = context.typingIndentLevel += 2 - @inline final def printTyping(s: => String) = { - if (printTypings) - println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent)) - } - @inline final def printInference(s: => String) = { - if (printInfers) - println(s) + def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err)) } def typed1(tree: Tree, mode: Mode, pt: Type): Tree = { @@ -4054,7 +4039,7 @@ trait Typers extends Adaptations with Tags { else lookupInOwner(qual.tpe.typeSymbol, name) orElse { NotAMemberError(tree, qual, name) NoSymbol - } + } ) def typedAnnotated(atd: Annotated): Tree = { @@ -4194,7 +4179,9 @@ trait Typers extends Adaptations with Tags { } def typedAssign(lhs: Tree, rhs: Tree): Tree = { - val lhs1 = typed(lhs, EXPRmode | LHSmode) + // see SI-7617 for an explanation of why macro expansion is suppressed + def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode) + val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs))) val varsym = lhs1.symbol // see #2494 for double error message example @@ -4422,56 +4409,55 @@ trait Typers extends Adaptations with Tags { val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null def onError(typeError: AbsTypeError): Tree = { - if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start) - - // If the problem is with raw types, copnvert to existentials and try again. 
- // See #4712 for a case where this situation arises, - if ((fun.symbol ne null) && fun.symbol.isJavaDefined) { - val newtpe = rawToExistential(fun.tpe) - if (fun.tpe ne newtpe) { - // println("late cooking: "+fun+":"+fun.tpe) // DEBUG - return tryTypedApply(fun setType newtpe, args) - } + if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start) + + // If the problem is with raw types, copnvert to existentials and try again. + // See #4712 for a case where this situation arises, + if ((fun.symbol ne null) && fun.symbol.isJavaDefined) { + val newtpe = rawToExistential(fun.tpe) + if (fun.tpe ne newtpe) { + // println("late cooking: "+fun+":"+fun.tpe) // DEBUG + return tryTypedApply(fun setType newtpe, args) } + } + def treesInResult(tree: Tree): List[Tree] = tree :: (tree match { + case Block(_, r) => treesInResult(r) + case Match(_, cases) => cases + case CaseDef(_, _, r) => treesInResult(r) + case Annotated(_, r) => treesInResult(r) + case If(_, t, e) => treesInResult(t) ++ treesInResult(e) + case Try(b, catches, _) => treesInResult(b) ++ catches + case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) + case _ => Nil + }) + def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos) - def treesInResult(tree: Tree): List[Tree] = tree :: (tree match { - case Block(_, r) => treesInResult(r) - case Match(_, cases) => cases - case CaseDef(_, _, r) => treesInResult(r) - case Annotated(_, r) => treesInResult(r) - case If(_, t, e) => treesInResult(t) ++ treesInResult(e) - case Try(b, catches, _) => treesInResult(b) ++ catches - case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) - case _ => Nil - }) - def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos) - - val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult) - printTyping { - val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ") - if (retry) "second try: " + funStr - else "no second 
try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos - } - if (retry) { - val Select(qual, name) = fun - tryTypedArgs(args, forArgMode(fun, mode)) match { - case Some(args1) => - val qual1 = - if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true) - else qual - if (qual1 ne qual) { - val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos - return context withinSecondTry typed1(tree1, mode, pt) - } - case _ => () - } + val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult) + typingStack.printTyping({ + val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ") + if (retry) "second try: " + funStr + else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos + }) + if (retry) { + val Select(qual, name) = fun + tryTypedArgs(args, forArgMode(fun, mode)) match { + case Some(args1) => + val qual1 = + if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true) + else qual + if (qual1 ne qual) { + val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos + return context withinSecondTry typed1(tree1, mode, pt) + } + case _ => () } - issue(typeError) - setError(treeCopy.Apply(tree, fun, args)) + } + issue(typeError) + setError(treeCopy.Apply(tree, fun, args)) } silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError - } + } def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = { // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` @@ -5319,51 +5305,47 @@ trait Typers extends Adaptations with Tags { def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - indentTyping() - - val ptPlugins = pluginsPt(pt, this, tree, mode) - + def body = ( + if (printTypings && !phase.erasedTypes && !noPrintTyping(tree)) + typingStack.nextTyped(tree, mode, pt, context)(typedInternal(tree, 
mode, pt)) + else + typedInternal(tree, mode, pt) + ) val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass) - try { - if (context.retyping && - (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) { - tree.clearType() - if (tree.hasSymbolField) tree.symbol = NoSymbol - } + try body + finally if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType) + } + private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = { + val ptPlugins = pluginsPt(pt, this, tree, mode) + def retypingOk = ( + context.retyping + && (tree.tpe ne null) + && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins)) + ) + def runTyper(): Tree = { + if (retypingOk) { + tree.tpe = null + if (tree.hasSymbol) tree.symbol = NoSymbol + } val alreadyTyped = tree.tpe ne null - val tree1: Tree = if (alreadyTyped) tree else { - printTyping( - ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins), - "undetparams" -> context.undetparams, - "implicitsEnabled" -> context.implicitsEnabled, - "enrichmentEnabled" -> context.enrichmentEnabled, - "mode" -> mode, - "silent" -> context.bufferErrors, - "context.owner" -> context.owner - ) - ) - val ptWild = if (mode.inPatternMode) - ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body. - else - dropExistential(ptPlugins) // FIXME: document why this is done. + val shouldPrint = !alreadyTyped && !phase.erasedTypes + val ptWild = if (mode.inPatternMode) + ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body. + else + dropExistential(ptPlugins) // FIXME: document why this is done. 
+ val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode, ptWild) + if (shouldPrint) + typingStack.showTyped(tree1) - typed1(tree, mode, ptWild) - } // Can happen during erroneous compilation - error(s) have been // reported, but we need to avoid causing an NPE with this tree if (tree1.tpe eq null) return setError(tree) - if (!alreadyTyped) { - printTyping("typed %s: %s%s".format( - ptTree(tree1), tree1.tpe, - if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "") - ) - } - tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins)) + val result = if (tree1.isEmpty) tree1 else { @@ -5371,32 +5353,29 @@ trait Typers extends Adaptations with Tags { if (hasPendingMacroExpansions) macroExpandAll(this, result) else result } - if (!alreadyTyped) { - printTyping("adapted %s: %s to %s, %s".format( - tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString) - ) //DEBUG - } - if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result) + if (shouldPrint) + typingStack.showAdapt(tree1, result, ptPlugins, context) + + if (!isPastTyper) + signalDone(context.asInstanceOf[analyzer.Context], tree, result) + result - } catch { + } + + try runTyper() catch { case ex: TypeError => tree.clearType() // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere. 
- printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG - + typingStack.printTyping(tree, "caught %s: while typing %s".format(ex, tree)) //DEBUG reportTypeError(context, tree.pos, ex) setError(tree) case ex: Exception => - if (settings.debug) // @M causes cyclic reference error - Console.println("exception when typing "+tree+", pt = "+ptPlugins) + // @M causes cyclic reference error + devWarning(s"exception when typing $tree, pt=$ptPlugins") if (context != null && context.unit.exists && tree != null) logError("AT: " + (tree.pos).dbgString, ex) throw ex } - finally { - deindentTyping() - if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType) - } } def atOwner(owner: Symbol): Typer = diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala new file mode 100644 index 0000000000..f44fd412fd --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -0,0 +1,180 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package typechecker + +import scala.collection.mutable +import scala.reflect.internal.util.{ BatchSourceFile, Statistics } +import mutable.ListBuffer +import symtab.Flags._ +import Mode._ + +trait TypersTracking { + self: Analyzer => + + import global._ + import definitions._ + import typeDebug._ + + // To enable decent error messages when the typer crashes. + // TODO - this only catches trees which go through def typed, + // but there are all kinds of back ways - typedClassDef, etc. etc. + // Funnel everything through one doorway. 
+ var lastTreeToTyper: Tree = EmptyTree + + def fullSiteString(context: Context): String = { + def owner_long_s = ( + if (settings.debug.value) { + def flags_s = context.owner.debugFlagString match { + case "" => "" + case s => " with flags " + inLightMagenta(s) + } + s", a ${context.owner.shortSymbolClass}$flags_s" + } + else "" + ) + def marker = if (context.bufferErrors) "silent" else "site" + def undet_s = context.undetparams match { + case Nil => "" + case ps => ps.mkString(" solving: ", ",", "") + } + def implicits_s = ( + if (context.enrichmentEnabled) + if (context.implicitsEnabled) "" + else inLightRed("enrichment only") + else inLightRed("implicits disabled") + ) + + s"($marker$undet_s: ${context.siteString}$owner_long_s) $implicits_s" + } + + object typingStack { + val out = new java.io.PrintWriter(System.err, true) + def println(msg: Any) = out println "" + msg + + // TODO - account for colors so the color of a multiline string + // doesn't infect the connector lines + private def currentIndent = "| " * depth + + private var trees: List[Frame] = Nil + private var depth = 0 + private def atLowerIndent[T](body: => T): T = { + depth -= 1 + try body finally depth += 1 + } + private def resetIfEmpty(s: String) = if (trees.isEmpty) resetColor(s) else s + + private def truncAndOneLine(s: String): String = { + val s1 = s.replaceAll("\\s+", " ") + if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..." 
+ } + + private val nextId = { var x = 1 ; () => try x finally x += 1 } + private class Frame(val tree: Tree) { + val stamp = System.nanoTime + val id = nextId() + } + private object NoFrame extends Frame(EmptyTree) { } + private def greenType(tp: Type): String = tpe_s(tp, inGreen) + private def greenType(tree: Tree): String = tree match { + case null => "[exception]" + case md: MemberDef if md.tpe == NoType => inBlue(s"[${md.keyword} ${md.name}]") + " " + greenType(md.symbol.tpe) + case _ if tree.tpe.isComplete => greenType(tree.tpe) + case _ => "<?>" + } + def indented(s: String): String = + if (s == "") "" else currentIndent + s.replaceAll("\n", "\n" + currentIndent) + + @inline final def runWith[T](t: Tree)(body: => T): T = { + push(t) + try body finally pop(t) + } + def push(t: Tree): Unit = { + trees ::= new Frame(t) + depth += 1 + } + def pop(t: Tree): Unit = { + val frame = trees.head + assert(frame.tree eq t, ((frame.tree, t))) + trees = trees.tail + depth -= 1 + } + def show(s: String) { if (s != "") out.println(s) } + + def showPush(tree: Tree, context: Context) { + showPush(tree, NOmode, WildcardType, context) + } + def showPush(tree: Tree, mode: Mode, pt: Type, context: Context) { + val alreadyTyped = tree.tpe ne null + def tree_s = truncAndOneLine(ptTree(tree)) + def pt_s = if (pt.isWildcard || context.inTypeConstructorAllowed) "" else s": pt=$pt" + def all_s = List(tree_s, pt_s, mode, fullSiteString(context)) filterNot (_ == "") mkString " " + + atLowerIndent(show(indented("""|-- """ + all_s))) + } + def showPop(typedTree: Tree): Tree = { + val s = greenType(typedTree) + show(resetIfEmpty(indented("""\-> """ + s))) + typedTree + } + def showAdapt(original: Tree, adapted: Tree, pt: Type, context: Context) { + if (!noPrintAdapt(original, adapted)) { + def tree_s1 = inLightCyan(truncAndOneLine(ptTree(original))) + def pt_s = if (pt.isWildcard) "" else s" based on pt $pt" + def tree_s2 = adapted match { + case tt: TypeTree => "is now a TypeTree(" + 
tpe_s(tt.tpe, inCyan) + ")" + case _ => "adapted to " + inCyan(truncAndOneLine(ptTree(adapted))) + pt_s + } + show(indented(s"[adapt] $tree_s1 $tree_s2")) + } + } + def showTyped(tree: Tree) { + def class_s = tree match { + case _: RefTree => "" + case _ => " " + tree.shortClass + } + if (!noPrintTyping(tree)) + show(indented(s"[typed$class_s] " + truncAndOneLine(ptTree(tree)))) + } + + def nextTyped(tree: Tree, mode: Mode, pt: Type, context: Context)(body: => Tree): Tree = + nextTypedInternal(tree, showPush(tree, mode, pt, context))(body) + + def nextTyped(tree: Tree, context: Context)(body: => Tree): Tree = + nextTypedInternal(tree, showPush(tree, context))(body) + + def nextTypedInternal(tree: Tree, pushFn: => Unit)(body: => Tree): Tree = ( + if (noPrintTyping(tree)) + body + else + runWith(tree) { pushFn ; showPop(body) } + ) + + @inline final def printTyping(tree: Tree, s: => String) = { + if (printTypings && !noPrintTyping(tree)) + show(indented(s)) + } + @inline final def printTyping(s: => String) = { + if (printTypings) + show(indented(s)) + } + } + def tpe_s(tp: Type, colorize: String => String): String = tp match { + case OverloadedType(pre, alts) => alts map (alt => tpe_s(pre memberType alt, colorize)) mkString " <and> " + case _ => colorize(tp.toLongString) + } + // def sym_s(s: Symbol) = if (s eq null) "" + s else s.getClass.getName split '.' last; + + // Some trees which are typed with mind-numbing frequency and + // which add nothing by being printed. Did () type to Unit? Let's + // gamble on yes. 
+ private def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) + def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t) + def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || ( + (tree1.tpe == tree2.tpe) + && (tree1.symbol == tree2.symbol) + ) +} diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index 1e43d18900..752aac5c8c 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -86,12 +86,6 @@ package object util { @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0") type StringOps = scala.reflect.internal.util.StringOps - @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0") - val TableDef = scala.reflect.internal.util.TableDef - - @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0") - type TableDef[T] = scala.reflect.internal.util.TableDef[T] - @deprecated("scala.reflect.internal.util.WeakHashSet", "2.10.0") type WeakHashSet[T <: AnyRef] = scala.reflect.internal.util.WeakHashSet[T] diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath index 73a67e45ed..9e773a39d2 100644 --- a/src/eclipse/interactive/.classpath +++ b/src/eclipse/interactive/.classpath @@ -3,7 +3,7 @@ <classpathentry kind="src" path="interactive"/> <classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> - <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/> - <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/> <classpathentry kind="output" path="build-quick-interactive"/> </classpath> diff --git 
a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath index b6ef5f35bb..c185bc5391 100644 --- a/src/eclipse/scala-compiler/.classpath +++ b/src/eclipse/scala-compiler/.classpath @@ -1,10 +1,10 @@ <?xml version="1.0" encoding="UTF-8"?> <classpath> <classpathentry kind="src" path="compiler"/> - <classpathentry combineaccessrules="false" kind="src" path="/asm"/> - <classpathentry combineaccessrules="false" kind="src" path="/reflect"/> - <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/> - <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/> + <classpathentry combineaccessrules="false" exported="true" kind="src" path="/asm"/> + <classpathentry combineaccessrules="false" exported="true" kind="src" path="/reflect"/> + <classpathentry combineaccessrules="false" exported="true" kind="src" path="/scala-library"/> + <classpathentry combineaccessrules="false" exported="true" kind="src" path="/continuations-library"/> <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> <classpathentry kind="output" path="build-quick-compiler"/> diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath index caafcf33b0..8e03c97657 100644 --- a/src/eclipse/scaladoc/.classpath +++ b/src/eclipse/scaladoc/.classpath @@ -3,8 +3,10 @@ <classpathentry kind="src" path="scaladoc"/> <classpathentry combineaccessrules="false" kind="src" path="/partest"/> <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/> - <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/> - <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/> + <classpathentry combineaccessrules="false" kind="src" 
path="/scala-library"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-xml"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-parser-combinators"/> <classpathentry kind="output" path="build-quick-scaladoc"/> </classpath> diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath index 8e4f88e0f0..fe3c3e4f18 100644 --- a/src/eclipse/test-junit/.classpath +++ b/src/eclipse/test-junit/.classpath @@ -7,5 +7,6 @@ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/> <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> + <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/> <classpathentry kind="output" path="build-test-junit"/> </classpath> diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 28b84d67ba..492f0f4fb4 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -365,13 +365,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") */ override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym } + protected type SymbolLoadersInInteractive = GlobalSymbolLoaders { + val global: Global.this.type + val platform: Global.this.platform.type + } /** Symbol loaders in the IDE parse all source files loaded from a package for * top-level idents. 
Therefore, we can detect top-level symbols that have a name * different from their source file */ - override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders { + override lazy val loaders: SymbolLoadersInInteractive = new { val global: Global.this.type = Global.this - } + val platform: Global.this.platform.type = Global.this.platform + } with BrowsingLoaders // ----------------- Polling --------------------------------------- diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala index 4dfae047c3..0088620540 100644 --- a/src/library/scala/collection/mutable/DefaultMapModel.scala +++ b/src/library/scala/collection/mutable/DefaultMapModel.scala @@ -19,6 +19,7 @@ package mutable * @version 1.0, 08/07/2003 * @since 1 */ +@deprecated("This trait will be removed.", "2.11.0") trait DefaultMapModel[A, B] extends Map[A, B] { type Entry = DefaultEntry[A, B] diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index b072cd653b..411b89701b 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -247,10 +247,15 @@ trait Future[+T] extends Awaitable[T] { * $forComprehensionExamples */ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = { - val p = Promise[S]() + import impl.Promise.DefaultPromise + val p = new DefaultPromise[S]() onComplete { case f: Failure[_] => p complete f.asInstanceOf[Failure[S]] - case Success(v) => try f(v) onComplete p.complete catch { case NonFatal(t) => p failure t } + case Success(v) => try f(v) match { + // If possible, link DefaultPromises to avoid space leaks + case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p) + case fut => fut onComplete p.complete + } catch { case NonFatal(t) => p failure t } } p.future } diff --git a/src/library/scala/concurrent/Promise.scala 
b/src/library/scala/concurrent/Promise.scala index f950b13b78..cdde019cd0 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -82,7 +82,7 @@ trait Promise[T] { /** Completes the promise with a value. * - * @param v The value to complete the promise with. + * @param value The value to complete the promise with. * * $promiseCompletion */ @@ -98,7 +98,7 @@ trait Promise[T] { /** Completes the promise with an exception. * - * @param t The throwable to complete the promise with. + * @param cause The throwable to complete the promise with. * * $allowedThrowables * diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index ffaea8de96..35511856ee 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -56,7 +56,9 @@ private[concurrent] object Promise { case t => Failure(t) } - /* + /** + * Latch used to implement waiting on a DefaultPromise's result. + * * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at @@ -73,10 +75,122 @@ private[concurrent] object Promise { /** Default promise implementation. + * + * A DefaultPromise has three possible states. It can be: + * + * 1. Incomplete, with an associated list of callbacks waiting on completion. + * 2. Complete, with a result. + * 3. Linked to another DefaultPromise. + * + * If a DefaultPromise is linked it another DefaultPromise then it will + * delegate all its operations to that other promise. This means that two + * DefaultPromises that are linked will appear, to external callers, to have + * exactly the same state and behaviour. E.g. they will both appear to be + * either complete or incomplete, and with the same values. 
+ * + * A DefaultPromise stores its state entirely in the AnyRef cell exposed by + * AbstractPromise. The type of object stored in the cell fully describes the + * current state of the promise. + * + * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks + * to call when it is eventually completed. + * 2. Try[T] - The promise is complete and now contains its value. + * 3. DefaultPromise[T] - The promise is linked to another promise. + * + * The ability to link DefaultPromises is needed to prevent memory leaks when + * using Future.flatMap. The previous implementation of Future.flatMap used + * onComplete handlers to propagate the ultimate value of a flatMap operation + * to its promise. Recursive calls to flatMap built a chain of onComplete + * handlers and promises. Unfortunately none of the handlers or promises in + * the chain could be collected until the handlers had been called and + * detached, which only happened when the final flatMap future was completed. + * (In some situations, such as infinite streams, this would never actually + * happen.) Because of the fact that the promise implementation internally + * created references between promises, and these references were invisible to + * user code, it was easy for user code to accidentally build large chains of + * promises and thereby leak memory. + * + * The problem of leaks is solved by automatically breaking these chains of + * promises, so that promises don't refer to each other in a long chain. This + * allows each promise to be individually collected. The idea is to "flatten" + * the chain of promises, so that instead of each promise pointing to its + * neighbour, they instead point directly the promise at the root of the + * chain. This means that only the root promise is referenced, and all the + * other promises are available for garbage collection as soon as they're no + * longer referenced by user code. 
+ * + * To make the chains flattenable, the concept of linking promises together + * needed to become an explicit feature of the DefaultPromise implementation, + * so that the implementation to navigate and rewire links as needed. The idea + * of linking promises is based on the [[Twitter promise implementation + * https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala]]. + * + * In practice, flattening the chain cannot always be done perfectly. When a + * promise is added to the end of the chain, it scans the chain and links + * directly to the root promise. This prevents the chain from growing forwards + * But the root promise for a chain can change, causing the chain to grow + * backwards, and leaving all previously-linked promise pointing at a promise + * which is no longer the root promise. + * + * To mitigate the problem of the root promise changing, whenever a promise's + * methods are called, and it needs a reference to its root promise it calls + * the `compressedRoot()` method. This method re-scans the promise chain to + * get the root promise, and also compresses its links so that it links + * directly to whatever the current root promise is. This ensures that the + * chain is flattened whenever `compressedRoot()` is called. And since + * `compressedRoot()` is called at every possible opportunity (when getting a + * promise's value, when adding an onComplete handler, etc), this will happen + * frequently. Unfortunately, even this eager relinking doesn't absolutely + * guarantee that the chain will be flattened and that leaks cannot occur. + * However eager relinking does greatly reduce the chance that leaks will + * occur. + * + * Future.flatMap links DefaultPromises together by calling the `linkRootOf` + * method. This is the only externally visible interface to linked + * DefaultPromises, and `linkedRootOf` is currently only designed to be called + * by Future.flatMap. 
*/ class DefaultPromise[T] extends AbstractPromise with Promise[T] { self => - updateState(null, Nil) // Start at "No callbacks" + updateState(null, Nil) // The promise is incomplete and has no callbacks + + /** Get the root promise for this promise, compressing the link chain to that + * promise if necessary. + * + * For promises that are not linked, the result of calling + * `compressedRoot()` will the promise itself. However for linked promises, + * this method will traverse each link until it locates the root promise at + * the base of the link chain. + * + * As a side effect of calling this method, the link from this promise back + * to the root promise will be updated ("compressed") to point directly to + * the root promise. This allows intermediate promises in the link chain to + * be garbage collected. Also, subsequent calls to this method should be + * faster as the link chain will be shorter. + */ + @tailrec + private def compressedRoot(): DefaultPromise[T] = { + getState match { + case linked: DefaultPromise[_] => + val target = linked.asInstanceOf[DefaultPromise[T]].root + if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot() + case _ => this + } + } + /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`. + * The `compressedRoot()` method should be called instead of this method, as it is important + * to compress the link chain whenever possible. + */ + @tailrec + private def root: DefaultPromise[T] = { + getState match { + case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root + case _ => this + } + } + + /** Try waiting for this promise to be completed. 
+ */ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) { import Duration.Undefined import scala.concurrent.Future.InternalCallbackExecutor @@ -108,42 +222,96 @@ private[concurrent] object Promise { def result(atMost: Duration)(implicit permit: CanAwait): T = ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here - def value: Option[Try[T]] = getState match { + def value: Option[Try[T]] = value0 + + @tailrec + private def value0: Option[Try[T]] = getState match { case c: Try[_] => Some(c.asInstanceOf[Try[T]]) + case _: DefaultPromise[_] => compressedRoot().value0 case _ => None } - override def isCompleted: Boolean = getState.isInstanceOf[Try[_]] + override def isCompleted: Boolean = isCompleted0 + + @tailrec + private def isCompleted0: Boolean = getState match { + case _: Try[_] => true + case _: DefaultPromise[_] => compressedRoot().isCompleted0 + case _ => false + } def tryComplete(value: Try[T]): Boolean = { val resolved = resolveTry(value) - @tailrec - def tryComplete(v: Try[T]): List[CallbackRunnable[T]] = { - getState match { - case raw: List[_] => - val cur = raw.asInstanceOf[List[CallbackRunnable[T]]] - if (updateState(cur, v)) cur else tryComplete(v) - case _ => null - } - } - tryComplete(resolved) match { + tryCompleteAndGetListeners(resolved) match { case null => false case rs if rs.isEmpty => true case rs => rs.foreach(r => r.executeWithValue(resolved)); true } } + /** Called by `tryComplete` to store the resolved value and get the list of + * listeners, or `null` if it is already completed. 
+ */ + @tailrec + private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = { + getState match { + case raw: List[_] => + val cur = raw.asInstanceOf[List[CallbackRunnable[T]]] + if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v) + case _: DefaultPromise[_] => + compressedRoot().tryCompleteAndGetListeners(v) + case _ => null + } + } + def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { + val preparedEC = executor.prepare() + val runnable = new CallbackRunnable[T](preparedEC, func) + dispatchOrAddCallback(runnable) + } + + /** Tries to add the callback, if already completed, it dispatches the callback to be executed. + * Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks + * to the root promise when linking two promises together. + */ + @tailrec + private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = { + getState match { + case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]]) + case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable) + case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable) + } + } - @tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed - def dispatchOrAddCallback(): Unit = - getState match { - case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]]) - case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback() - } - dispatchOrAddCallback() + /** Link this promise to the root of another promise using `link()`. Should only + * be called by Future.flatMap. + */ + protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot()) + + /** Link this promise to another promise so that both promises share the same + * externally-visible state. 
Depending on the current state of this promise, this + * may involve different things. For example, any onComplete listeners will need + * to be transferred. + * + * If this promise is already completed, then the same effect as linking - + * sharing the same completed value - is achieved by simply sending this + * promise's result to the target promise. + */ + @tailrec + private def link(target: DefaultPromise[T]): Unit = if (this ne target) { + getState match { + case r: Try[_] => + if (!target.tryComplete(r.asInstanceOf[Try[T]])) { + // Currently linking is done from Future.flatMap, which should ensure only + // one promise can be completed. Therefore this situation is unexpected. + throw new IllegalStateException("Cannot link completed promises together") + } + case _: DefaultPromise[_] => + compressedRoot().link(target) + case listeners: List[_] => if (updateState(listeners, target)) { + if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_)) + } else link(target) + } } } diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala index 59d5b1bf93..aa55ac4f0f 100644 --- a/src/library/scala/text/Document.scala +++ b/src/library/scala/text/Document.scala @@ -10,11 +10,17 @@ package scala.text import java.io.Writer +@deprecated("This object will be removed.", "2.11.0") case object DocNil extends Document +@deprecated("This object will be removed.", "2.11.0") case object DocBreak extends Document +@deprecated("This class will be removed.", "2.11.0") case class DocText(txt: String) extends Document +@deprecated("This class will be removed.", "2.11.0") case class DocGroup(doc: Document) extends Document +@deprecated("This class will be removed.", "2.11.0") case class DocNest(indent: Int, doc: Document) extends Document +@deprecated("This class will be removed.", "2.11.0") case class DocCons(hd: Document, tl: Document) extends Document /** @@ -24,6 +30,7 @@ case class DocCons(hd: 
Document, tl: Document) extends Document * @author Michel Schinz * @version 1.0 */ +@deprecated("This class will be removed.", "2.11.0") abstract class Document { def ::(hd: Document): Document = DocCons(hd, this) def ::(hd: String): Document = DocCons(DocText(hd), this) @@ -96,6 +103,7 @@ abstract class Document { } } +@deprecated("This object will be removed.", "2.11.0") object Document { /** The empty document */ def empty = DocNil diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 1ed3f4becb..523270b31c 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -216,7 +216,7 @@ object Either { * r.merge: Seq[Int] // Vector(1) * }}} */ - implicit class MergeableEither[A](x: Either[A, A]) { + implicit class MergeableEither[A](val x: Either[A, A]) extends AnyVal { def merge: A = x match { case Left(a) => a case Right(a) => a diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index fef9cef246..02c461f3c6 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -131,6 +131,10 @@ private[scala] trait PropertiesTrait { def javaVmName = propOrEmpty("java.vm.name") def javaVmVendor = propOrEmpty("java.vm.vendor") def javaVmVersion = propOrEmpty("java.vm.version") + // this property must remain less-well-known until 2.11 + private def javaSpecVersion = propOrEmpty("java.specification.version") + //private def javaSpecVendor = propOrEmpty("java.specification.vendor") + //private def javaSpecName = propOrEmpty("java.specification.name") def osName = propOrEmpty("os.name") def scalaHome = propOrEmpty("scala.home") def tmpDir = propOrEmpty("java.io.tmpdir") @@ -158,18 +162,29 @@ private[scala] trait PropertiesTrait { def scalaCmd = if (isWin) "scala.bat" else "scala" def scalacCmd = if (isWin) "scalac.bat" else "scalac" - /** Can the java version be determined to be at least as high as the argument? 
- * Hard to properly future proof this but at the rate 1.7 is going we can leave - * the issue for our cyborg grandchildren to solve. + /** Compares the given specification version to the specification version of the platform. + * + * @param version a specification version of the form "major.minor" + * @return `true` iff the specification version of the current runtime + * is equal to or higher than the version denoted by the given string. + * @throws NumberFormatException if the given string is not a version string + * + * @example {{{ + * // In this example, the runtime's Java specification is assumed to be at version 1.7. + * isJavaAtLeast("1.6") // true + * isJavaAtLeast("1.7") // true + * isJavaAtLeast("1.8") // false + * }}} */ - def isJavaAtLeast(version: String) = { - val okVersions = version match { - case "1.5" => List("1.5", "1.6", "1.7") - case "1.6" => List("1.6", "1.7") - case "1.7" => List("1.7") - case _ => Nil + def isJavaAtLeast(version: String): Boolean = { + def parts(x: String) = { + val i = x.indexOf('.') + if (i < 0) throw new NumberFormatException("Not a version: " + x) + (x.substring(0, i), x.substring(i+1, x.length)) } - okVersions exists (javaVersion startsWith _) + val (v, _v) = parts(version) + val (s, _s) = parts(javaSpecVersion) + s.toInt >= v.toInt && _s.toInt >= _v.toInt } // provide a main method so version info can be obtained by running this diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala index e6f314b712..4182b7d0ba 100644 --- a/src/reflect/scala/reflect/api/Importers.scala +++ b/src/reflect/scala/reflect/api/Importers.scala @@ -9,7 +9,7 @@ package api * ''Note: this trait should typically be used only rarely.'' * * Reflection artifacts, such as [[scala.reflect.api.Symbols Symbols]] and [[scala.reflect.api.Types Types]], - * are contained in [[scala.reflect.api.Universes Universe]]s. 
Typically all processing happens + * are contained in [[scala.reflect.api.Universe Universe]]s. Typically all processing happens * within a single `Universe` (e.g. a compile-time macro `Universe` or a runtime reflection `Universe`), but sometimes * there is a need to migrate artifacts from one `Universe` to another. For example, runtime compilation works by * importing runtime reflection trees into a runtime compiler universe, compiling the importees and exporting the @@ -101,4 +101,4 @@ trait Importers { self: Universe => */ def importPosition(pos: from.Position): Position } -}
\ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 6a9fa9a884..6b7aa2dddf 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -580,10 +580,11 @@ trait Definitions extends api.StandardDefinitions { } val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22 + lazy val ProductClass: Array[ClassSymbol] = prepend(UnitClass, mkArityArray("Product", MaxProductArity, 1)) - lazy val TupleClass: Array[Symbol] = prepend(NoSymbol, mkArityArray("Tuple", MaxTupleArity, 1)) - lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0) - lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0) + lazy val TupleClass: Array[Symbol] = prepend(null, mkArityArray("Tuple", MaxTupleArity, 1)) + lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0) + lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0) /** Creators for TupleN, ProductN, FunctionN. */ def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems) @@ -608,6 +609,9 @@ trait Definitions extends api.StandardDefinitions { // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional? 
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) def isProductNClass(sym: Symbol) = ProductClass contains sym + def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) + def isFunctionSymbol(sym: Symbol) = FunctionClass contains unspecializedSymbol(sym) + def isProductNSymbol(sym: Symbol) = ProductClass contains unspecializedSymbol(sym) def unspecializedSymbol(sym: Symbol): Symbol = { if (sym hasFlag SPECIALIZED) { @@ -618,31 +622,8 @@ trait Definitions extends api.StandardDefinitions { } else sym } - - // Checks whether the given type is true for the given condition, - // or if it is a specialized subtype of a type for which it is true. - // - // Origins notes: - // An issue was introduced with specialization in that the implementation - // of "isTupleType" in Definitions relied upon sym == TupleClass(elems.length). - // This test is untrue for specialized tuples, causing mysterious behavior - // because only some tuples are specialized. - def isPossiblySpecializedType(tp: Type)(cond: Type => Boolean) = { - cond(tp) || (tp match { - case TypeRef(pre, sym, args) if sym hasFlag SPECIALIZED => - cond(tp baseType unspecializedSymbol(sym)) - case _ => - false - }) - } - // No normalization. - def isTupleTypeDirect(tp: Type) = isPossiblySpecializedType(tp) { - case TypeRef(_, sym, args) if args.nonEmpty => - val len = args.length - len <= MaxTupleArity && sym == TupleClass(len) - case _ => false - } - def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden) + def unspecializedTypeArgs(tp: Type): List[Type] = + (tp baseType unspecializedSymbol(tp.typeSymbolDirect)).typeArgs def isMacroBundleType(tp: Type) = { val isNonTrivial = tp != ErrorType && tp != NothingTpe && tp != NullTpe @@ -654,6 +635,16 @@ trait Definitions extends api.StandardDefinitions { def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass) + // These "direct" calls perform no dealiasing. 
They are most needed when + // printing types when one wants to preserve the true nature of the type. + def isFunctionTypeDirect(tp: Type) = isFunctionSymbol(tp.typeSymbolDirect) + def isTupleTypeDirect(tp: Type) = isTupleSymbol(tp.typeSymbolDirect) + + // Note that these call .dealiasWiden and not .normalize, the latter of which + // tends to change the course of events by forcing types. + def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden) + def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden) + lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product] def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity) def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement) @@ -662,9 +653,13 @@ trait Definitions extends api.StandardDefinitions { def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_) def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j)) + def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) + + /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */ + def isExactProductType(tp: Type): Boolean = isProductNSymbol(tp.typeSymbol) /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */ - def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match { + def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match { case Some(x) => tpe.baseType(x).typeArgs case _ => Nil } @@ -683,13 +678,9 @@ trait Definitions extends api.StandardDefinitions { assert(isFunctionType(tp), tp) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last) } - - def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match { - case TypeRef(_, sym, args) if args.nonEmpty => - val arity = args.length - 1 // -1 is the return type - arity <= MaxFunctionArity && sym == FunctionClass(arity) - case _ => - false + def functionNBaseType(tp: 
Type): Type = tp.baseClasses find isFunctionSymbol match { + case Some(sym) => tp baseType unspecializedSymbol(sym) + case _ => tp } def isPartialFunctionType(tp: Type): Boolean = { diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index f8584ac9b0..b0828e9c54 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -434,6 +434,8 @@ trait Importers extends api.Importers { to: SymbolTable => ScalaSigBytes(bytes) case from.NestedAnnotArg(annInfo) => NestedAnnotArg(importAnnotationInfo(annInfo)) + case from.UnmappableAnnotArg => + UnmappableAnnotArg } // todo. careful import of positions diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 2ae9f81a09..c340670635 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -52,6 +52,12 @@ abstract class SymbolTable extends macros.Universe def globalError(msg: String): Unit = abort(msg) def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg)) + protected def elapsedMessage(msg: String, start: Long) = + msg + " in " + (System.currentTimeMillis() - start) + "ms" + + def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]") + def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) + def shouldLogAtThisPhase = false def isPastTyper = false diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 424296c212..e41038cafc 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3152,6 +3152,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def children = childSet override def addChild(sym: Symbol) { childSet = childSet + sym } + def 
anonOrRefinementString = { + if (hasCompleteInfo) { + val label = if (isAnonymousClass) "$anon:" else "refinement of" + val parents = parentsString(info.parents map functionNBaseType filterNot (_.typeSymbol == SerializableClass)) + s"<$label $parents>" + } + else if (isAnonymousClass) "$anon" + else nameString + } + override def toString = ( + if (isAnonOrRefinementClass) anonOrRefinementString + else super.toString + ) + if (Statistics.hotEnabled) Statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 71f84ab557..9c1342e68e 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -7,11 +7,73 @@ package scala package reflect package internal +import util.shortClassOfInstance + trait TypeDebugging { self: SymbolTable => - // @M toString that is safe during debugging (does not normalize, ...) + import definitions._ + + /** There's a whole lot of implementation detail which is nothing but noise when + * you are trying to see what's going on. This is my attempt to filter it out. 
+ */ + object noPrint extends (Tree => Boolean) { + def skipScalaName(name: Name) = name match { + case tpnme.Any | tpnme.Nothing | tpnme.AnyRef => true + case _ => false + } + def skipRefTree(t: RefTree) = t match { + case Select(Select(Ident(nme.ROOTPKG), nme.scala_), name) if skipScalaName(name) => true + case Select(sel, name) if sel.symbol == ScalaPackage && skipScalaName(name) => true + case Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR) => true + case Ident(nme.ROOTPKG) => true + case _ => skipSym(t.symbol) + } + def skipSym(sym: Symbol): Boolean = sym match { + case null => false + case NothingClass | AnyClass => true + case PredefModule => true + case ObjectClass => true + case _ => sym.hasPackageFlag + } + def skipType(tpe: Type): Boolean = skipSym(tpe.typeSymbolDirect) + + def skip(t: Tree): Boolean = t match { + case EmptyTree => true + case PackageDef(_, _) => true + case t: RefTree => skipRefTree(t) + case TypeBoundsTree(lo, hi) => skip(lo) && skip(hi) + case Block(Nil, expr) => skip(expr) + case Apply(fn, Nil) => skip(fn) + case Block(stmt :: Nil, expr) => skip(stmt) && skip(expr) + case DefDef(_, nme.CONSTRUCTOR, Nil, Nil :: Nil, _, rhs) => skip(rhs) + case Literal(Constant(())) => true + case tt @ TypeTree() => skipType(tt.tpe) + case _ => skipSym(t.symbol) + } + def apply(t: Tree) = skip(t) + } + + /** Light color wrappers. 
+ */ object typeDebug { + import scala.Console._ + + private val colorsOk = sys.props contains "scala.color" + private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s + private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s + + def inLightRed(s: String) = inColor(s, RED) + def inLightGreen(s: String) = inColor(s, GREEN) + def inLightMagenta(s: String) = inColor(s, MAGENTA) + def inLightCyan(s: String): String = inColor(s, CYAN) + def inGreen(s: String): String = inBold(s, GREEN) + def inRed(s: String): String = inBold(s, RED) + def inBlue(s: String): String = inBold(s, BLUE) + def inCyan(s: String): String = inBold(s, CYAN) + def inMagenta(s: String) = inBold(s, MAGENTA) + def resetColor(s: String): String = if (colorsOk) s + RESET else s + private def to_s(x: Any): String = x match { // otherwise case classes are caught looking like products case _: Tree | _: Type => "" + x @@ -29,16 +91,32 @@ trait TypeDebugging { strs.mkString(label + " {\n ", "\n ", "\n}") } } - def ptLine(label: String, pairs: (String, Any)*): String = { - val strs = pairs map { case (k, v) => k + "=" + to_s(v) } - strs.mkString(label + ": ", ", ", "") + def ptLine(pairs: (String, Any)*): String = ( + pairs + map { case (k, v) => (k, to_s(v)) } + filterNot { case (_, v) => v == "" } + map { case ("", v) => v ; case (k, v) => s"$k=$v" } + mkString ", " + ) + def ptTree(t: Tree): String = t match { + case PackageDef(pid, _) => s"package $pid" + case ModuleDef(_, name, _) => s"object $name" + case DefDef(_, name, tparams, _, _, _) => "def " + name + ptTypeParams(tparams) + case ClassDef(_, name, Nil, _) if t.symbol != null && t.symbol.isModuleClass => s"module class $name" + case ClassDef(_, name, tparams, _) => "class " + name + ptTypeParams(tparams) + case td: TypeDef => ptTypeParam(td) + case TypeBoundsTree(lo, hi) => + val lo_s = if (noPrint(lo)) "" else " >: " + ptTree(lo) + val hi_s = if (noPrint(hi)) 
"" else " <: " + ptTree(hi) + lo_s + hi_s + case _ if (t.symbol eq null) || (t.symbol eq NoSymbol) => to_s(t) + case _ => "" + t.symbol.tpe } - def ptTree(t: Tree) = t match { - case PackageDef(pid, _) => "package " + pid - case ModuleDef(_, name, _) => "object " + name - case ClassDef(_, name, tparams, _) => "class " + name + str.brackets(tparams) - case _ => to_s(t) + def ptTypeParam(td: TypeDef): String = { + val TypeDef(mods, name, tparams, rhs) = td + name + ptTypeParams(tparams) + ptTree(rhs) } + def ptTypeParams(tparams: List[TypeDef]): String = str brackets (tparams map ptTypeParam) object str { def parentheses(xs: List[_]): String = xs.mkString("(", ", ", ")") @@ -46,19 +124,24 @@ trait TypeDebugging { def tparams(tparams: List[Type]): String = brackets(tparams map debug) def parents(ps: List[Type]): String = (ps map debug).mkString(" with ") def refine(defs: Scope): String = defs.toList.mkString("{", " ;\n ", "}") + def bounds(lo: Type, hi: Type): String = { + val lo_s = if (typeIsNothing(lo)) "" else s" >: $lo" + val hi_s = if (typeIsAny(hi)) "" else s" <: $hi" + lo_s + hi_s + } } - + import str._ private def debug(tp: Type): String = tp match { - case TypeRef(pre, sym, args) => debug(pre) + "." + sym.nameString + str.tparams(args) - case ThisType(sym) => sym.nameString + ".this" - case SingleType(pre, sym) => debug(pre) +"."+ sym.nameString +".type" - case RefinedType(parents, defs) => str.parents(parents) + str.refine(defs) - case ClassInfoType(parents, defs, clazz) => "class "+ clazz.nameString + str.parents(parents) + str.refine(defs) - case PolyType(tparams, result) => str.brackets(tparams) + " " + debug(result) - case TypeBounds(lo, hi) => ">: "+ debug(lo) +" <: "+ debug(hi) - case tv @ TypeVar(_, _) => tv.toString - case ExistentialType(tparams, qtpe) => "forSome "+ str.brackets(tparams) + " " + debug(qtpe) - case _ => "?"+tp.getClass.getName+"?"//tp.toString might produce cyclic error... 
+ case TypeRef(pre, sym, args) => s"${debug(pre)}.${sym.nameString}.${tparams(args)}" + case ThisType(sym) => s"${sym.nameString}.this" + case SingleType(pre, sym) => s"${debug(pre)}.${sym.nameString}.type" + case RefinedType(ps, decls) => s"${parents(ps)} ${refine(decls)}" + case ClassInfoType(ps, decls, clazz) => s"class ${clazz.nameString} ${parents(ps)} ${refine(decls)}" + case PolyType(tparams, result) => s"${brackets(tparams)}${debug(result)}" + case TypeBounds(lo, hi) => bounds(lo, hi) + case tv @ TypeVar(_, _) => "" + tv + case ExistentialType(tparams, qtpe) => s"forSome ${brackets(tparams)} ${debug(qtpe)}" + case _ => s"?${shortClassOfInstance(tp)}?" // tp.toString might produce cyclic error... } def debugString(tp: Type) = debug(tp) } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 967146a130..11527d88ca 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -929,7 +929,7 @@ trait Types * after `maxTostringRecursions` recursion levels. Uses `safeToString` * to produce a string on each level. */ - override def toString: String = typeToString(this) + override final def toString: String = typeToString(this) /** Method to be implemented in subclasses. * Converts this type to a string in calling toString for its parts. 
@@ -943,7 +943,9 @@ trait Types else if ((str endsWith ".type") && !typeSymbol.isModuleClass) widen match { case RefinedType(_, _) => "" + widen - case _ => s"$str (with underlying type $widen)" + case _ => + if (widen.toString.trim == "") str + else s"$str (with underlying type $widen)" } else str } @@ -1557,10 +1559,10 @@ trait Types override def isStructuralRefinement: Boolean = typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) - override def safeToString: String = parentsString(parents) + ( - (if (settings.debug || parents.isEmpty || (decls.elems ne null)) - fullyInitializeScope(decls).mkString("{", "; ", "}") else "") - ) + protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty + protected def initDecls = fullyInitializeScope(decls) + protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else "" + override def safeToString = parentsString(parents) + scopeString } protected def computeBaseClasses(tpe: Type): List[Symbol] = { @@ -1968,21 +1970,12 @@ trait Types } override def kind = "ClassInfoType" - - override def safeToString = - if (settings.debug || decls.size > 1) - formattedToString - else - super.safeToString - /** A nicely formatted string with newlines and such. 
*/ - def formattedToString: String = - parents.mkString("\n with ") + ( - if (settings.debug || parents.isEmpty || (decls.elems ne null)) - fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}") - else "" - ) + def formattedToString = parents.mkString("\n with ") + scopeString + override protected def shouldForceScope = settings.debug || decls.size > 1 + override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}") + override def safeToString = if (shouldForceScope) formattedToString else super.safeToString } object ClassInfoType extends ClassInfoTypeExtractor @@ -2370,7 +2363,6 @@ trait Types } thisInfo.decls } - protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform override def baseTypeSeq: BaseTypeSeq = { @@ -2385,7 +2377,6 @@ trait Types baseTypeSeqCache } } - // ensure that symbol is not a local copy with a name coincidence private def needsPreString = ( settings.debug @@ -2395,46 +2386,50 @@ trait Types private def preString = if (needsPreString) pre.prefixString else "" private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]") - def refinementString = ( - if (sym.isStructuralRefinement) ( - fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic) - map (_.defString) - mkString("{", "; ", "}") - ) + private def refinementDecls = fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic) + private def refinementString = ( + if (sym.isStructuralRefinement) + refinementDecls map (_.defString) mkString("{", "; ", "}") else "" ) - protected def finishPrefix(rest: String) = ( if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes) parentsString(thisInfo.parents) + refinementString else rest ) + private def noArgsString = finishPrefix(preString + sym.nameString) + private def tupleTypeString: String = args match { + case Nil => noArgsString + case arg :: Nil => s"($arg,)" + case _ => args.mkString("(", ", ", ")") + } 
private def customToString = sym match { case RepeatedParamClass => args.head + "*" case ByNameParamClass => "=> " + args.head case _ => - def targs = dealiasWiden.typeArgs - - if (isFunctionType(this)) { + if (isFunctionTypeDirect(this)) { // Aesthetics: printing Function1 as T => R rather than (T) => R // ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable // from (T1, T2) => R. - targs match { - case in :: out :: Nil if !isTupleType(in) => - // A => B => C should be (A => B) => C or A => (B => C) + unspecializedTypeArgs(this) match { + // See neg/t588 for an example which arrives here - printing + // the type of a Function1 after erasure. + case Nil => noArgsString + case in :: out :: Nil if !isTupleTypeDirect(in) => + // A => B => C should be (A => B) => C or A => (B => C). // Also if A is byname, then we want (=> A) => B because => is right associative and => A => B // would mean => (A => B) which is a different type - val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in - val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out + val in_s = if (isFunctionTypeDirect(in) || isByNameParamType(in)) "(" + in + ")" else "" + in + val out_s = if (isFunctionTypeDirect(out)) "(" + out + ")" else "" + out in_s + " => " + out_s case xs => xs.init.mkString("(", ", ", ")") + " => " + xs.last } } - else if (isTupleType(this)) - targs.mkString("(", ", ", if (hasLength(targs, 1)) ",)" else ")") - else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne this.normalize)) - "" + normalize + else if (isTupleTypeDirect(this)) + tupleTypeString + else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne dealias)) + "" + dealias else "" } diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 6cffdbc193..f42dbf56e1 100644 --- 
a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -22,8 +22,8 @@ import scala.annotation.switch * @version 1.0 */ abstract class UnPickler { - val global: SymbolTable - import global._ + val symbolTable: SymbolTable + import symbolTable._ /** Unpickle symbol table information descending from a class and/or module root * from an array of bytes. diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 1626da2c93..e97aa662d8 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -5,27 +5,24 @@ import TableDef._ import scala.language.postfixOps /** A class for representing tabular data in a way that preserves - * its inner beauty. See Exceptional for an example usage. + * its inner beauty. * One creates an instance of TableDef by defining the columns of * the table, then uses that to create an instance of Table by * passing in a sequence of rows. */ class TableDef[T](_cols: Column[T]*) { - /** These operators are about all there is to it. - * - * ~ appends a column to the table - * >> creates a right-justified column and appends it - * << creates a left-justified column and appends it - * >+ specifies a string to separate the previous column from the next. - * if none is specified, a space is used. - */ + // These operators are about all there is to it. + /** Appends a column to the table. */ def ~(next: Column[T]) = retThis(cols :+= next) + /** Creates a right-justified column and appends it. */ def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = false) + /** Creates a left-justified column and appends it. */ def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = true) + /** Specifies a string to separate the previous column from the next. + * If none is specified, a space is used. 
*/ def >+(sep: String) = retThis(separators += ((cols.size - 1, sep))) - /** Below this point should all be considered private/internal. - */ + // Below this point should all be considered private/internal. private var cols: List[Column[T]] = _cols.toList private var separators: Map[Int, String] = Map() diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 16405a88b4..93861b0899 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -529,7 +529,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni } private object unpickler extends UnPickler { - val global: thisUniverse.type = thisUniverse + val symbolTable: thisUniverse.type = thisUniverse } /** how connected???? diff --git a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala index d8a876feb2..8b0c6d78fa 100644 --- a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala @@ -34,6 +34,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper { + // ASAP + this setExpandEvents false + // working around protected/trait/java insufficiencies. 
def goBack(num: Int): Unit = back(num) if ((history: History) ne NoHistory) diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index ef6f4c2920..49bdd69a8b 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -44,33 +44,78 @@ class JavapClass( val (flags, upgraded) = upgrade(options) import flags.{ app, fun, help, raw } val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases - if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter))) - else if (targets.isEmpty) List(JpResult("No anonfuns found.")) - else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app))) + if (help || claases.isEmpty) + List(JpResult(JavapTool.helper(printWriter))) + else if (targets.isEmpty) + List(JpResult("No anonfuns found.")) + else + tool(raw, upgraded)(targets map (claas => targeted(claas, app))) } /** Cull our tool options. */ - private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match { - case (t,s) if s.nonEmpty => (t,s) - case (t,s) => (t, JavapTool.DefaultOptions) - } + private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = + ToolArgs fromArgs options match { + case (t, s) if s.nonEmpty => (t, s) + case (t, s) => (t, JavapTool.DefaultOptions) + } + + /** Associate the requested path with a possibly failed or empty array of bytes. */ + private def targeted(path: String, app: Boolean): (String, Try[Array[Byte]]) = + bytesFor(path, app) match { + case Success((target, bytes)) => (target, Try(bytes)) + case f: Failure[_] => (path, Failure(f.exception)) + } /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). 
*/ private def bytesFor(path: String, app: Boolean) = Try { def last = intp.get.mostRecentVar // fail if no intp - def req = if (path == "-") last else { - val s = path.splitHashMember - if (s._1.nonEmpty) s._1 - else s._2 getOrElse "#" + def req = path match { + case "-" => last + case HashSplit(prefix, member) => + if (prefix != null) prefix + else if (member != null) member + else "#" + } + val targetedBytes = if (app) findAppBody(req) else (req, findBytes(req)) + if (targetedBytes._2.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '$path'") + targetedBytes + } + + private def findAppBody(path: String): (String, Array[Byte]) = { + // is this new style delayedEndpoint? then find it. + // the name test is naive. could add $mangled path. + // assumes only the first match is of interest (because only one endpoint is generated). + def findNewStyle(bytes: Array[Byte]) = { + import scala.tools.asm.ClassReader + import scala.tools.asm.tree.ClassNode + import PartialFunction.cond + import JavaConverters._ + val rdr = new ClassReader(bytes) + val nod = new ClassNode + rdr.accept(nod, 0) + //foo/Bar.delayedEndpoint$foo$Bar$1 + val endpoint = "delayedEndpoint".r.unanchored + def isEndPoint(s: String) = (s contains '$') && cond(s) { case endpoint() => true } + nod.methods.asScala collectFirst { case m if isEndPoint(m.name) => m.name } } - def asAppBody(s: String) = { - val (cls, fix) = s.splitSuffix - s"${cls}$$delayedInit$$body${fix}" + // try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint + def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs => + findNewStyle(bs) map (n => (s"$path#$n", bs)) } - def todo = if (app) asAppBody(req) else req - val bytes = findBytes(todo) - if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'") - else bytes + // use old style, and add foo# to filter on apply method + def asOldStyle = { + def asAppBody(s: String) = { + val 
(cls, fix) = s.splitSuffix + s"${cls}$$delayedInit$$body${fix}" + } + val oldStyle = asAppBody(path) + val oldBytes = findBytes(oldStyle) + if (oldBytes.nonEmpty) (s"$oldStyle#", oldBytes) + else (path, oldBytes) + } + + val pathBytes = findBytes(path) + asNewStyle(pathBytes) getOrElse asOldStyle } def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path) @@ -496,6 +541,7 @@ object JavapClass { intp: Option[IMain] = None ) = new JavapClass(loader, printWriter, intp) + val HashSplit = "(.*?)(?:#([^#]*))?".r // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget // or a resource path com/acme/Widget.class; but not widget.out implicit class MaybeClassLike(val s: String) extends AnyVal { diff --git a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala index ea100b25f2..9346b0553f 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala @@ -39,7 +39,7 @@ class StdReplVals(final val r: ILoop) extends ReplVals { def lastRequest = intp.lastRequest class ReplImplicits extends power.Implicits2 { - import intp.global._ + import intp.global.Symbol private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global) implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym) diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 20f24dc753..723f8b1dc8 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -23,9 +23,11 @@ trait ScaladocGlobalTrait extends Global { val runsAfter = List[String]() val runsRightAfter = None } - override lazy val loaders = new SymbolLoaders { - val global: outer.type = outer + override lazy val loaders = new { + val global: outer.type = outer + val platform: outer.platform.type = outer.platform + } with 
GlobalSymbolLoaders { // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects // that are not in their correct place (see bug for details) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala b/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala deleted file mode 100644 index f8761ca0ed..0000000000 --- a/src/scalap/scala/tools/scalap/scalax/rules/Arrows.scala +++ /dev/null @@ -1,37 +0,0 @@ -package scala.tools.scalap -package scalax -package rules - -trait Arrows extends UnitFunctors { - type Arr[-A, +B] <: Arrow[A, B] - type M[+B] = Arr[Nothing, B] - - def arrow[A, B](f : A => B) : Arr[A, B] - def diag[A] = arrow[A, (A, A)] { a => (a, a) } - - override def unit[B](b : => B) : M[B] = arrow { any : Any => b } - - trait Arrow[-A, +B] extends Functor[B] { this : Arr[A, B] => - - def map[C](f : B => C) = comp(arrow(f)) - def comp[C](bc : => Arr[B, C]) : Arr[A, C] - def fst[C] : Arr[(A, C), (B, C)] - } -} - -trait ApplicativeArrows extends Arrows { - type Arr[-A, +B] <: ApplicativeArrow[A, B] - - def app[A, B] : Arr[(Arr[A, B], A), B] - - trait ApplicativeArrow[-A, +B] extends Arrow[A, B] { self : Arr[A, B] => - def flatMap[SubA <: A, C](f : B => Arr[SubA, C]) : Arr[SubA, C] = - diag[SubA].comp(map(f).fst[SubA]).comp(app[SubA, C]) - } -} - -trait ArrowMonads extends ApplicativeArrows with Monads { - type Arr[-A, +B] <: ApplicativeArrow[A, B] with Monad[B] - - override def unit[A](a : => A) : M[A] = arrow[Unit, A](Unit => a) -} diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala deleted file mode 100644 index aa852c1e63..0000000000 --- a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala +++ /dev/null @@ -1,81 +0,0 @@ -// ----------------------------------------------------------------------------- -// -// 
Scalax - The Scala Community Library -// Copyright (c) 2005-8 The Scalax Project. All rights reserved. -// -// The primary distribution site is http://scalax.scalaforge.org/ -// -// This software is released under the terms of the Revised BSD License. -// There is NO WARRANTY. See the file LICENSE for the full text. -// -// ----------------------------------------------------------------------------- - -package scala.tools.scalap -package scalax -package rules - -trait Functor[+A] { - type M[+A] <: Functor[A] - def map[B](f : A => B) : M[B] -} - -trait Filter[+A] { - type M[+A] <: Filter[A] - def filter(f : A => Boolean) : M[A] -} - -trait Plus[+A] { - type M[+A] <: Plus[A] - def plus[B >: A](other : => M[B]) : M[B] -} - -trait OrElse[+A] { - type M[+A] <: OrElse[A] - def orElse[B >: A](other : => M[B]) : M[B] -} - -trait Units { - type M[+A] - def unit : M[Unit] - def unit[A](a : => A) : M[A] -} - -trait Zero { - type M[+A] - def zero : M[Nothing] -} - -trait Functors { - type M[+A] <: Functor[A] - - trait Functor[+A] extends rules.Functor[A] { this : M[A] => - type M[+A] = Functors.this.M[A] - } - - trait ZeroFunctor extends Functor[Nothing] { this : M[Nothing] => - override def map[B](f : Nothing => B) : M[B] = this - def filter(f : Nothing => Boolean) : M[Nothing] = this - def plus[B](other : => M[B]) : M[B] = other - def orElse[B](other : => M[B]) : M[B] = other - } -} - -/** One of the 'unit' definitions must be overridden in concrete subclasses */ -trait UnitFunctors extends Units with Functors { - def unit : M[Unit] = unit(()) - def unit[A](a : => A) : M[A] = unit map { Unit => a } -} - - -trait Monoidals extends UnitFunctors { - type M[+A] <: Monoidal[A] - - implicit def app[A, B](fab : M[A => B]) = (fa : M[A]) => fa applyTo fab - implicit def appUnit[A, B](a2b : A => B) = app(unit(a2b)) - - /** One of 'and' and 'applyTo' definitions must be overridden in concrete subclasses */ - trait Monoidal[+A] extends Functor[A] { self : M[A] => - def and[B](fb : => 
M[B]) : M[(A, B)] = ((a : A) => (b : B) => (a, b))(this)(fb) - def applyTo[B](fab : M[A => B]) : M[B] = fab and this map { case (f, a) => f(a) } - } -} diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Input.scala b/src/scalap/scala/tools/scalap/scalax/rules/Input.scala deleted file mode 100644 index 370eb0d054..0000000000 --- a/src/scalap/scala/tools/scalap/scalax/rules/Input.scala +++ /dev/null @@ -1,68 +0,0 @@ -// ----------------------------------------------------------------------------- -// -// Scalax - The Scala Community Library -// Copyright (c) 2005-8 The Scalax Project. All rights reserved. -// -// The primary distribution site is http://scalax.scalaforge.org/ -// -// This software is released under the terms of the Revised BSD License. -// There is NO WARRANTY. See the file LICENSE for the full text. -// -// ----------------------------------------------------------------------------- - -package scala.tools.scalap -package scalax -package rules - -trait Input[+A] extends Iterable[A] { - - def next : Result[Input[A], A, Nothing] - def index : Int - - def iterator = new Iterator[A] { - private var input : Input[A] = Input.this - private var result = input.next - - def hasNext = result != Failure - def next = { - val Success(input, value) = result - this.input = input - this.result = input.next - value - } - } -} - - -class ArrayInput[A](val array : Array[A], val index : Int) extends Input[A] { - def this(array : Array[A]) = this(array, 0) - - lazy val next : Result[ArrayInput[A], A, Nothing] = if (index >= array.length) Failure - else Success(new ArrayInput[A](array, index + 1), array(index)) - - override lazy val toString = this.iterator.mkString("\"", "", "\"") -} - - -class IterableInput[A](iterator : Iterator[A], val index : Int) extends Input[A] { - def this(iterable : Iterable[A]) = this(iterable.iterator, 0) - - lazy val next : Result[IterableInput[A], A, Nothing] = if (!iterator.hasNext) Failure - else Success(new IterableInput(iterator, 
index + 1), iterator.next) - - override lazy val toString = this.iterator.mkString("\"", "", "\"") -} - - -/** View one type of input as another based on a transformation rule */ -class View[A, B]( - transform : Input[A] => Result[Input[A], B, Nothing], - val input : Input[A], - val index : Int) - extends Input[B] { - - def next : Result[Input[B], B, Nothing] = transform(input) match { - case Success(context, b) => Success(new View(transform, context, index + 1), b) - case _ => Failure - } -} diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala b/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala deleted file mode 100644 index 639c414675..0000000000 --- a/src/scalap/scala/tools/scalap/scalax/rules/Monad.scala +++ /dev/null @@ -1,46 +0,0 @@ -// ----------------------------------------------------------------------------- -// -// Scalax - The Scala Community Library -// Copyright (c) 2005-8 The Scalax Project. All rights reserved. -// -// The primary distribution site is http://scalax.scalaforge.org/ -// -// This software is released under the terms of the Revised BSD License. -// There is NO WARRANTY. See the file LICENSE for the full text. 
-// -// ----------------------------------------------------------------------------- - -package scala.tools.scalap -package scalax -package rules - -trait Monad[+A] extends Functor[A] { - type M[+A] <: Monad[A] - def flatMap[B](f : A => M[B]) : M[B] -} - -trait Monads extends UnitFunctors { - type M[+A] <: Monad[A] - - trait Monad[+A] extends Functor[A] with rules.Monad[A] { this : M[A] => - def map[B](f : A => B) = flatMap { a => unit(f(a)) } - } - - trait ZeroMonad extends Monad[Nothing] with ZeroFunctor { this : M[Nothing] => - def flatMap[B](f : Nothing => M[B]) : M[B] = this - } -} - - -trait StateReader extends Monads { - type S - - def get : M[S] - def read[A](f : S => A) : M[A] - def set(s : => S) : M[S] - def update(f : S => S) : M[S] -} - - - - diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala deleted file mode 100644 index b1cc18f90b..0000000000 --- a/src/scalap/scala/tools/scalap/scalax/rules/package.scala +++ /dev/null @@ -1,9 +0,0 @@ -package scala.tools.scalap -package scalax - -package object rules { - implicit lazy val higherKinds = scala.language.higherKinds - implicit lazy val postfixOps = scala.language.postfixOps - implicit lazy val implicitConversions = scala.language.implicitConversions - implicit lazy val reflectiveCalls = scala.language.reflectiveCalls -} diff --git a/starr.number b/starr.number index 89659fcbf3..d55aa7d7fc 100644 --- a/starr.number +++ b/starr.number @@ -1 +1 @@ -starr.version=2.11.0-M2
\ No newline at end of file +starr.version=2.11.0-M4
\ No newline at end of file diff --git a/test/files/gitignore.SAMPLE b/test/files/.gitignore index 161be5b55f..161be5b55f 100644 --- a/test/files/gitignore.SAMPLE +++ b/test/files/.gitignore diff --git a/test/files/codelib/.gitignore b/test/files/codelib/.gitignore deleted file mode 100644 index f77a26afb7..0000000000 --- a/test/files/codelib/.gitignore +++ /dev/null @@ -1 +0,0 @@ -code.jar diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check index a1ff9491cf..7b229ba679 100644 --- a/test/files/jvm/manifests-new.check +++ b/test/files/jvm/manifests-new.check @@ -31,8 +31,8 @@ x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo -x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1> -x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1> +x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<refinement of Bar[String]> +x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<refinement of Bar[String]> ()=() true=true diff --git a/test/files/lib/.gitignore b/test/files/lib/.gitignore deleted file mode 100644 index b4ac0b8789..0000000000 --- a/test/files/lib/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -annotations.jar -enums.jar -genericNest.jar -javac-artifacts.jar -jsoup-1.3.1.jar -methvsfield.jar -nest.jar -scalacheck.jar diff --git a/test/files/neg/ambiguous-float-dots.check b/test/files/neg/ambiguous-float-dots.check deleted file mode 100644 index cdd2d6fa2a..0000000000 --- a/test/files/neg/ambiguous-float-dots.check +++ /dev/null @@ -1,27 +0,0 @@ -ambiguous-float-dots.scala:2: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. - val x0 = 5. - ^ -ambiguous-float-dots.scala:6: warning: This lexical syntax is deprecated. 
From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. - val x1 = 5.f - ^ -ambiguous-float-dots.scala:7: warning: Treating numbers with a leading zero as octal is deprecated. - val y0 = 055 - ^ -ambiguous-float-dots.scala:11: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. - 1.+(2) - ^ -ambiguous-float-dots.scala:12: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. - 1. + 2 - ^ -ambiguous-float-dots.scala:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 1.+(2) - ^ -ambiguous-float-dots.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 1. + 2 - ^ -ambiguous-float-dots.scala:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 1 + 2 - ^ -error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found -one error found diff --git a/test/files/neg/ambiguous-float-dots.flags b/test/files/neg/ambiguous-float-dots.flags deleted file mode 100644 index 65faf53579..0000000000 --- a/test/files/neg/ambiguous-float-dots.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings -deprecation
\ No newline at end of file diff --git a/test/files/neg/ambiguous-float-dots.scala b/test/files/neg/ambiguous-float-dots.scala deleted file mode 100644 index 87e948db35..0000000000 --- a/test/files/neg/ambiguous-float-dots.scala +++ /dev/null @@ -1,14 +0,0 @@ -class A { - val x0 = 5. -} - -class B { - val x1 = 5.f - val y0 = 055 -} - -class D { - 1.+(2) - 1. + 2 - 1 + 2 -} diff --git a/test/files/neg/ambiguous-float-dots2.check b/test/files/neg/ambiguous-float-dots2.check index 8919d2c6a8..40c9b4186d 100644 --- a/test/files/neg/ambiguous-float-dots2.check +++ b/test/files/neg/ambiguous-float-dots2.check @@ -1,10 +1,7 @@ -ambiguous-float-dots2.scala:7: error: Non-zero numbers may not have a leading zero. - val y0 = 055 - ^ ambiguous-float-dots2.scala:3: error: identifier expected but '}' found. } ^ -ambiguous-float-dots2.scala:12: error: ';' expected but integer literal found. +ambiguous-float-dots2.scala:11: error: ';' expected but integer literal found. 1. + 2 ^ -three errors found +two errors found diff --git a/test/files/neg/ambiguous-float-dots2.flags b/test/files/neg/ambiguous-float-dots2.flags deleted file mode 100644 index 112fc720a0..0000000000 --- a/test/files/neg/ambiguous-float-dots2.flags +++ /dev/null @@ -1 +0,0 @@ --Xfuture
\ No newline at end of file diff --git a/test/files/neg/ambiguous-float-dots2.scala b/test/files/neg/ambiguous-float-dots2.scala index 87e948db35..b1615c9273 100644 --- a/test/files/neg/ambiguous-float-dots2.scala +++ b/test/files/neg/ambiguous-float-dots2.scala @@ -4,7 +4,6 @@ class A { class B { val x1 = 5.f - val y0 = 055 } class D { diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check index c573eb3e15..619849693c 100644 --- a/test/files/neg/exhausting.check +++ b/test/files/neg/exhausting.check @@ -1,5 +1,5 @@ exhausting.scala:21: warning: match may not be exhaustive. -It would fail on the following input: List(_, _, _) +It would fail on the following inputs: List(_), List(_, _, _) def fail1[T](xs: List[T]) = xs match { ^ exhausting.scala:27: warning: match may not be exhaustive. diff --git a/test/files/neg/exhausting.scala b/test/files/neg/exhausting.scala index 5554ee2671..01c34f7039 100644 --- a/test/files/neg/exhausting.scala +++ b/test/files/neg/exhausting.scala @@ -17,7 +17,7 @@ object Test { case (_: Foo[_], _: Foo[_]) => () } - // fails for: ::(_, ::(_, ::(_, _))) + // fails for: ::(_, Nil), ::(_, ::(_, ::(_, _))), ... 
def fail1[T](xs: List[T]) = xs match { case Nil => "ok" case x :: y :: Nil => "ok" diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check index 8c8f039225..c733555549 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check @@ -1,5 +1,11 @@ -Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int; - macro method foo cannot be used here - term macros cannot override abstract methods - def foo(x: Int) = macro Impls.impl - ^ -one error found +Test_2.scala:3: error: <$anon: C with A> inherits conflicting members: + macro method t in trait C of type ()Unit and + method t in trait A of type ()Unit +(Note: this can be resolved by declaring an override in <$anon: C with A>.) + val c2 = new C with A {} + ^ +Test_2.scala:5: error: overriding macro method t in trait C of type ()Unit; + method t cannot be used here - only term macros can override term macros + val c4 = new C with A { override def t(): Unit = () } + ^ +two errors found diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala index e43264f52f..f5b2555aa5 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala @@ -1,13 +1,8 @@ -import scala.reflect.macros.{Context => Ctx} +import scala.reflect.macros.Context +import language.experimental.macros -object Impls { - def impl(c: Ctx)(x: c.Expr[Int]) = x -} +trait T { def t(): Unit } +trait A { def t(): Unit = () } -trait Foo { - def foo(x: Int): Int -} - -object Macros extends Foo { - def foo(x: Int) = macro Impls.impl -} +object Macro { def t(c: Context)(): c.Expr[Unit] = 
c.universe.reify(()) } +trait C extends T { self: A => override def t(): Unit = macro Macro.t } diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala index 08fff30baf..9b4c8e35f0 100644 --- a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala +++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala @@ -1,4 +1,6 @@ object Test extends App { - val designator: Foo = Macros - designator.foo(42) + val c1 = new A with C {} + val c2 = new C with A {} + val c3 = new C with A { override def t(): Unit = macro Macro.t } + val c4 = new C with A { override def t(): Unit = () } }
\ No newline at end of file diff --git a/test/files/neg/names-defaults-neg-ref.check b/test/files/neg/names-defaults-neg-ref.check index 00052c72dc..61d66fd32a 100644 --- a/test/files/neg/names-defaults-neg-ref.check +++ b/test/files/neg/names-defaults-neg-ref.check @@ -1,4 +1,4 @@ -names-defaults-neg-ref.scala:3: error: in anonymous class $anon, multiple overloaded alternatives of method f define default arguments. +names-defaults-neg-ref.scala:3: error: in <$anon: A2235 with B2235>, multiple overloaded alternatives of method f define default arguments. The members with defaults are defined in trait B2235 and trait A2235. new A2235 with B2235 ^ diff --git a/test/files/neg/t0764.check b/test/files/neg/t0764.check index e14c7705b8..6156b52712 100644 --- a/test/files/neg/t0764.check +++ b/test/files/neg/t0764.check @@ -1,6 +1,7 @@ t0764.scala:13: error: type mismatch; found : Node{type T = _1.type} where val _1: Node{type T = NextType} required: Node{type T = Main.this.AType} + (which expands to) Node{type T = Node{type T = NextType}} new Main[AType]( (value: AType).prepend ) ^ one error found diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check index e69be3ef2c..5e3821b153 100644 --- a/test/files/neg/t1112.check +++ b/test/files/neg/t1112.check @@ -1,4 +1,4 @@ -t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => () => Unit)Unit +t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => Test.this.Type1)Unit call(0,() => System.out.println("here we are")) ^ one error found diff --git a/test/files/neg/t1432.check b/test/files/neg/t1432.check index 180cb05e67..e41f3453fe 100644 --- a/test/files/neg/t1432.check +++ b/test/files/neg/t1432.check @@ -1,6 +1,8 @@ -t1432.scala:10: error: type mismatch; - found : (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double) - required: (Int, Unit => Double) +t1432.scala:12: error: type mismatch; + found : (Int, Bug_NoUnique.Alias2[Bug_NoUnique.Wrap[Unit]] => Double) + (which 
expands to) (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double) + required: Bug_NoUnique.TypeCon[Unit] + (which expands to) (Int, Unit => Double) def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x) ^ one error found diff --git a/test/files/neg/t1432.scala b/test/files/neg/t1432.scala index 638f36554f..bdf2331280 100644 --- a/test/files/neg/t1432.scala +++ b/test/files/neg/t1432.scala @@ -4,7 +4,9 @@ object Bug_NoUnique { case class Wrap[E](parent:E) {} - def wrap[E,A,Y](v : (A,E=>Y)) : (A,Wrap[E]=>Y) = + type Alias2[E] = Wrap[E] + + def wrap[E,A,Y](v : (A,E=>Y)) : (A,Alias2[E]=>Y) = throw new Error("Body here") def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x) diff --git a/test/files/neg/t6123-explaintypes-macros.check b/test/files/neg/t6123-explaintypes-macros.check new file mode 100644 index 0000000000..ebcb8069d5 --- /dev/null +++ b/test/files/neg/t6123-explaintypes-macros.check @@ -0,0 +1,9 @@ +c.universe.Expr[Any]* <: c.universe.Expr[String]*? +false +BadMac_2.scala:6: error: macro implementation has wrong shape: + required: (c: scala.reflect.macros.Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] + found : (c: scala.reflect.macros.Context)(format: c.Expr[String], params: c.Expr[String]*): c.Expr[Unit] +type mismatch for parameter params: c.Expr[Any]* does not conform to c.Expr[String]* + def printf(format: String, params: Any*): Unit = macro printf_impl + ^ +one error found diff --git a/test/files/pos/t6123-explaintypes-macros.flags b/test/files/neg/t6123-explaintypes-macros/BadMac_2.flags index b36707c7cf..b36707c7cf 100644 --- a/test/files/pos/t6123-explaintypes-macros.flags +++ b/test/files/neg/t6123-explaintypes-macros/BadMac_2.flags diff --git a/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala new file mode 100644 index 0000000000..38b8e24444 --- /dev/null +++ b/test/files/neg/t6123-explaintypes-macros/BadMac_2.scala @@ -0,0 +1,8 @@ +import 
scala.language.experimental.macros +import scala.reflect.macros.Context + +// explain some macro types to me +object BadMac { + def printf(format: String, params: Any*): Unit = macro printf_impl + def printf_impl(c: Context)(format: c.Expr[String], params: c.Expr[String]*): c.Expr[Unit] = ??? +} diff --git a/test/files/neg/t6123-explaintypes-macros/Macros.flags b/test/files/neg/t6123-explaintypes-macros/Macros.flags new file mode 100644 index 0000000000..b36707c7cf --- /dev/null +++ b/test/files/neg/t6123-explaintypes-macros/Macros.flags @@ -0,0 +1 @@ +-explaintypes diff --git a/test/files/pos/t6123-explaintypes-macros.scala b/test/files/neg/t6123-explaintypes-macros/Macros.scala index e650ad2038..a12c277c86 100644 --- a/test/files/pos/t6123-explaintypes-macros.scala +++ b/test/files/neg/t6123-explaintypes-macros/Macros.scala @@ -5,3 +5,6 @@ object Macros { def printf(format: String, params: Any*): Unit = macro printf_impl def printf_impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = ??? } + +// something trivial to run +object Test extends App diff --git a/test/files/neg/t6231.check b/test/files/neg/t6231.check index b27961d393..2428bf66d0 100644 --- a/test/files/neg/t6231.check +++ b/test/files/neg/t6231.check @@ -1,6 +1,6 @@ t6231.scala:4: error: Implementation restriction: local trait Bug$X$1 is unable to automatically capture the -free variable value ev$1 on behalf of anonymous class anonfun$qux$1. You can manually assign it to a val inside the trait, -and refer that that val in anonymous class anonfun$qux$1. For more details, see SI-6231. +free variable value ev$1 on behalf of <$anon: Function0>. You can manually assign it to a val inside the trait, +and refer that that val in <$anon: Function0>. For more details, see SI-6231. 
def qux = { () => ev } ^ one error found diff --git a/test/files/neg/t6260.check b/test/files/neg/t6260.check index 46e9bd1dfc..60c4add143 100644 --- a/test/files/neg/t6260.check +++ b/test/files/neg/t6260.check @@ -1,10 +1,10 @@ -t6260.scala:3: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun +t6260.scala:3: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in <$anon: Box[X] => Box[Y]> which overrides method apply: (v1: T1)R in trait Function1 clashes with definition of the member itself; both have erased type (v1: Object)Object ((bx: Box[X]) => new Box(f(bx.x)))(this) ^ -t6260.scala:8: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun +t6260.scala:8: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in <$anon: Box[X] => Box[Y]> which overrides method apply: (v1: T1)R in trait Function1 clashes with definition of the member itself; both have erased type (v1: Object)Object diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check index 6337d4c7d9..43c8252753 100644 --- a/test/files/neg/t6666.check +++ b/test/files/neg/t6666.check @@ -1,28 +1,28 @@ -t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from anonymous class 2, would require illegal premature access to object O1 +t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1 F.byname(x) ^ -t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from anonymous class 3, would require illegal premature access to object O2 +t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2 F.byname(x) ^ -t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from anonymous class 4, would 
require illegal premature access to object O3 +t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3 F.hof(() => x) ^ -t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from anonymous class 7, would require illegal premature access to the unconstructed `this` of class C1 +t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1 F.byname(x) ^ -t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from anonymous class 8, would require illegal premature access to the unconstructed `this` of class C2 +t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2 F.byname(x) ^ -t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from anonymous class 9, would require illegal premature access to the unconstructed `this` of class C3 +t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3 F.hof(() => x) ^ t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C4 object Nested { def xx = x} ^ -t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from anonymous class 12, would require illegal premature access to the unconstructed `this` of class C11 +t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C11 
F.byname(x) ^ -t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from anonymous class 13, would require illegal premature access to the unconstructed `this` of class C13 +t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C13 F.hof(() => x) ^ t6666.scala:104: error: Implementation restriction: access of method x$13 in class C14 from object Nested$5, would require illegal premature access to the unconstructed `this` of class C14 diff --git a/test/files/neg/t6666c.check b/test/files/neg/t6666c.check index 8fb9f4ba14..384e52a9fc 100644 --- a/test/files/neg/t6666c.check +++ b/test/files/neg/t6666c.check @@ -4,7 +4,7 @@ class D extends C({def x = 0; object X { x }}) t6666c.scala:5: error: Implementation restriction: access of method x$2 in class D1 from object X$5, would require illegal premature access to the unconstructed `this` of class D1 class D1 extends C1({def x = 0; () => {object X { x }}}) ^ -t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of anonymous class 2 +t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of <$anon: Function0> class D2 extends C2({def x = 0; object X { x }}) ^ three errors found diff --git a/test/files/neg/t6666e.check b/test/files/neg/t6666e.check index 9fcc3ab718..3189612314 100644 --- a/test/files/neg/t6666e.check +++ b/test/files/neg/t6666e.check @@ -1,4 +1,4 @@ -t6666e.scala:8: error: Implementation restriction: anonymous class $anonfun requires premature access to class Crash. +t6666e.scala:8: error: Implementation restriction: <$anon: Nothing => Unit> requires premature access to class Crash. 
this(Nil.collect{case x =>}) ^ one error found diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check new file mode 100644 index 0000000000..f70d50bee3 --- /dev/null +++ b/test/files/neg/t7636.check @@ -0,0 +1,10 @@ +t7636.scala:3: error: illegal inheritance; + self-type Main.C does not conform to Main.ResultTable[_$3]'s selftype Main.ResultTable[_$3] + class C extends ResultTable(Left(5):Either[_,_])(5) + ^ +t7636.scala:3: error: type mismatch; + found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2 + required: Either[_, _$3(in object Main)] where type _$3(in object Main) + class C extends ResultTable(Left(5):Either[_,_])(5) + ^ +two errors found diff --git a/test/files/neg/t7636.scala b/test/files/neg/t7636.scala new file mode 100644 index 0000000000..a7b1b90151 --- /dev/null +++ b/test/files/neg/t7636.scala @@ -0,0 +1,7 @@ +object Main extends App{ + class ResultTable[E]( query : Either[_,E] )( columns : Int ) + class C extends ResultTable(Left(5):Either[_,_])(5) +} +// Inference of the existential type for the parent type argument +// E still fails. That looks tricky to fix, see the comments in SI-7636. +// But we at least prevent a cascading NPE.
\ No newline at end of file diff --git a/test/files/neg/t7669.check b/test/files/neg/t7669.check new file mode 100644 index 0000000000..c090ed18ce --- /dev/null +++ b/test/files/neg/t7669.check @@ -0,0 +1,7 @@ +t7669.scala:9: warning: match may not be exhaustive. +It would fail on the following input: NotHandled(_) + def exhausto(expr: Expr): Unit = expr match { + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/t7669.flags b/test/files/neg/t7669.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/t7669.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/t7669.scala b/test/files/neg/t7669.scala new file mode 100644 index 0000000000..12441ec056 --- /dev/null +++ b/test/files/neg/t7669.scala @@ -0,0 +1,13 @@ +object Test { + + sealed abstract class Expr + // Change type of `arg` to `Any` and the exhaustiveness warning + // is issued below + case class Op(arg: Expr) extends Expr + case class NotHandled(num: Double) extends Expr + + def exhausto(expr: Expr): Unit = expr match { + case Op(Op(_)) => + case Op(_) => + } +} diff --git a/test/files/pos/t2081.scala b/test/files/pos/t2081.scala index d772c02dc2..f4f21600c6 100644 --- a/test/files/pos/t2081.scala +++ b/test/files/pos/t2081.scala @@ -7,5 +7,5 @@ object ScalaForRubyists { val x = 10.days // a couple parser corner cases I wanted not to break - val y = 5.e0 + 5e7 + val y = 5.0e0 + 5e7 } diff --git a/test/files/pos/t7649.flags b/test/files/pos/t7649.flags new file mode 100644 index 0000000000..fcf951d907 --- /dev/null +++ b/test/files/pos/t7649.flags @@ -0,0 +1 @@ +-Yrangepos
\ No newline at end of file diff --git a/test/files/pos/t7649.scala b/test/files/pos/t7649.scala new file mode 100644 index 0000000000..a1b02f63f1 --- /dev/null +++ b/test/files/pos/t7649.scala @@ -0,0 +1,20 @@ +object Test { + val c: reflect.macros.Context = ??? + import c.universe._ + reify { + // The lookup of the implicit WeakTypeTag[Any] + // was triggering an unpositioned tree. + c.Expr[Any](Literal(Constant(0))).splice + } + + import scala.reflect.ClassTag + def ct[A: ClassTag]: Expr[A] = ??? + def tt[A: TypeTag]: Expr[A] = ??? + def wtt[A: WeakTypeTag]: Expr[A] = ??? + + reify { + ct[String].splice + tt[String].splice + wtt[String].splice + } +} diff --git a/test/files/pos/t7668.scala b/test/files/pos/t7668.scala new file mode 100644 index 0000000000..222a13d039 --- /dev/null +++ b/test/files/pos/t7668.scala @@ -0,0 +1,12 @@ +trait Space { + type T + val x: T +} + +trait Extractor { + def extract(s: Space): s.T +} + +class Sub extends Extractor { + def extract(s: Space) = s.x +} diff --git a/test/files/pos/t7689.scala b/test/files/pos/t7689.scala new file mode 100644 index 0000000000..022e7ab7a0 --- /dev/null +++ b/test/files/pos/t7689.scala @@ -0,0 +1,7 @@ +object A { + // The default getter must have an explicit return type (List[_] => Int) + // This wasn't happening since e28c3edda4. 
That commit encoded upper/lower + // bounds of Any/Nothing as EmptyTree, which were triggering an .isEmpty + // check in Namers#TypeTreeSubstitutor + def x(f: List[_] => Int = _ => 3) = 9 +} diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala index c884b6425b..f2233f1828 100755 --- a/test/files/presentation/doc/doc.scala +++ b/test/files/presentation/doc/doc.scala @@ -51,10 +51,6 @@ object Test extends InteractiveTest { new Typer(context) with InteractiveTyper with ScaladocTyper } - override lazy val loaders = new scala.tools.nsc.symtab.SymbolLoaders { - val global: outer.type = outer - } - def chooseLink(links: List[LinkTo]): LinkTo = links.head def internalLink(sym: Symbol, site: Symbol) = None def toString(link: LinkTo) = link.toString diff --git a/test/files/run/existentials3-new.check b/test/files/run/existentials3-new.check index c0233d2267..7f02866a29 100644 --- a/test/files/run/existentials3-new.check +++ b/test/files/run/existentials3-new.check @@ -1,8 +1,8 @@ Bar.type, t=TypeRef, s=type Bar.type Bar, t=TypeRef, s=type Bar -Test.ToS, t=RefinedType, s=f3 -Test.ToS, t=RefinedType, s=f4 -Test.ToS, t=RefinedType, s=f5 +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> () => Test.ToS, t=TypeRef, s=trait Function0 () => Test.ToS, t=TypeRef, s=trait Function0 $anon, t=TypeRef, s=type $anon @@ -12,9 +12,9 @@ List[Seq[Int]], t=TypeRef, s=class List List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List Bar.type, t=TypeRef, s=type Bar.type Bar, t=TypeRef, s=type Bar -Test.ToS, t=RefinedType, s=g3 -Test.ToS, t=RefinedType, s=g4 -Test.ToS, t=RefinedType, s=g5 +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> +Test.ToS, t=RefinedType, s=<refinement of Test.ToS> () => Test.ToS, t=TypeRef, s=trait Function0 () => Test.ToS, t=TypeRef, s=trait Function0 $anon, 
t=TypeRef, s=type $anon diff --git a/test/files/run/macro-term-declared-in-trait.check b/test/files/run/macro-term-declared-in-trait.check index 0d70ac74f3..0f3756ddb6 100644 --- a/test/files/run/macro-term-declared-in-trait.check +++ b/test/files/run/macro-term-declared-in-trait.check @@ -1,6 +1,6 @@ prefix = Expr[Nothing]({ final class $anon extends AnyRef with Base { - def <init>(): anonymous class $anon = { + def <init>(): <$anon: Base> = { $anon.super.<init>(); () }; diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check new file mode 100644 index 0000000000..db1f09b977 --- /dev/null +++ b/test/files/run/repl-javap-app.check @@ -0,0 +1,39 @@ +#partest java6 +Type in expressions to have them evaluated. +Type :help for more information. + +scala> :javap -app MyApp$ +public final void delayedEndpoint$MyApp$1(); + Code: + Stack=2, Locals=1, Args_size=1 + 0: getstatic #61; //Field scala/Console$.MODULE$:Lscala/Console$; + 3: ldc #63; //String Hello, delayed world. + 5: invokevirtual #67; //Method scala/Console$.println:(Ljava/lang/Object;)V + 8: return + LocalVariableTable: + Start Length Slot Name Signature + 0 9 0 this LMyApp$; +} + +scala> +#partest !java6 +Type in expressions to have them evaluated. +Type :help for more information. + +scala> :javap -app MyApp$ + public final void delayedEndpoint$MyApp$1(); + flags: ACC_PUBLIC, ACC_FINAL + Code: + stack=2, locals=1, args_size=1 + 0: getstatic #61 // Field scala/Console$.MODULE$:Lscala/Console$; + 3: ldc #63 // String Hello, delayed world. 
+ 5: invokevirtual #67 // Method scala/Console$.println:(Ljava/lang/Object;)V + 8: return + LocalVariableTable: + Start Length Slot Name Signature + 0 9 0 this LMyApp$; + LineNumberTable: + line 5: 0 +} + +scala> diff --git a/test/files/run/repl-javap-app.scala b/test/files/run/repl-javap-app.scala new file mode 100644 index 0000000000..be04920be1 --- /dev/null +++ b/test/files/run/repl-javap-app.scala @@ -0,0 +1,10 @@ + +import scala.tools.partest.ReplTest + +object MyApp extends App { + Console println "Hello, delayed world." +} + +object Test extends ReplTest { + def code = ":javap -app MyApp$" +} diff --git a/test/files/run/t5256g.check b/test/files/run/t5256g.check index c9c8d6e63d..cef3a413c2 100644 --- a/test/files/run/t5256g.check +++ b/test/files/run/t5256g.check @@ -1,3 +1,5 @@ -anonymous class $anon$1 +$anon Test.$anon$1 -A with B{def <init>(): A with B} +A with B { + def <init>(): A with B +} diff --git a/test/files/run/t5256h.check b/test/files/run/t5256h.check index 1b23a71a4c..1a4a92a684 100644 --- a/test/files/run/t5256h.check +++ b/test/files/run/t5256h.check @@ -1,4 +1,4 @@ -anonymous class $anon$1 +$anon Test.$anon$1 java.lang.Object { final private val x: Int diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check index 46974589d3..b37bf51d73 100644 --- a/test/files/run/t6028.check +++ b/test/files/run/t6028.check @@ -11,7 +11,7 @@ package <empty> { def foo(methodParam: Int): Function0 = { val methodLocal: Int = 0; { - (new anonymous class $anonfun$foo$1(T.this, methodParam, methodLocal): Function0) + (new <$anon: Function0>(T.this, methodParam, methodLocal): Function0) } }; def bar(barParam: Int): Object = { @@ -21,11 +21,11 @@ package <empty> { def tryy(tryyParam: Int): Function0 = { var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0); { - (new anonymous class $anonfun$tryy$1(T.this, tryyParam, tryyLocal): Function0) + (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0) } }; @SerialVersionUID(0) final 
<synthetic> class $anonfun$foo$1 extends runtime.AbstractFunction0$mcI$sp with Serializable { - def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): anonymous class $anonfun$foo$1 = { + def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): <$anon: Function0> = { $anonfun$foo$1.super.<init>(); () }; @@ -61,7 +61,7 @@ package <empty> { scala.this.Predef.print(scala.Int.box(barParam$1)) }; @SerialVersionUID(0) final <synthetic> class $anonfun$tryy$1 extends runtime.AbstractFunction0$mcV$sp with Serializable { - def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): anonymous class $anonfun$tryy$1 = { + def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): <$anon: Function0> = { $anonfun$tryy$1.super.<init>(); () }; diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check index a18a8e8023..9ac115a13f 100644 --- a/test/files/run/t6555.check +++ b/test/files/run/t6555.check @@ -7,14 +7,14 @@ package <empty> { }; private[this] val f: Int => Int = { @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1$mcII$sp with Serializable { - def <init>(): anonymous class $anonfun = { + def <init>(): <$anon: Int => Int> = { $anonfun.super.<init>(); () }; final def apply(param: Int): Int = $anonfun.this.apply$mcII$sp(param); <specialized> def apply$mcII$sp(param: Int): Int = param }; - (new anonymous class $anonfun(): Int => Int) + (new <$anon: Int => Int>(): Int => Int) }; <stable> <accessor> def f(): Int => Int = Foo.this.f } diff --git a/test/files/run/t7265.scala b/test/files/run/t7265.scala new file mode 100644 index 0000000000..c556930303 --- /dev/null +++ b/test/files/run/t7265.scala @@ -0,0 +1,27 @@ + +import scala.util.Properties._ + +object Test extends App { + + setProp("java.specification.version", "1.7") + + assert( isJavaAtLeast("1.5")) + assert( isJavaAtLeast("1.6")) + assert( isJavaAtLeast("1.7")) + assert(!isJavaAtLeast("1.8")) + assert(!isJavaAtLeast("1.71")) + + 
failing(isJavaAtLeast("1.a")) + failing(isJavaAtLeast("1")) + failing(isJavaAtLeast("")) + failing(isJavaAtLeast(".")) + failing(isJavaAtLeast(".5")) + failing(isJavaAtLeast("1.7.1")) + + def failing(u: =>Unit) = try { + u + assert(false, "Expected Exception") + } catch { + case _: NumberFormatException => + } +} diff --git a/test/files/run/t7336.scala b/test/files/run/t7336.scala new file mode 100644 index 0000000000..ace83f2c1f --- /dev/null +++ b/test/files/run/t7336.scala @@ -0,0 +1,31 @@ +import scala.concurrent.Await +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.Future +import scala.concurrent.duration.Duration + +/** This test uses recursive calls to Future.flatMap to create arrays whose + * combined size is slightly greater than the JVM heap size. A previous + * implementation of Future.flatMap would retain references to each array, + * resulting in a speedy OutOfMemoryError. Now, each array should be freed soon + * after it is created and the test should complete without problems. + */ +object Test { + def main(args: Array[String]) { + def loop(i: Int, arraySize: Int): Future[Unit] = { + val array = new Array[Byte](arraySize) + Future.successful(i).flatMap { i => + if (i == 0) { + Future.successful(()) + } else { + array.size // Force closure to refer to array + loop(i - 1, arraySize) + } + + } + } + + val arraySize = 1000000 + val tooManyArrays = (Runtime.getRuntime().totalMemory() / arraySize).toInt + 1 + Await.ready(loop(tooManyArrays, arraySize), Duration.Inf) + } +}
\ No newline at end of file diff --git a/test/files/run/t7455.check b/test/files/run/t7455.check new file mode 100644 index 0000000000..0eb9342888 --- /dev/null +++ b/test/files/run/t7455.check @@ -0,0 +1,4 @@ +private[package <empty>] def <init>(x$1: String): Outer[E] +private[package <empty>] def <init>(): Outer$PrivateInner +private[package <empty>] def <init>(): Outer$PrivateStaticInner +private[package <empty>] def <init>(x$2: String): Outer$PublicInner diff --git a/test/files/run/t7455/Outer.java b/test/files/run/t7455/Outer.java new file mode 100644 index 0000000000..10c97a9150 --- /dev/null +++ b/test/files/run/t7455/Outer.java @@ -0,0 +1,31 @@ +public class Outer<E> { + public void elements() { + new C<E>() { + }; + } + + private Outer(String a) {} + + static class SubSelf extends Outer<String> { + public SubSelf() { super(""); } + } + + private class PrivateInner { + } + class SubPrivateInner extends PrivateInner { + } + + private class PublicInner { + private PublicInner(String a) {} + } + class SubPublicInner extends PublicInner { + public SubPublicInner() { super(""); } + } + + private static class PrivateStaticInner { + } + public static class SubPrivateStaticInner extends PrivateStaticInner { + } +} + +class C<E> {} diff --git a/test/files/run/t7455/Test.scala b/test/files/run/t7455/Test.scala new file mode 100644 index 0000000000..b23a724c78 --- /dev/null +++ b/test/files/run/t7455/Test.scala @@ -0,0 +1,30 @@ +import scala.tools.partest._ + +// javac adds dummy parameters of type Outer$1 to synthetic access constructors +// This test shows that we strip them from the signatures. If we don't, we trigger +// parsing of Outer$1 which can fail if it references type parameters of the Outer. +// +// OLD OUTPUT: +// private[package <empty>] def <init>(x$2: Outer$1): Outer$PrivateInner +// error: error while loading Outer$1, class file 't7455-run.obj/Outer$1.class' is broken +// (class java.util.NoSuchElementException/key not found: E) +// ... 
+object Test extends DirectTest { + override def code = "" + + def show { + val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val compiler = newCompiler("-cp", classpath, "-d", testOutput.path) + import compiler._, definitions._ + new compiler.Run + + for { + name <- Seq("Outer", "Outer$PrivateInner", "Outer$PrivateStaticInner", "Outer$PublicInner") + clazz = compiler.rootMirror.staticClass(name) + constr <- clazz.info.member(nme.CONSTRUCTOR).alternatives + } { + println(constr.defString) + fullyInitializeSymbol(constr) + } + } +} diff --git a/test/files/run/t7617a.check b/test/files/run/t7617a.check new file mode 100644 index 0000000000..94954abda4 --- /dev/null +++ b/test/files/run/t7617a.check @@ -0,0 +1,2 @@ +hello +world diff --git a/test/files/run/t7617a/Macros_1.scala b/test/files/run/t7617a/Macros_1.scala new file mode 100644 index 0000000000..f9772c83c0 --- /dev/null +++ b/test/files/run/t7617a/Macros_1.scala @@ -0,0 +1,22 @@ +import scala.reflect.macros.Context +import language.experimental.macros + +object Macros { + def getValueImpl[T](c: Context): c.Expr[T] = { + import c.universe._ + c.Expr[T](Apply(Select(c.prefix.tree, newTermName("getVal")), Nil)) + } + def setValueImpl[T](c: Context)(value: c.Expr[T]): c.Expr[Unit] = { + import c.universe._ + c.Expr[Unit](Apply(Select(c.prefix.tree, newTermName("setVal")), List(value.tree))) + } +} + +object Module { + private var _val: String = "hello" + def setVal(value: String): Unit = this._val = value + def getVal(): String = this._val + + def value: String = macro Macros.getValueImpl[String] + def value_=(value: String): Unit = macro Macros.setValueImpl[String] +} diff --git a/test/files/run/t7617a/Test_2.scala b/test/files/run/t7617a/Test_2.scala new file mode 100644 index 0000000000..da6e34e09d --- /dev/null +++ b/test/files/run/t7617a/Test_2.scala @@ -0,0 +1,5 @@ +object Test extends App { + println(Module.value) + Module.value = "world" + 
println(Module.value) +}
\ No newline at end of file diff --git a/test/files/run/t7617b.check b/test/files/run/t7617b.check new file mode 100644 index 0000000000..81ec7e8b74 --- /dev/null +++ b/test/files/run/t7617b.check @@ -0,0 +1 @@ +foo = 2 diff --git a/test/files/run/t7617b/Macros_1.scala b/test/files/run/t7617b/Macros_1.scala new file mode 100644 index 0000000000..bc919935c9 --- /dev/null +++ b/test/files/run/t7617b/Macros_1.scala @@ -0,0 +1,8 @@ +import scala.reflect.macros.Context + +object Macros { + def impl(c: Context)(name: c.Expr[String])(value: c.Expr[Any]) = { + import c.universe._ + reify(println(s"${name.splice} = ${value.splice}")) + } +}
\ No newline at end of file diff --git a/test/files/run/t7617b/Test_2.scala b/test/files/run/t7617b/Test_2.scala new file mode 100644 index 0000000000..e27f650e80 --- /dev/null +++ b/test/files/run/t7617b/Test_2.scala @@ -0,0 +1,11 @@ +import scala.language.dynamics +import language.experimental.macros + +class C extends Dynamic { + def updateDynamic(name: String)(value: Any) = macro Macros.impl +} + +object Test extends App { + val c = new C + c.foo = 2 +}
\ No newline at end of file diff --git a/test/files/run/t7657.check b/test/files/run/t7657.check new file mode 100644 index 0000000000..c25d8d1c1b --- /dev/null +++ b/test/files/run/t7657.check @@ -0,0 +1,3 @@ +() +() +() diff --git a/test/files/run/t7657/Macros_1.scala b/test/files/run/t7657/Macros_1.scala new file mode 100644 index 0000000000..b1e31aa2dd --- /dev/null +++ b/test/files/run/t7657/Macros_1.scala @@ -0,0 +1,8 @@ +import scala.reflect.macros.Context +import language.experimental.macros + +trait T { def t(): Unit } +abstract class A extends T { override def t(): Unit = () } + +object Macro { def t(c: Context)(): c.Expr[Unit] = c.universe.reify(()) } +class C extends A { override def t(): Unit = macro Macro.t } diff --git a/test/files/run/t7657/Test_2.scala b/test/files/run/t7657/Test_2.scala new file mode 100644 index 0000000000..5cc46b6aa1 --- /dev/null +++ b/test/files/run/t7657/Test_2.scala @@ -0,0 +1,6 @@ +object Test extends App { + val c = new C() + println(c.t()) + println((c: T).t()) + println((c: A).t()) +}
\ No newline at end of file diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.check b/test/files/run/toolbox_typecheck_macrosdisabled.check index 688f37927c..3de296f1ad 100644 --- a/test/files/run/toolbox_typecheck_macrosdisabled.check +++ b/test/files/run/toolbox_typecheck_macrosdisabled.check @@ -2,7 +2,7 @@ val $u: ru.type = ru; val $m: $u.Mirror = ru.runtimeMirror({ final class $anon extends scala.AnyRef { - def <init>(): anonymous class $anon = { + def <init>(): <$anon: AnyRef> = { $anon.super.<init>(); () }; diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.check b/test/files/run/toolbox_typecheck_macrosdisabled2.check index bdcdb421fd..9810946024 100644 --- a/test/files/run/toolbox_typecheck_macrosdisabled2.check +++ b/test/files/run/toolbox_typecheck_macrosdisabled2.check @@ -2,7 +2,7 @@ val $u: ru.type = ru; val $m: $u.Mirror = ru.runtimeMirror({ final class $anon extends scala.AnyRef { - def <init>(): anonymous class $anon = { + def <init>(): <$anon: AnyRef> = { $anon.super.<init>(); () }; diff --git a/test/files/run/virtpatmat_casting.scala b/test/files/run/virtpatmat_casting.scala index d970abae90..22ac29bc3b 100644 --- a/test/files/run/virtpatmat_casting.scala +++ b/test/files/run/virtpatmat_casting.scala @@ -4,5 +4,6 @@ object Test extends App { // since the :: extractor's argument must be a ::, there has to be a cast before its unapply is invoked case x :: y :: z :: a :: xs => xs ++ List(x) case x :: y :: z :: xs => xs ++ List(x) + case _ => List(0) }) } diff --git a/test/files/speclib/.gitignore b/test/files/speclib/.gitignore deleted file mode 100644 index 2b26f5dfc5..0000000000 --- a/test/files/speclib/.gitignore +++ /dev/null @@ -1 +0,0 @@ -instrumented.jar diff --git a/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala new file mode 100644 index 0000000000..f3a75e24d0 --- /dev/null +++ b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala @@ -0,0 
+1,344 @@ +package scala.concurrent.impl + +import java.util.concurrent.ConcurrentLinkedQueue +import java.util.concurrent.CountDownLatch +import org.junit.Assert._ +import org.junit.{ After, Before, Test } +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.annotation.tailrec +import scala.concurrent.ExecutionContext +import scala.concurrent.impl.Promise.DefaultPromise +import scala.util.{ Failure, Success, Try } +import scala.util.control.NonFatal + +/** Tests for the private class DefaultPromise */ +@RunWith(classOf[JUnit4]) +class DefaultPromiseTest { + + // Many tests in this class use a helper class, Tester, to track the state of + // promises and to ensure they behave correctly, particularly the complex behaviour + // of linking. + + type Result = Int + type PromiseId = Int + type HandlerId = Int + type ChainId = Int + + /** The state of a set of set of linked promises. */ + case class Chain( + promises: Set[PromiseId], + state: Either[Set[HandlerId],Try[Result]] + ) + + /** A helper class that provides methods for creating, linking, completing and + * adding handlers to promises. With each operation it verifies that handlers + * are called, any expected exceptions are thrown, and that all promises have + * the expected value. + * + * The links between promises are not tracked precisely. Instead, linked promises + * are placed in the same Chain object. Each link in the same chain will share + * the same value. 
+ */
+  class Tester {
+
+    /** Handles to the real promises under test, keyed by id. */
+    var promises = Map.empty[PromiseId, DefaultPromise[Result]]
+    /** The expected linking structure: each chain tracks a set of promises
+     *  and their shared state — either the handlers still pending (Left)
+     *  or the completion result (Right).
+     *  NOTE(review): assumes single-threaded use per Tester instance —
+     *  these vars are not synchronized.
+     */
+    var chains = Map.empty[ChainId, Chain]
+
+    // Single source of fresh ids for promises, chains, handlers and results.
+    private var counter = 0
+    private def freshId(): Int = {
+      val id = counter
+      counter += 1
+      id
+    }
+
+    /** Handlers report their activity on this queue */
+    private val handlerQueue = new ConcurrentLinkedQueue[(Try[Result], HandlerId)]()
+
+    /** Get the chain for a given promise */
+    private def promiseChain(p: PromiseId): Option[(ChainId, Chain)] = {
+      val found: Iterable[(ChainId, Chain)] = for ((cid, c) <- chains; p0 <- c.promises; if (p0 == p)) yield ((cid, c))
+      found.toList match {
+        case Nil => None
+        case x::Nil => Some(x)
+        // A promise appearing in two chains means the model itself is broken.
+        case _ => throw new IllegalStateException(s"Promise $p found in more than one chain")
+      }
+    }
+
+    /** Passed to `checkEffect` to indicate the expected effect of an operation */
+    sealed trait Effect
+    case object NoEffect extends Effect
+    case class HandlersFired(result: Try[Result], handlers: Set[HandlerId]) extends Effect
+    case object MaybeIllegalThrown extends Effect
+    case object IllegalThrown extends Effect
+
+    /** Runs an operation while verifying that the operation has the expected effect */
+    private def checkEffect(expected: Effect)(f: => Any) {
+      assert(handlerQueue.isEmpty()) // Should have been cleared by last usage
+      val result = Try(f)
+
+      // Drain the queue, counting how many times each (result, handler)
+      // pair was reported while `f` ran.
+      var fireCounts = Map.empty[(Try[Result], HandlerId), Int]
+      while (!handlerQueue.isEmpty()) {
+        val key = handlerQueue.poll()
+        val newCount = fireCounts.getOrElse(key, 0) + 1
+        fireCounts = fireCounts.updated(key, newCount)
+      }
+
+      def assertIllegalResult = result match {
+        case Failure(e: IllegalStateException) => ()
+        case _ => fail(s"Expected IllegalStateException: $result")
+      }
+
+      expected match {
+        case NoEffect =>
+          assertTrue(s"Shouldn't throw exception: $result", result.isSuccess)
+          assertEquals(Map.empty[(Try[Result], HandlerId), Int], fireCounts)
+        case HandlersFired(firingResult, handlers) =>
+          assert(result.isSuccess)
+          // Each expected handler must have fired exactly once with the firing result.
+          val expectedCounts =
+            handlers.foldLeft(Map.empty[(Try[Result], HandlerId), Int]) {
+              case (map, hid) => map.updated((firingResult, hid), 1)
+            }
+          assertEquals(expectedCounts, fireCounts)
+        case MaybeIllegalThrown =>
+          if (result.isFailure) assertIllegalResult
+          assertEquals(Map.empty, fireCounts)
+        case IllegalThrown =>
+          assertIllegalResult
+          assertEquals(Map.empty, fireCounts)
+      }
+    }
+
+    /** Check each promise has the expected value. */
+    private def assertPromiseValues() {
+      for ((cid, chain) <- chains; p <- chain.promises) {
+        chain.state match {
+          case Right(result) => assertEquals(Some(result), promises(p).value)
+          case Left(_) => ()
+        }
+      }
+    }
+
+    /** Create a promise, returning a handle. */
+    def newPromise(): PromiseId = {
+      val pid = freshId()
+      val cid = freshId()
+      promises = promises.updated(pid, new DefaultPromise[Result]())
+      // A new promise starts alone in its own chain, with no handlers attached.
+      chains = chains.updated(cid, Chain(Set(pid), Left(Set.empty)))
+      assertPromiseValues()
+      pid
+    }
+
+    /** Complete a promise */
+    def complete(p: PromiseId) {
+      val r = Success(freshId())
+      val (cid, chain) = promiseChain(p).get
+      val (completionEffect, newState) = chain.state match {
+        // Pending: all handlers attached to the chain should fire with the new result.
+        case Left(handlers) => (HandlersFired(r, handlers), Right(r))
+        // Already completed: completing again must throw IllegalStateException.
+        case Right(completion) => (IllegalThrown, chain.state)
+      }
+      checkEffect(completionEffect) { promises(p).complete(r) }
+      chains = chains.updated(cid, chain.copy(state = newState))
+      assertPromiseValues()
+    }
+
+    /** Attempt to link two promises together */
+    def link(a: PromiseId, b: PromiseId): (ChainId, ChainId) = {
+      val promiseA = promises(a)
+      val promiseB = promises(b)
+      val (cidA, chainA) = promiseChain(a).get
+      val (cidB, chainB) = promiseChain(b).get
+
+      // Examine the state of each promise's chain to work out
+      // the effect of linking the promises, and to work out
+      // if the two chains should be merged.
+
+      sealed trait MergeOp
+      case object NoMerge extends MergeOp
+      case class Merge(state: Either[Set[HandlerId],Try[Result]]) extends MergeOp
+
+      val (linkEffect, mergeOp) = (chainA.state, chainB.state) match {
+        case (Left(handlers1), Left(handlers2)) =>
+          (NoEffect, Merge(Left(handlers1 ++ handlers2)))
+        case (Left(handlers), Right(result)) =>
+          (HandlersFired(result, handlers), Merge(Right(result)))
+        case (Right(result), Left(handlers)) =>
+          (HandlersFired(result, handlers), Merge(Right(result)))
+        case (Right(_), Right(_)) if (cidA == cidB) =>
+          (MaybeIllegalThrown, NoMerge) // Won't be thrown if happen to link a promise to itself
+        case (Right(_), Right(_)) =>
+          (IllegalThrown, NoMerge)
+      }
+
+      // Perform the linking and merge the chains, if appropriate
+
+      checkEffect(linkEffect) { promiseA.linkRootOf(promiseB) }
+
+      val (newCidA, newCidB) = mergeOp match {
+        case NoMerge => (cidA, cidB)
+        case Merge(newState) => {
+          // Replace both chains with a single merged chain under a fresh id.
+          chains = chains - cidA
+          chains = chains - cidB
+          val newCid = freshId()
+          chains = chains.updated(newCid, Chain(chainA.promises ++ chainB.promises, newState))
+          (newCid, newCid)
+        }
+      }
+      assertPromiseValues()
+      (newCidA, newCidB)
+    }
+
+    /** Attach an onComplete handler. When called, the handler will
+     *  place an entry into `handlerQueue` with the handler's identity.
+     *  This allows verification of handler calling semantics.
+     */
+    def attachHandler(p: PromiseId): HandlerId = {
+      val hid = freshId()
+      val promise = promises(p)
+      val (cid, chain) = promiseChain(p).get
+      val (attachEffect, newState) = chain.state match {
+        case Left(handlers) =>
+          (NoEffect, Left(handlers + hid))
+        // Attaching to an already-completed promise fires the new handler immediately.
+        case Right(result) =>
+          (HandlersFired(result, Set(hid)), Right(result))
+      }
+      // Run handlers synchronously on the calling thread so the effect is
+      // fully observable by the time checkEffect drains the queue.
+      implicit val ec = new ExecutionContext {
+        def execute(r: Runnable) { r.run() }
+        def reportFailure(t: Throwable) { t.printStackTrace() }
+      }
+
+      checkEffect(attachEffect) { promise.onComplete(result => handlerQueue.add((result, hid))) }
+      chains = chains.updated(cid, chain.copy(state = newState))
+      assertPromiseValues()
+      hid
+    }
+  }
+
+  // Some methods and objects that build a list of promise
+  // actions to test and then execute them
+
+  type PromiseKey = Int
+
+  // Actions are keyed by small ints; the corresponding promises are
+  // created lazily per key (see testActions.byKey).
+  sealed trait Action
+  case class Complete(p: PromiseKey) extends Action
+  case class Link(a: PromiseKey, b: PromiseKey) extends Action
+  case class AttachHandler(p: PromiseKey) extends Action
+
+  /** Tests a sequence of actions on a Tester. Creates promises as needed.
 */
+  private def testActions(actions: Seq[Action]) {
+    val t = new Tester()
+    // Lazily create one promise per distinct key.
+    var pMap = Map.empty[PromiseKey, PromiseId]
+    def byKey(key: PromiseKey): PromiseId = {
+      if (!pMap.contains(key)) {
+        pMap = pMap.updated(key, t.newPromise())
+      }
+      pMap(key)
+    }
+
+    actions foreach { action =>
+      action match {
+        case Complete(p) => t.complete(byKey(p))
+        case Link(a, b) => t.link(byKey(a), byKey(b))
+        case AttachHandler(p) => t.attachHandler(byKey(p))
+      }
+    }
+  }
+
+  /** Tests all permutations of actions for `count` promises */
+  private def testPermutations(count: Int) {
+    val ps = (0 until count).toList
+    val pPairs = for (a <- ps; b <- ps) yield (a, b)
+
+    // NOTE(review): `var` is unnecessary — allActions is never reassigned;
+    // the index `i` from zipWithIndex is also unused.
+    var allActions = ps.map(Complete(_)) ++ pPairs.map { case (a, b) => Link(a, b) } ++ ps.map(AttachHandler(_))
+    for ((permutation, i) <- allActions.permutations.zipWithIndex) {
+      testActions(permutation)
+    }
+  }
+
+  /** Test all permutations of actions with a single promise */
+  @Test
+  def testPermutations1 {
+    testPermutations(1)
+  }
+
+  /** Test all permutations of actions with two promises - about 40 thousand */
+  @Test
+  def testPermutations2 {
+    testPermutations(2)
+  }
+
+  /** Link promises in different orders, using the same link structure as is
+   *  used in Future.flatMap */
+  @Test
+  def simulateFlatMapLinking {
+    // Fixed seed keeps the shuffles deterministic across runs.
+    val random = new scala.util.Random(1)
+    for (_ <- 0 until 10) {
+      val t = new Tester()
+      val flatMapCount = 100
+
+      sealed trait FlatMapEvent
+      case class Link(a: PromiseId, b: PromiseId) extends FlatMapEvent
+      case class Complete(p: PromiseId) extends FlatMapEvent
+
+      // Builds the flatMap-style event list: link each newly created promise
+      // to the previous one, then complete the last promise created.
+      @tailrec
+      def flatMapEvents(count: Int, p1: PromiseId, acc: List[FlatMapEvent]): List[FlatMapEvent] = {
+        if (count == 0) {
+          Complete(p1)::acc
+        } else {
+          val p2 = t.newPromise()
+          flatMapEvents(count - 1, p2, Link(p2, p1)::acc)
+        }
+      }
+
+      val events = flatMapEvents(flatMapCount, t.newPromise(), Nil)
+      assertEquals(flatMapCount + 1, t.chains.size) // All promises are unlinked
+      val shuffled = random.shuffle(events)
+      shuffled foreach {
+        case Link(a, b) => t.link(a, b)
+        case Complete(p) => t.complete(p)
+      }
+      // All promises should be linked together, no matter the order of their linking
+      assertEquals(1, t.chains.size)
+    }
+  }
+
+  /** Link promises together on more than one thread, using the same link
+   *  structure as is used in Future.flatMap */
+  @Test
+  def testFlatMapLinking {
+    for (_ <- 0 until 100) {
+      val flatMapCount = 100
+      val startLatch = new CountDownLatch(1)
+      val doneLatch = new CountDownLatch(flatMapCount + 1)
+      // Queue `f` on the global pool; every task blocks until startLatch
+      // opens, so the link and completion operations race with each other.
+      def execute(f: => Unit) {
+        val ec = ExecutionContext.global
+        ec.execute(new Runnable {
+          def run() {
+            try {
+              startLatch.await()
+              f
+              doneLatch.countDown()
+            } catch {
+              case NonFatal(e) => ec.reportFailure(e)
+            }
+          }
+        })
+      }
+      @tailrec
+      def flatMapTimes(count: Int, p1: DefaultPromise[Int]) {
+        if (count == 0) {
+          execute { p1.success(1) }
+        } else {
+          val p2 = new DefaultPromise[Int]()
+          execute { p2.linkRootOf(p1) }
+          flatMapTimes(count - 1, p2)
+        }
+      }
+
+      val p = new DefaultPromise[Int]()
+      flatMapTimes(flatMapCount, p)
+      startLatch.countDown()
+      doneLatch.await()
+      // No matter the interleaving, the root promise must end up completed.
+      assertEquals(Some(Success(1)), p.value)
+    }
+  }
+
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
new file mode 100644
index 0000000000..285e87e3b2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -0,0 +1,89 @@
+package scala.tools.nsc
+package symtab
+
+import scala.reflect.internal.{Phase, NoPhase, SomePhase}
+import scala.tools.util.PathResolver
+import util.ClassPath
+import io.AbstractFile
+
+/**
+ * A complete SymbolTable implementation designed to be used in JUnit tests.
+ *
+ * It enables the `usejavacp` setting so the classpath of the JUnit runner
+ * is used for the symbol table's classpath.
+ *
+ * This class implements enough logic to make it possible to
+ * initialize definitions and inspect symbols.
+ */
+class SymbolTableForUnitTesting extends SymbolTable {
+  // Members declared in scala.reflect.api.Trees
+  override def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
+  override def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+  trait TreeCopier extends InternalTreeCopierOps
+  // these should be mocks
+  class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier
+  class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier
+
+  override def isCompilerUniverse: Boolean = true
+  def classPath = new PathResolver(settings).result
+
+  // Minimal platform: only the classpath-related members are functional;
+  // the remaining members are stubs (???) that throw if a test touches them.
+  object platform extends backend.Platform {
+    val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    lazy val loaders: SymbolTableForUnitTesting.this.loaders.type = SymbolTableForUnitTesting.this.loaders
+    def platformPhases: List[SubComponent] = Nil
+    val classPath: ClassPath[AbstractFile] = new PathResolver(settings).result
+    def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
+    def isMaybeBoxed(sym: Symbol): Boolean = ???
+    def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
+    def externalEquals: Symbol = ???
+    def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]): Unit = ???
+  }
+
+  object loaders extends symtab.SymbolLoaders {
+    val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    lazy val platform: symbolTable.platform.type = symbolTable.platform
+    // There is no typer phase in this stripped-down table, so fall back to
+    // a plain member lookup on the symbol's info.
+    def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+      sym.info.member(name)
+    // Symbols in tests are loaded from the classpath only; compiling source
+    // late is unsupported, so fail loudly if it is ever requested.
+    protected override def compileLate(srcfile: AbstractFile): Unit =
+      sys.error(s"We do not expect compileLate to be called in SymbolTableTest. The srcfile passed in is $srcfile")
+  }
+
+  class GlobalMirror extends Roots(NoSymbol) {
+    val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+    def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+    override def toString = "compiler mirror"
+  }
+
+  lazy val rootMirror: Mirror = {
+    val rm = new GlobalMirror
+    rm.init()
+    rm.asInstanceOf[Mirror]
+  }
+
+  def settings: Settings = {
+    val s = new Settings
+    // initialize classpath using java classpath
+    s.usejavacp.value = true
+    s
+  }
+
+  // Members declared in scala.reflect.internal.Required
+  def picklerPhase: scala.reflect.internal.Phase = SomePhase
+
+  // Members declared in scala.reflect.internal.SymbolTable
+  def currentRunId: Int = 1
+  def log(msg: => AnyRef): Unit = println(msg)
+  def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
+  val phases: Seq[Phase] = List(NoPhase, SomePhase)
+  // Index the phases by id; ids may be sparse, so size the array to the max id.
+  val phaseWithId: Array[Phase] = {
+    val maxId = phases.map(_.id).max
+    val phasesArray = Array.ofDim[Phase](maxId+1)
+    phases foreach { phase =>
+      phasesArray(phase.id) = phase
+    }
+    phasesArray
+  }
+  lazy val treeInfo: scala.reflect.internal.TreeInfo{val global: SymbolTableForUnitTesting.this.type} = ???
+
+  // Set a current phase so phase-dependent code has something sensible to read.
+  phase = SomePhase
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
new file mode 100644
index 0000000000..537cb93ef3
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
@@ -0,0 +1,50 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class SymbolTableTest {
+  // Fresh symbol table per test to keep tests independent.
+  private def createSymbolTable: SymbolTable = new SymbolTableForUnitTesting
+
+  /** Smoke test: definitions can be initialized from the JUnit classpath. */
+  @Test
+  def initDefinitions = {
+    val symbolTable = createSymbolTable
+    symbolTable.definitions.init()
+  }
+
+  /** Subtyping works for classpath-loaded symbols (List <:< Seq). */
+  @Test
+  def basicSubTypeCheck = {
+    val symbolTable = createSymbolTable
+    symbolTable.definitions.init()
+    val listClassTpe = symbolTable.definitions.ListClass.tpe
+    val seqClassTpe = symbolTable.definitions.SeqClass.tpe
+    assertTrue("List should be subclass of Seq", listClassTpe <:< seqClassTpe)
+  }
+
+  /**
+   * Demonstrates how one can create symbols and types completely
+   * from scratch and perform a subtype check.
+   */
+  @Test
+  def customClassesSubTypeCheck: Unit = {
+    val symbolTable = createSymbolTable
+    import symbolTable._
+    symbolTable.definitions.init()
+    val rootClass = symbolTable.rootMirror.RootClass
+    // Foo: a fresh class with no parents.
+    val fooSymbol = rootClass.newClassSymbol("Foo": TypeName, NoPosition, 0)
+    val fooType = new ClassInfoType(Nil, EmptyScope, fooSymbol)
+    fooSymbol.info = fooType
+    // Bar: a fresh class whose only parent is Foo.
+    val barSymbol = rootClass.newClassSymbol("Bar": TypeName, NoPosition, 0)
+    val fooTypeRef = TypeRef(fooSymbol.owner.tpe, fooSymbol, Nil)
+    val barType = new ClassInfoType(List(fooTypeRef), EmptyScope, barSymbol)
+    barSymbol.info = barType
+    assertTrue("Bar should be subclass of Foo", barSymbol.tpe <:< fooSymbol.tpe)
+    // NOTE(review): the message text looks wrong — this asserts that Foo is
+    // NOT a subtype of Bar, but reads "superclass of Foo". Message-only issue.
+    assertFalse("Foo should be a superclass of Foo", fooSymbol.tpe <:< barSymbol.tpe)
+  }
+
+}
diff --git a/test/pending/junit/scala/util/t7265.scala b/test/pending/junit/scala/util/t7265.scala
new file mode 100644
index 0000000000..3b8fa80dbe
--- /dev/null
+++ b/test/pending/junit/scala/util/t7265.scala
@@ -0,0 +1,46 @@
+
+package scala.util
+package test
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.util.PropertiesTrait
+
+/** The java version property uses the spec version
+ *  and must work for all "major.minor" and fail otherwise.
+ */
+@RunWith(classOf[JUnit4])
+class SpecVersionTest {
+  // Stub out everything except the spec version under test; scalaProps is
+  // overridden so vals like releaseVersion do not hit the real property file.
+  val sut = new PropertiesTrait {
+    override def javaSpecVersion = "1.7"
+
+    override protected def pickJarBasedOn: Class[_] = ???
+    override protected def propCategory: String = "test"
+
+    // override because of vals like releaseVersion
+    override lazy val scalaProps = new java.util.Properties
+  }
+
+  /** isJavaAtLeast compares "major.minor" spec strings against 1.7. */
+  @Test
+  def comparesCorrectly(): Unit = {
+    assert(sut isJavaAtLeast "1.5")
+    assert(sut isJavaAtLeast "1.6")
+    assert(sut isJavaAtLeast "1.7")
+    assert(!(sut isJavaAtLeast "1.8"))
+  }
+  // Anything that is not exactly a "major.minor" pair must be rejected.
+  @Test(expected = classOf[NumberFormatException])
+  def badVersion(): Unit = {
+    sut isJavaAtLeast "1.a"
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def missingVersion(): Unit = {
+    sut isJavaAtLeast "1"
+  }
+  @Test(expected = classOf[NumberFormatException])
+  def notASpec(): Unit = {
+    sut isJavaAtLeast "1.7.1"
+  }
+}
diff --git a/tools/.gitignore b/tools/.gitignore
deleted file mode 100644
index 57701c8353..0000000000
--- a/tools/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-push.jar