-rw-r--r--  CONTRIBUTING.md | 28
-rw-r--r--  README.md | 380
-rw-r--r--  build.sbt | 78
-rw-r--r--  project/Osgi.scala | 1
-rw-r--r--  project/Quiet.scala | 33
-rw-r--r--  project/plugins.sbt | 2
-rw-r--r--  spec/02-identifiers-names-and-scopes.md | 91
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 2
-rw-r--r--  src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 157
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/backend/Platform.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala | 148
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala (renamed from src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala) | 54
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ClassPath.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala | 63
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala (renamed from src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala) | 23
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala | 98
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala | 44
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala (renamed from src/compiler/scala/tools/nsc/classpath/VirtualDirectoryFlatClassPath.scala) | 5
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/settings/Warnings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 65
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassFileLookup.scala | 78
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 367
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectMain.scala | 4
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 67
-rw-r--r--  src/eclipse/partest/.classpath | 2
-rw-r--r--  src/eclipse/scaladoc/.classpath | 6
-rw-r--r--  src/eclipse/test-junit/.classpath | 2
-rw-r--r--  src/intellij/README.md | 64
-rw-r--r--  src/intellij/scala.ipr.SAMPLE | 12
-rw-r--r--  src/library/scala/Product.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 12
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 282
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 217
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 4
-rw-r--r--  src/library/scala/concurrent/Future.scala | 4
-rw-r--r--  src/library/scala/util/control/Exception.scala | 190
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 29
-rw-r--r--  src/partest-extras/scala/tools/partest/BytecodeTest.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 5
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 4
-rw-r--r--  src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala | 26
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 16
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala | 35
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala | 25
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/package.scala | 21
-rw-r--r--  src/scaladoc/scala/tools/ant/Scaladoc.scala | 4
-rw-r--r--  src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 51
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocFactory.scala | 16
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala | 13
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala | 5
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 3
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala | 12
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 30
-rw-r--r--  test/files/jvm/interpreter.check | 2
-rw-r--r--  test/files/jvm/serialization-new.check | 8
-rw-r--r--  test/files/jvm/serialization.check | 8
-rw-r--r--  test/files/neg/constrs.check | 2
-rw-r--r--  test/files/neg/t4460a.check | 2
-rw-r--r--  test/files/neg/t4460b.check | 2
-rw-r--r--  test/files/neg/t9045.check | 7
-rw-r--r--  test/files/neg/t9045.scala | 8
-rw-r--r--  test/files/pos/constant-warning.check | 4
-rw-r--r--  test/files/pos/constant-warning.flags | 1
-rw-r--r--  test/files/pos/constant-warning.scala | 3
-rw-r--r--  test/files/pos/t9397.scala | 12
-rw-r--r--  test/files/run/reify_printf.scala | 1
-rw-r--r--  test/files/run/repl-classbased.check | 23
-rw-r--r--  test/files/run/repl-classbased.scala | 22
-rw-r--r--  test/files/run/repl-implicits-nopredef.check | 5
-rw-r--r--  test/files/run/repl-implicits-nopredef.scala | 10
-rw-r--r--  test/files/run/repl-implicits.check | 5
-rw-r--r--  test/files/run/repl-implicits.scala | 5
-rw-r--r--  test/files/run/t3822.scala | 19
-rw-r--r--  test/files/run/t6198.scala | 7
-rw-r--r--  test/files/run/t6502.scala | 18
-rw-r--r--  test/files/run/t7319.check | 6
-rw-r--r--  test/files/run/t7445.scala | 6
-rw-r--r--  test/files/run/t8549.scala | 4
-rw-r--r--  test/files/run/t8756.check | 9
-rw-r--r--  test/files/run/t8756.scala | 22
-rw-r--r--  test/files/run/various-flat-classpath-types.scala | 9
-rw-r--r--  test/junit/scala/collection/immutable/ListMapTest.scala | 48
-rw-r--r--  test/junit/scala/collection/immutable/ListSetTest.scala | 53
-rw-r--r--  test/junit/scala/collection/mutable/PriorityQueueTest.scala | 7
-rw-r--r--  test/junit/scala/issues/BytecodeTest.scala | 8
-rw-r--r--  test/junit/scala/issues/OptimizedBytecodeTest.scala | 15
-rw-r--r--  test/junit/scala/issues/RunTest.scala | 23
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala | 15
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala | 6
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala | 8
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala | 9
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala | 11
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala | 14
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala | 8
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala | 11
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala | 11
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala | 8
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala | 26
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala | 9
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala | 14
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala | 22
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala | 24
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala | 9
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala | 73
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala | 9
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala | 8
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala | 24
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala | 9
-rw-r--r--  test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala (renamed from test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala) | 27
-rw-r--r--  test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala (renamed from test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala) | 59
-rw-r--r--  test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala | 27
-rw-r--r--  test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala | 12
-rw-r--r--  test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala | 143
-rw-r--r--  test/junit/scala/tools/testing/ClearAfterClass.java | 47
-rw-r--r--  test/junit/scala/util/matching/RegexTest.scala | 64
-rw-r--r--  test/scaladoc/run/t9752.check | 5
-rw-r--r--  test/scaladoc/run/t9752.scala | 28
-rw-r--r--  versions.properties | 4
138 files changed, 2083 insertions, 2311 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 617734210f..47d2788623 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,7 +8,7 @@ In 2014, you -- the Scala community -- matched the core team at EPFL in number o
We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)!
-This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala-internals, or tweet about it to @adriaanm.)
+This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to gitter, scala-internals, or tweet about it to @adriaanm.)
By the way, the team at Lightbend is: @adriaanm, @lrytz, @retronym, @SethTisue, and @szeiger.
@@ -117,14 +117,32 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra)
### Pass code review
-Your PR will need to be assigned to one or more reviewers. You can suggest reviewers yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala-internals.
+Your PR will need to be assigned to one or more reviewers. You can suggest reviewers
+yourself; if you're not sure, see the list in [README.md](README.md) or ask on gitter
+or scala-internals.
-To assign a reviewer, add a "review by @reviewer" to your PR description.
+To assign a reviewer, add a "review by @reviewer" to the PR description or in a
+comment on your PR.
NOTE: it's best not to @mention in commit messages, as github pings you every time a commit with your @name on it shuffles through the system (even in other repos, on merges,...).
A reviewer gives the green light by commenting "LGTM" (looks good to me).
-A review feedback may be addressed by pushing new commits to the request, if these commits stand on their own.
+When incorporating review feedback, we typically amend the changes into the existing commit(s)
+and `push -f` to the branch. This is to keep the git history clean. Additional commits
+are OK if they stand on their own.
-Once all these conditions are met, and we agree with the change (we are available on scala-internals to discuss this beforehand, before you put in the coding work!), we will merge your changes.
+Once all these conditions are met, and we agree with the change (we are available on
+gitter or scala-internals to discuss this beforehand, before you put in the coding work!),
+we will merge your changes.
+
+We use the following labels:
+
+Label | Description
+-------------------------|:-----------
+`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted
+`welcome` | added by reviewer / queue curator to welcome someone's first PR (for highlighting in the release notes)
+`release-notes` | added by reviewer / queue curator to make sure this PR is highlighted in the release notes
+`on-hold` | added when this PR should not yet be merged, even though CI is green
+`WIP`                    | added by the author if a PR is submitted for CI testing but still needs more work to be complete
+`assistance-appreciated` | added by the author if community help would be appreciated to move the change forward
diff --git a/README.md b/README.md
index dc869da0da..6ebb453176 100644
--- a/README.md
+++ b/README.md
@@ -5,12 +5,12 @@ This is the official repository for the [Scala Programming Language](http://www.
To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature.
-For more information on building and developing the core of Scala, read on!
+For more information on building and developing the core of Scala, make sure to read
+the rest of this README!
-Please also check out:
-
-* our [guidelines for contributing](CONTRIBUTING.md).
-* the ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html) covers some of the same ground as this README, but in greater detail and in a more tutorial style, using a running example.
+In order to get in touch with Scala contributors, join the
+[scala/contributors](https://gitter.im/scala/contributors) gitter channel or post on the
+[scala-internals mailing list](http://www.scala-lang.org/community/).
# Reporting issues
@@ -18,195 +18,247 @@ We're still using Jira for issue reporting, so please [report any issues](https:
(We would love to start using GitHub Issues, but we're too resource-constrained to take on this migration right now.)
# Get in touch!
-If you need some help with your PR at any time, please feel free to @-mention anyone from the list below (or simply `@scala/team-core-scala`), and we will do our best to help you out:
+If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out:
| username | talk to me about... |
--------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------|
- <img src="https://avatars.githubusercontent.com/adriaanm" height="50px" title="Adriaan Moors"/> | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec |
- <img src="https://avatars.githubusercontent.com/SethTisue" height="50px" title="Seth Tisue"/> | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience |
- <img src="https://avatars.githubusercontent.com/retronym" height="50px" title="Jason Zaugg"/> | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL |
- <img src="https://avatars.githubusercontent.com/Ichoran" height="50px" title="Rex Kerr"/> | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance |
- <img src="https://avatars.githubusercontent.com/lrytz" height="50px" title="Lukas Rytz"/> | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments |
- <img src="https://avatars.githubusercontent.com/VladUreche" height="50px" title="Vlad Ureche"/> | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool |
- <img src="https://avatars.githubusercontent.com/densh" height="50px" title="Denys Shabalin"/> | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library |
- <img src="https://avatars.githubusercontent.com/xeno-by" height="50px" title="Eugene Burmako"/> | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection |
- <img src="https://avatars.githubusercontent.com/heathermiller" height="50px" title="Heather Miller"/> | [`@heathermiller`](https://github.com/heathermiller) | documentation |
- <img src="https://avatars.githubusercontent.com/dickwall" height="50px" title="Dick Wall"/> | [`@dickwall`](https://github.com/dickwall) | process & community, documentation |
- <img src="https://avatars.githubusercontent.com/dragos" height="50px" title="Iulian Dragos"/> | [`@dragos`](https://github.com/dragos) | specialization, back end |
- <img src="https://avatars.githubusercontent.com/axel22" height="50px" title="Aleksandr Prokopec"/> | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization |
- <img src="https://avatars.githubusercontent.com/janekdb" height="50px" title="Janek Bogucki"/> | [`@janekdb`](https://github.com/janekdb) | documentation |
+ <img src="https://avatars.githubusercontent.com/adriaanm" height="50px" title="Adriaan Moors"/> | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec |
+ <img src="https://avatars.githubusercontent.com/SethTisue" height="50px" title="Seth Tisue"/> | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience |
+ <img src="https://avatars.githubusercontent.com/retronym" height="50px" title="Jason Zaugg"/> | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL |
+ <img src="https://avatars.githubusercontent.com/Ichoran" height="50px" title="Rex Kerr"/> | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance |
+ <img src="https://avatars.githubusercontent.com/lrytz" height="50px" title="Lukas Rytz"/> | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments |
+ <img src="https://avatars.githubusercontent.com/VladUreche" height="50px" title="Vlad Ureche"/> | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool |
+ <img src="https://avatars.githubusercontent.com/densh" height="50px" title="Denys Shabalin"/> | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library |
+ <img src="https://avatars.githubusercontent.com/xeno-by" height="50px" title="Eugene Burmako"/> | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection |
+ <img src="https://avatars.githubusercontent.com/heathermiller" height="50px" title="Heather Miller"/> | [`@heathermiller`](https://github.com/heathermiller) | documentation |
+ <img src="https://avatars.githubusercontent.com/dickwall" height="50px" title="Dick Wall"/> | [`@dickwall`](https://github.com/dickwall) | process & community, documentation |
+ <img src="https://avatars.githubusercontent.com/dragos" height="50px" title="Iulian Dragos"/> | [`@dragos`](https://github.com/dragos) | specialization, back end |
+ <img src="https://avatars.githubusercontent.com/axel22" height="50px" title="Aleksandr Prokopec"/> | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization |
+ <img src="https://avatars.githubusercontent.com/janekdb" height="50px" title="Janek Bogucki"/> | [`@janekdb`](https://github.com/janekdb) | documentation |
P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list!
-# Handy Links
- - [A wealth of documentation](http://docs.scala-lang.org)
- - [mailing lists](http://www.scala-lang.org/community/)
- - [Gitter room for Scala contributors](https://gitter.im/scala/contributors)
- - [Scala CI](https://scala-ci.typesafe.com/)
- - download the latest nightlies:
- - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/)
- - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/)
-
# Repository structure
```
scala/
-+--build.xml The main Ant build script, see also under src/build.
-+--pull-binary-libs.sh Pulls binary artifacts from remote repository.
-+--lib/ Pre-compiled libraries for the build.
-+--src/ All sources.
- +---/library Scala Standard Library.
- +---/reflect Scala Reflection.
- +---/compiler Scala Compiler.
- +---/eclipse Eclipse project files.
- +---/intellij IntelliJ project templates.
++--build.sbt The main sbt build script
++--build.xml The deprecated Ant build script
++--pull-binary-libs.sh Pulls binary artifacts from remote repository, used by build scripts
++--lib/ Pre-compiled libraries for the build
++--src/ All sources
+ +---/library Scala Standard Library
+ +---/reflect Scala Reflection
+ +---/compiler Scala Compiler
+ +---/eclipse Eclipse project files
+ +---/intellij IntelliJ project templates
++--spec/ The Scala language specification
+--scripts/ Scripts for the CI jobs (including building releases)
-+--test/ The Scala test suite.
-+--build/ [Generated] Build products output directory for ant.
-+--dist/ [Generated] The destination folder for Scala distributions.
++--test/ The Scala test suite
+ +---/files Partest tests
+ +---/junit JUnit tests
++--build/ [Generated] Build output directory
```
-# How we roll
+# Get Ready to Contribute
## Requirements
-You'll need a Java SDK. The baseline version is 6 for 2.11.x, 8 for
-2.12.x. (It's also possible to use a later SDK for local development,
-but the CI will verify against the baseline version.)
-
-You'll also need Apache Ant (version 1.9.3 or above) and curl (for `./pull-binary-libs.sh`).
-
-Mac OS X and Linux work. Windows may work if you use Cygwin. (Community help with keeping the build working on Windows is appreciated.)
-
-## Git Hygiene
-
-As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 72 or fewer characters for the first line, wrapping subsequent ones at 80 (at most).
-
-When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base.
-
-Writing the commit message is a great sanity check that the commit is of the right size. If it does too many things, the description will be unwieldy and tedious to write. Chop it up (`git add -u --patch` and `git rebase` are your friends) and simplify!
-
-To pinpoint bugs, we often use git bisect, which is only effective when we can count on each commit building (and passing the test suite). Thus, the CI bot enforces this. Please rebase your development history into a sensible list of self-contained commits that tell the story of your bug fix or improvement. Carve them up so that the riskier bits can be reverted independently. Keep changes focussed by splitting out cleanups from refactorings from actual changes to the logic.
-
-This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one).
-
-Please do not @-mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @-mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)).
-
-
-## Reviews
-
-Please consider nominating a reviewer for your PR in the PR's description or a comment. If unsure, not to worry -- the core team will assign one for you.
-
-Your reviewer is also your mentor, who will help you rework your PR so that it meets our requirements. We strive to give timely feedback, and apologize for those times when we are overwhelmed by the volume of contributions. Please feel free to ping us. You are entitled to regular progress updates and at least a quick assessment of feasibility of a bigger PR.
-
-To help you plan your contributions, we communicate our plans on a regular basis on scala-internals, and deadlines are tracked as due dates for [GitHub milestones](https://github.com/scala/scala/milestones).
-
-## Reviewing
-
-Once you've gained some experience with the code base and the process, the next step is to review the contributions of others.
-
-The main goal of this whole process is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part!
-
-## [Labels](https://github.com/scala/scala/labels)
-
-Label | Description
---------------- | -----------
-`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted
-`welcome` | reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes)
-`release-notes` | reviewer / queue curator adds to make sure this PR is highlighted in the release notes
-`on-hold` | added when this PR should not yet be merged, even though CI is green
-
-### Tips & Tricks
-Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows:
+You need the following tools:
+ - A Java SDK. The baseline version is 6 for 2.11.x, 8 for 2.12.x. It's possible
+ to use a later SDK for local development, but the CI will verify against the baseline
+ version.
+ - sbt, we recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner
+ script. It provides sensible default jvm options (stack and heap size).
+ - curl (for `./pull-binary-libs.sh`, used by the sbt / ant build).
+ - Apache Ant (version 1.9.3 or above) if you need to use the (deprecated) ant build.
+
+Mac OS X and Linux work. Windows may work if you use Cygwin. Community help with keeping
+the build working on Windows is appreciated.
+
+## Build Setup
+
+### Basics
+
+Scala is built in layers, where each layer is a complete Scala compiler and library.
+Here is a short description of the layers, from bottom to top:
+
+ - `starr`: the stable reference Scala release. We use an official release of
+ Scala (specified by `starr.version` in [versions.properties](versions.properties)),
+ downloaded from the Central Repository.
+ - `locker` (deprecated, only in ant): an intermediate layer that existed in the
+ ant build to perform a bootstrap.
+ - `quick`: the development layer which is incrementally built when working on
+ changes in the compiler or library.
+ - `strap` (deprecated, only in ant): a test layer used to check stability of
+ the build.
+
+The sbt build uses `starr` to build `quick`. This is sufficient for most development
+scenarios: changes to the library or the compiler can be tested by running the `quick`
+Scala (see below for how to do that).
+
+However, a full build of Scala (a *bootstrap*, as performed by our CI) requires two
+layers. This guarantees that every Scala version can build itself. If you change the
+code generation part of the Scala compiler, your changes will only be reflected in the
+bytecode of the library and compiler after a bootstrap. See below for how to create
+a bootstrap build locally.
+
+### Using the Sbt Build
+
+Core commands:
+ - `compile` compiles all sub-projects (library, reflect, compiler, scaladoc, etc)
+ - `scala` / `scalac` run the REPL / compiler directly from sbt (accept options /
+ arguments)
+ - `dist/mkBin` generates runner scripts (`scala`, `scalac`, etc) in `build/quick/bin`
+ - `dist/mkPack` creates a build in the Scala distribution format in `build/pack`
+  - `test` runs the JUnit tests; `testOnly *immutable.ListTest` runs a subset
+ - `partest` runs partest tests (accepts options, try `partest --help`)
+ - `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in
+ other sbt projects)
+ - Optionally `set VersionUtil.baseVersionSuffix in Global := "abcd123-SNAPSHOT"`
+ where `abcd123` is the git hash of the revision being published. You can also
+ use something custom like `"mypatch"`. This changes the version number from
+ `2.12.0-SNAPSHOT` to something more stable (`2.12.0-abcd123-SNAPSHOT`).
+ - Optionally `set publishArtifact in (Compile, packageDoc) in ThisBuild := false`
+ to skip generating / publishing API docs (speeds up the process).
+
+#### Sandbox
+
+We recommend keeping local test files in the `sandbox` directory, which is listed in
+the `.gitignore` of the Scala repo.
+
+#### Incremental Compilation
+
+Note that sbt's incremental compilation is often too coarse for the Scala compiler
+codebase and re-compiles too many files, resulting in long build times (check
+[sbt#1104](https://github.com/sbt/sbt/issues/1104) for progress on that front). In the
+meantime you can:
+ - Enable "ant mode" in which sbt only re-compiles source files that were modified.
+ Create a file `local.sbt` containing the line `(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)`.
+   Add an entry `local.sbt` to your `~/.gitignore` (a complete `local.sbt` sketch is shown after this list).
+ - Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its
+ incremental compiler is a bit less conservative, but usually correct.
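For reference, a minimal sketch of what that `local.sbt` could look like; it simply places the one-line setting quoted above in an sbt file at the root of the checkout:

```scala
// local.sbt -- enable sbt's "ant mode" for this checkout only.
// This is the exact setting quoted in the bullet above; keep the file untracked
// by adding an entry for it to your ~/.gitignore.
(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)
```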
+
+#### Local Bootstrap Build
+
+To perform a bootstrap using sbt:
+ - first a build is published either locally or on a temporary repository,
+ - then a separate invocation of sbt (using the previously built version as `starr`)
+ is used to build / publish the actual build.
+
+Assume the current `starr` version is `2.12.0-M4` (defined in
+[versions.properties](versions.properties)) and the current version is `2.12.0-SNAPSHOT`
+(defined in [build.sbt](build.sbt)). To perform a local bootstrap:
+ - Run `publishLocal` (you may want to specify a custom version suffix and skip
+ generating API docs, see above).
+ - Quit sbt and start a new sbt instance using `sbt -Dstarr.version=<version>` where
+ `<version>` is the version number you published locally.
+ - If the version number you published is not binary compatible with the current
+ `starr`, `set every scalaBinaryVersion := "2.12.0-M4"`. This is not required if
+ the version you published locally is binary compatible, i.e., if the current
+ `starr` is a 2.12.x release and not a milestone / RC.
+
+The last step is required to resolve modules (scala-xml, scala-partest, etc). It
+assumes that the module releases for the current `starr` work (in terms of binary
+compatibility) with the starr that you published locally. A full bootstrap
+requires re-building all of the modules. On our CI this is handled by the
+[bootstrap](scripts/jobs/integrate/bootstrap) script, but it (currently) cannot
+be easily executed locally.
+
+### IDE Setup
+
+You may use IntelliJ IDEA ([src/intellij/README.md](src/intellij/README.md)) or the
+Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)).
+
+In order to use IntelliJ's incremental compiler:
+ - run `dist/mkBin` in sbt to get a build and the runner scripts in `build/quick/bin`
+ - run "Build" - "Make Project" in IntelliJ
+
+Now you can edit and build in IntelliJ and use the scripts (compiler, REPL) to
+directly test your changes. You can also run the `scala`, `scalac` and `partest`
+commands in sbt. Enable "ant mode" (explained above) to prevent sbt's incremental
+compiler from re-compiling (too many) files before each `partest` invocation.
+
+# Coding Guidelines
+
+Our guidelines for contributing are explained in [CONTRIBUTING.md](CONTRIBUTING.md).
+It contains useful information on our coding standards, testing, documentation, how
+we use git and GitHub and how to get your code reviewed.
+
+You may also want to check out the following resources:
+ - The ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html)
+ covers some of the same ground as this README, but in greater detail and in a more
+ tutorial style, using a running example.
+ - [Scala documentation site](http://docs.scala-lang.org)
+
+# Scala CI
+
+Once you submit a PR, your commits are automatically tested by the Scala CI.
+
+If you see a spurious build failure, you can post `/rebuild` as a PR comment.
+The [scabot README](https://github.com/scala/scabot) lists all available commands.
+
+If you'd like to test your patch before having everything polished for review,
+feel free to submit a PR and add the `WIP` label. In case your WIP branch contains
+a large number of commits (that you didn't clean up / squash yet for review),
+consider adding `[ci: last-only]` to the PR title. That way only the last commit
+will be tested, saving some energy and CI resources. Note that inactive WIP PRs
+will be closed eventually, which does not mean the change is being rejected.
+
+CI performs a full bootstrap. The first task, `validate-publish-core`, publishes
+a build of your commit to the temporary repository
+https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots.
+Note that this build is not yet bootstrapped; its bytecode is built using the
+current `starr`. The version number is `2.12.0-abcd123-SNAPSHOT` where `abcd123`
+is the commit hash.
+
+You can use Scala builds in the validation repository locally by adding a resolver
+and specifying the corresponding `scalaVersion`:
```
$ sbt
-
> set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/"
-> set scalaVersion := "<milestone>-<sha7>-SNAPSHOT"
+> set scalaVersion := "2.12.0-abcd123-SNAPSHOT"
> console
```
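The same two settings can also go in a standalone project's `build.sbt` instead of being entered with `set`; a minimal sketch, reusing the placeholder version from the example above:

```scala
// build.sbt -- compile a separate sbt project against a PR validation build.
// "2.12.0-abcd123-SNAPSHOT" is the placeholder version used above; substitute the
// actual version published for your commit.
resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/"

scalaVersion := "2.12.0-abcd123-SNAPSHOT"
```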
-Here, `<milestone>` is the milestone targeted by the PR (e.g., 2.11.6), and `<sha7>` is the 7-character sha (the format used by GitHub on the web).
-
-## IDE Setup
-### Eclipse
-See [src/eclipse/README.md](src/eclipse/README.md).
-
-### IntelliJ 15
-See [src/intellij/README.md](src/intellij/README.md).
-
-## Building with sbt (EXPERIMENTAL)
-
-The experimental sbt-based build definition has arrived! Run `sbt package`
-to build the compiler. You can run `sbt test` to run unit (JUnit) tests.
-Use `sbt test/it:test` to run integration (partest) tests.
-
-We would like to migrate to sbt build as quickly as possible. If you would
-like to help please use the scala-internals mailing list to discuss your
-ideas and coordinate your effort with others.
-
-## Building with Ant
+Note that the Scala modules are currently not built / published against the
+tested version during CI validation.
-NOTE: we are working on migrating the build to sbt.
+## Nightly Builds
-If you are behind a HTTP proxy, include
-[`ANT_ARGS=-autoproxy`](https://ant.apache.org/manual/proxy.html) in
-your environment.
+The Scala CI builds nightly download releases (including all modules) and publishes
+them to the following locations:
+ - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D)
+ - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=A)
-Run `ant build-opt` to build an optimized version of the compiler.
-Verify your build using `ant test-opt`.
+The CI also publishes nightly API docs:
+ - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D)
+ - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/2.12.x/)
+ - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/?C=M;O=D)
+ - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/2.11.x/)
-The Scala build system is based on Apache Ant. Most required pre-compiled
-libraries are part of the repository (in 'lib/'). The following however is
-assumed to be installed on the build machine: TODO
+Note that we currently don't publish nightly (or SNAPSHOT) builds in maven or ivy
+format to any repository. You can track progress on this front at
+[scala-jenkins-infra#133](https://github.com/scala/scala-jenkins-infra/issues/133)
+and [scala-dev#68](https://github.com/scala/scala-dev/issues/68).
-### Ant Tips and tricks
+## Scala CI Internals
-Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant).
-
-Command | Description
------------------------ | -----------
-`./pull-binary-libs.sh` | downloads all binary artifacts associated with this commit.
-`ant -p` | prints out information about the commonly used ant targets.
-`ant` or `ant build` | A quick compilation (to `build/quick`) of your changes using the locker compiler.
-`ant dist` | builds a distribution in 'dists/latest'.
-`ant all.clean` | removes all build files and all distributions.
-
-A typical debug cycle incrementally builds quick, then uses it to compile and run the file
-`sandbox/test.scala` as follows:
-
- - `ant && build/quick/bin/scalac -d sandbox sandbox/test.scala && build/quick/bin/scala -cp sandbox Test`
-
-We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick/bin/scala -cp sandbox` to `qs` in our shell.
-
-`ant test-opt` tests that your code is working and fit to be committed:
-
- - Runs the test suite and bootstrapping test on quick.
- - You can run the suite only (skipping strap) with `ant test.suite`.
-
-`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick.
-Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line:
-
-```sh
-ANT_OPTS="-Xms512M -Xmx2048M -Xss1M" ant docs
-```
+The Scala CI runs as a Jenkins instance on [scala-ci.typesafe.com](https://scala-ci.typesafe.com/),
+configured by a chef cookbook at [scala/scala-jenkins-infra](https://github.com/scala/scala-jenkins-infra).
-### Bootstrapping concepts
-NOTE: This is somewhat outdated, but the ideas still hold.
+The build bot that watches PRs, triggers testing builds and applies the "reviewed" label
+after an LGTM comment is in the [scala/scabot](https://github.com/scala/scabot) repo.
-In order to guarantee the bootstrapping of the Scala compiler, the ant build
-compiles Scala in layers. Each layer is a complete compiled Scala compiler and library.
-A superior layer is always compiled by the layer just below it. Here is a short
-description of the four layers that the build uses, from bottom to top:
+## Community Build
- - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from the Central Repository.
- - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`).
- - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code.
- - `strap`: a test layer used to check stability of the build.
+The community build is a central element for testing Scala releases. A community
+build can be launched for any Scala revision / commit. It first builds the Scala
+library and compiler and then uses that Scala version to build a large number of
+open-source projects from source.
-For each layer, the Scala library is compiled first and the compiler next.
-That means that any changes in the library can immediately be used in the
-compiler without an intermediate build. On the other hand, if building the
-library requires changes in the compiler, a new locker must be built if
-bootstrapping is still possible, or a new starr if it is not.
+Community builds run on the Scala Jenkins instance; the jobs are named
+`..-integrate-community-build`. The community build definitions specifying which
+projects are built are in the
+[scala/community-builds](https://github.com/scala/community-builds) repo.
diff --git a/build.sbt b/build.sbt
index 4962e4e41c..bd76167278 100644
--- a/build.sbt
+++ b/build.sbt
@@ -84,7 +84,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq(
val mappings = artifacts.toSeq.map { case (a, f) =>
val typeSuffix = a.`type` match {
case "pom" => "-pom.xml"
- case "bundle" | "jar" => ".jar"
+ case "jar" => ".jar"
case "doc" => "-docs.jar"
case tpe => s"-$tpe.${a.extension}"
}
@@ -99,6 +99,8 @@ lazy val publishSettings : Seq[Setting[_]] = Seq(
if (file.exists) List(Credentials(file))
else Nil
},
+ // Add a "default" Ivy configuration because sbt expects the Scala distribution to have one:
+ ivyConfigurations += Configuration("default", "Default", true, List(Configurations.Runtime), true),
publishMavenStyle := true
)
@@ -206,7 +208,9 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings +
// Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it
// directly to stdout
- outputStrategy in run := Some(StdoutOutput)
+ outputStrategy in run := Some(StdoutOutput),
+ Quiet.silenceScalaBinaryVersionWarning,
+ Quiet.silenceIvyUpdateInfoLogging
)
/** Extra post-processing for the published POM files. These are needed to create POMs that
@@ -236,8 +240,8 @@ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = {
) ++ extra) }
}
-/** Remove unwanted dependencies from the POM. */
-def removePomDependencies(deps: (String, String)*): Setting[_] = {
+/** Remove unwanted dependencies from the POM and ivy.xml. */
+def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq(
pomPostProcess := { n =>
val n2 = pomPostProcess.value.apply(n)
import scala.xml._
@@ -252,14 +256,40 @@ def removePomDependencies(deps: (String, String)*): Setting[_] = {
case n => Seq(n)
}
})).transform(Seq(n2)).head
+ },
+ deliverLocal := {
+ import scala.xml._
+ import scala.xml.transform._
+ val f = deliverLocal.value
+ val e = (new RuleTransformer(new RewriteRule {
+ override def transform(node: Node) = node match {
+ case e: Elem if e.label == "dependency" && {
+ val org = e.attribute("org").getOrElse("").toString
+ val name = e.attribute("name").getOrElse("").toString
+ deps.exists { case (g, a) =>
+ org == g && (name == a || name == (a + "_" + scalaBinaryVersion.value))
+ }
+ } => Seq.empty
+ case n => Seq(n)
+ }
+ })).transform(Seq(XML.loadFile(f))).head
+ XML.save(f.getAbsolutePath, e, xmlDecl = true)
+ f
}
-}
+)
val disableDocs = Seq[Setting[_]](
sources in (Compile, doc) := Seq.empty,
publishArtifact in (Compile, packageDoc) := false
)
+val disablePublishing = Seq[Setting[_]](
+ publishArtifact := false,
+ // The above is enough for Maven repos but it doesn't prevent publishing of ivy.xml files
+ publish := {},
+ publishLocal := {}
+)
+
lazy val setJarLocation: Setting[_] =
artifactPath in packageBin in Compile := {
// two lines below are copied over from sbt's sources:
@@ -397,43 +427,44 @@ lazy val compiler = configureAsSubproject(project)
"/project/description" -> <description>Compiler for the Scala Programming Language</description>,
"/project/packaging" -> <packaging>jar</packaging>
),
- apiURL := None,
- removePomDependencies(
- ("org.apache.ant", "ant"),
- ("org.scala-lang.modules", "scala-asm")
- )
+ apiURL := None
)
+ .settings(removePomDependencies(
+ ("org.apache.ant", "ant"),
+ ("org.scala-lang.modules", "scala-asm")
+ ): _*)
.dependsOn(library, reflect)
lazy val interactive = configureAsSubproject(project)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
name := "scala-compiler-interactive",
- description := "Scala Interactive Compiler",
- publishArtifact := false
+ description := "Scala Interactive Compiler"
)
.dependsOn(compiler)
lazy val repl = configureAsSubproject(project)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
connectInput in run := true,
- publishArtifact := false,
run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments.
)
.dependsOn(compiler, interactive)
lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline"))
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
libraryDependencies += jlineDep,
- name := "scala-repl-jline",
- publishArtifact := false
+ name := "scala-repl-jline"
)
.dependsOn(repl)
lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" / "repl-jline-embedded-src-dummy")
.settings(scalaSubprojectSettings: _*)
+ .settings(disablePublishing: _*)
.settings(
name := "scala-repl-jline-embedded",
// There is nothing to compile for this project. Instead we use the compile task to create
@@ -464,18 +495,18 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target"
val outdir = (classDirectory in Compile).value
JarJar(inputs, outdir, config)
}),
- publishArtifact := false,
connectInput in run := true
+
)
.dependsOn(replJline)
lazy val scaladoc = configureAsSubproject(project)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
name := "scala-compiler-doc",
description := "Scala Documentation Generator",
libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep),
- publishArtifact := false,
includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt"
)
.dependsOn(compiler)
@@ -497,10 +528,10 @@ lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".
.dependsOn(replJlineEmbedded)
.settings(clearSourceAndResourceDirectories: _*)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
name := "scala-partest-extras",
description := "Scala Compiler Testing Tool (compiler-specific extras)",
- publishArtifact := false,
libraryDependencies += partestDep,
unmanagedSourceDirectories in Compile := List(baseDirectory.value)
)
@@ -510,8 +541,8 @@ lazy val junit = project.in(file("test") / "junit")
.settings(clearSourceAndResourceDirectories: _*)
.settings(commonSettings: _*)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
- publishArtifact := false,
fork in Test := true,
libraryDependencies ++= Seq(junitDep, junitIntefaceDep),
testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"),
@@ -543,9 +574,9 @@ lazy val test = project
.configs(IntegrationTest)
.settings(commonSettings: _*)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(Defaults.itSettings: _*)
.settings(
- publishArtifact := false,
libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep, scalacheckDep),
unmanagedBase in IntegrationTest := baseDirectory.value / "files" / "lib",
unmanagedJars in IntegrationTest <+= (unmanagedBase) (j => Attributed.blank(j)) map(identity),
@@ -572,8 +603,8 @@ lazy val test = project
lazy val manual = configureAsSubproject(project)
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(
- publishArtifact := false,
libraryDependencies ++= Seq(scalaXmlDep, antDep),
classDirectory in Compile := (target in Compile).value / "classes"
)
@@ -643,9 +674,9 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di
lazy val root = (project in file("."))
.settings(disableDocs: _*)
+ .settings(disablePublishing: _*)
.settings(generateBuildCharacterFileSettings: _*)
.settings(
- publishArtifact := false,
publish := {},
publishLocal := {},
commands ++= ScriptCommands.all,
@@ -658,7 +689,8 @@ lazy val root = (project in file("."))
genprod.main(Array(dir.getPath))
GenerateAnyVals.run(dir.getAbsoluteFile)
state
- }
+ },
+ Quiet.silenceIvyUpdateInfoLogging
)
.aggregate(library, reflect, compiler, interactive, repl, replJline, replJlineEmbedded,
scaladoc, scalap, partestExtras, junit, libraryAll, scalaDist).settings(
diff --git a/project/Osgi.scala b/project/Osgi.scala
index d88c282383..36803c0e44 100644
--- a/project/Osgi.scala
+++ b/project/Osgi.scala
@@ -40,7 +40,6 @@ object Osgi {
},
packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), bundle).identityMap,
// Also create OSGi source bundles:
- artifact in (Compile, packageBin) ~= (_.copy(`type` = "bundle")),
packageOptions in (Compile, packageSrc) += Package.ManifestAttributes(
"Bundle-Name" -> (description.value + " Sources"),
"Bundle-SymbolicName" -> (bundleSymbolicName.value + ".source"),
diff --git a/project/Quiet.scala b/project/Quiet.scala
new file mode 100644
index 0000000000..de30ebe6ab
--- /dev/null
+++ b/project/Quiet.scala
@@ -0,0 +1,33 @@
+import sbt._
+import Keys._
+
+object Quiet {
+ // Workaround SBT issue described:
+ //
+ // https://github.com/scala/scala-dev/issues/100
+ def silenceScalaBinaryVersionWarning = ivyConfiguration := {
+ ivyConfiguration.value match {
+ case c: InlineIvyConfiguration =>
+ val delegate = c.log
+ val logger = new Logger {
+ override def trace(t: => Throwable): Unit = delegate.trace(t)
+ override def log(level: sbt.Level.Value, message: => String): Unit = {
+ level match {
+ case sbt.Level.Warn =>
+ val message0 = message
+ val newLevel = if (message.contains("differs from Scala binary version in project"))
+ delegate.log(sbt.Level.Debug, message)
+ else
+ delegate.log(level, message)
+ case _ => delegate.log(level, message)
+ }
+ }
+ override def success(message: => String): Unit = delegate.success(message)
+ }
+ new InlineIvyConfiguration(c.paths, c.resolvers, c.otherResolvers, c.moduleConfigurations, c.localOnly, c.lock, c.checksums, c.resolutionCacheDir, c.updateOptions, logger)
+ case x => x
+ }
+ }
+
+ def silenceIvyUpdateInfoLogging = logLevel in update := Level.Warn
+}
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 23e71c1f26..ac60cd3dd2 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,6 +1,6 @@
libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2"
-libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.0"
+libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.3"
libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bnd" % "2.4.1"
diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md
index 0a9c5dfe77..6653be2ce5 100644
--- a/spec/02-identifiers-names-and-scopes.md
+++ b/spec/02-identifiers-names-and-scopes.md
@@ -17,12 +17,12 @@ which are collectively called _bindings_.
Bindings of different kinds have a precedence defined on them:
1. Definitions and declarations that are local, inherited, or made
- available by a package clause in the same compilation unit where the
- definition occurs have highest precedence.
+ available by a package clause and also defined in the same compilation unit
+ as the reference, have highest precedence.
1. Explicit imports have next highest precedence.
1. Wildcard imports have next highest precedence.
-1. Definitions made available by a package clause not in the
- compilation unit where the definition occurs have lowest precedence.
+1. Definitions made available by a package clause, but not also defined in the
+ same compilation unit as the reference, have lowest precedence.
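For instance (a minimal sketch of rules 2 and 3), an explicitly imported name takes precedence over the same name made available by a wildcard import in the same scope, so the reference below is not ambiguous:

```scala
package p { object X { val x = 1 } }
package q { object X { val x = 2 } }

object Precedence {
  import p.X.x    // explicit import (rule 2)
  import q.X._    // wildcard import (rule 3) would also provide an `x`
  val result = x  // refers to `p.X.x`: the explicit import has higher precedence
}
```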
There are two different name spaces, one for [types](03-types.html#types)
and one for [terms](06-expressions.html#expressions). The same name may designate a
@@ -34,22 +34,18 @@ in some inner scope _shadows_ bindings of lower precedence in the
same scope as well as bindings of the same or lower precedence in outer
scopes.
-<!-- TODO: either the example, the spec, or the compiler is wrong
-
-Note that shadowing is only a partial order. In a situation like
+Note that shadowing is only a partial order. In the following example,
+neither binding of `x` shadows the other. Consequently, the
+reference to `x` in the last line of the block is ambiguous.
```scala
val x = 1
-{
- import p.x
+locally {
+ import p.X.x
x
}
```
-neither binding of `x` shadows the other. Consequently, the
-reference to `x` in the last line of the block above would be ambiguous.
--->
-
A reference to an unqualified (type- or term-) identifier $x$ is bound
by the unique binding, which
@@ -69,17 +65,36 @@ the member of the type $T$ of $e$ which has the name $x$ in the same
namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types).
The type of $e.x$ is the member type of the referenced entity in $T$.
+Binding precedence implies that the way source is bundled in files affects name resolution.
+In particular, imported names have higher precedence than names, defined in other files,
+that might otherwise be visible because they are defined in
+either the current package or an enclosing package.
+
+Note that a package definition is taken as lowest precedence, since packages
+are open and can be defined across arbitrary compilation units.
+
+```scala
+package util {
+ import scala.util
+ class Random
+ object Test extends App {
+ println(new util.Random) // scala.util.Random
+ }
+}
+```
+
###### Example
-Assume the following two definitions of objects named `X` in packages `P` and `Q`.
+Assume the following two definitions of objects named `X` in packages `p` and `q`
+in separate compilation units.
```scala
-package P {
+package p {
object X { val x = 1; val y = 2 }
}
-package Q {
- object X { val x = true; val y = "" }
+package q {
+ object X { val x = true; val y = false }
}
```
@@ -87,25 +102,27 @@ The following program illustrates different kinds of bindings and
precedences between them.
```scala
-package P { // `X' bound by package clause
-import Console._ // `println' bound by wildcard import
-object A {
- println("L4: "+X) // `X' refers to `P.X' here
- object B {
- import Q._ // `X' bound by wildcard import
- println("L7: "+X) // `X' refers to `Q.X' here
- import X._ // `x' and `y' bound by wildcard import
- println("L8: "+x) // `x' refers to `Q.X.x' here
- object C {
- val x = 3 // `x' bound by local definition
- println("L12: "+x) // `x' refers to constant `3' here
- { import Q.X._ // `x' and `y' bound by wildcard import
-// println("L14: "+x) // reference to `x' is ambiguous here
- import X.y // `y' bound by explicit import
- println("L16: "+y) // `y' refers to `Q.X.y' here
- { val x = "abc" // `x' bound by local definition
- import P.X._ // `x' and `y' bound by wildcard import
-// println("L19: "+y) // reference to `y' is ambiguous here
- println("L20: "+x) // `x' refers to string "abc" here
+package p { // `X' bound by package clause
+import Console._ // `println' bound by wildcard import
+object Y {
+ println(s"L4: $X") // `X' refers to `p.X' here
+ locally {
+ import q._ // `X' bound by wildcard import
+ println(s"L7: $X") // `X' refers to `q.X' here
+ import X._ // `x' and `y' bound by wildcard import
+ println(s"L9: $x") // `x' refers to `q.X.x' here
+ locally {
+ val x = 3 // `x' bound by local definition
+ println(s"L12: $x") // `x' refers to constant `3' here
+ locally {
+ import q.X._ // `x' and `y' bound by wildcard import
+// println(s"L15: $x") // reference to `x' is ambiguous here
+ import X.y // `y' bound by explicit import
+ println(s"L17: $y") // `y' refers to `q.X.y' here
+ locally {
+ val x = "abc" // `x' bound by local definition
+ import p.X._ // `x' and `y' bound by wildcard import
+// println(s"L21: $y") // reference to `y' is ambiguous here
+ println(s"L22: $x") // `x' refers to string "abc" here
}}}}}}
```
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index b5b0f93750..242e5d60b3 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -55,7 +55,7 @@ trait GenUtils {
mirrorCall(TermName("" + prefix), args: _*)
def scalaFactoryCall(name: TermName, args: Tree*): Tree =
- call(s"scala.$name.apply", args: _*)
+ call(s"_root_.scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
scalaFactoryCall(TermName(name), args: _*)
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index 3b91d28360..a5c4c7e0a3 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -28,7 +28,7 @@ trait NodePrinters {
var s = line substring 2
s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
s = s.replace(".apply", "")
- s = "([^\"])scala\\.collection\\.immutable\\.".r.replaceAllIn(s, "$1")
+ s = "([^\"])(_root_\\.)?scala\\.collection\\.immutable\\.".r.replaceAllIn(s, "$1")
s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
s = s.replace("immutable.this.Nil", "List()")
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index e99cce9186..c82ed68da8 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc
import java.net.URL
-import scala.tools.util.PathResolverFactory
+import scala.tools.util.PathResolver
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- lazy val classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs
+ lazy val classpathURLs: Seq[URL] = new PathResolver(this).resultAsURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 847c4cb2d1..7417d9c09d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -13,7 +13,7 @@ import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, U
import scala.collection.{immutable, mutable}
import io.{AbstractFile, Path, SourceReader}
import reporters.Reporter
-import util.{ClassFileLookup, ClassPath, StatisticsInfo, returning}
+import util.{ClassPath, StatisticsInfo, returning}
import scala.reflect.ClassTag
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile}
import scala.reflect.internal.pickling.PickleBuffer
@@ -30,7 +30,6 @@ import backend.jvm.GenBCode
import scala.language.postfixOps
import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
import scala.tools.nsc.classpath._
-import scala.tools.nsc.settings.ClassPathRepresentationType
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -54,12 +53,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = {
- settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
- case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath)
- }
- }
+ def rootLoader: LazyType = new loaders.PackageLoader(ClassPath.RootPackage, classPath)
override def toString = "compiler mirror"
}
implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
@@ -102,14 +96,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type ThisPlatform = JavaPlatform { val global: Global.this.type }
lazy val platform: ThisPlatform = new GlobalPlatform
- def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => flatClassPath
- case ClassPathRepresentationType.Recursive => recursiveClassPath
- }
-
- private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath
-
- private def flatClassPath: FlatClassPath = platform.flatClassPath
+ def classPath: ClassPath = platform.classPath
// sub-components --------------------------------------------------
@@ -394,15 +381,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug && (settings.verbose || currentRun.size < 5))
inform("[running phase " + name + " on " + unit + "]")
+ if (!cancelled(unit)) {
+ currentRun.informUnitStarting(this, unit)
+ try withCurrentUnitNoLog(unit)(task)
+ finally currentRun.advanceUnit()
+ }
+ }
+ final def withCurrentUnitNoLog(unit: CompilationUnit)(task: => Unit) {
val unit0 = currentUnit
try {
currentRun.currentUnit = unit
- if (!cancelled(unit)) {
- currentRun.informUnitStarting(this, unit)
- task
- }
- currentRun.advanceUnit()
+ task
} finally {
//assert(currentUnit == unit)
currentRun.currentUnit = unit0
@@ -768,17 +758,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Extend classpath of `platform` and rescan updated packages. */
def extendCompilerClassPath(urls: URL*): Unit = {
- if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat) {
- val urlClasspaths = urls.map(u => FlatClassPathFactory.newClassPath(AbstractFile.getURL(u), settings))
- val newClassPath = AggregateFlatClassPath.createAggregate(platform.flatClassPath +: urlClasspaths : _*)
- platform.currentFlatClassPath = Some(newClassPath)
- invalidateClassPathEntries(urls.map(_.getPath): _*)
- } else {
- val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
- platform.currentClassPath = Some(newClassPath)
- // Reload all specified jars into this compiler instance
- invalidateClassPathEntries(urls.map(_.getPath): _*)
- }
+ val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings))
+ val newClassPath = AggregateClassPath.createAggregate(platform.classPath +: urlClasspaths : _*)
+ platform.currentClassPath = Some(newClassPath)
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
}
// ------------ Invalidations ---------------------------------
@@ -810,28 +793,26 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* entries on the classpath.
*/
def invalidateClassPathEntries(paths: String*): Unit = {
- implicit object ClassPathOrdering extends Ordering[ClassFileLookup[AbstractFile]] {
- def compare(a:ClassFileLookup[AbstractFile], b:ClassFileLookup[AbstractFile]) = a.asClassPathString compare b.asClassPathString
+ implicit object ClassPathOrdering extends Ordering[ClassPath] {
+ def compare(a: ClassPath, b: ClassPath): Int = a.asClassPathString compareTo b.asClassPathString
}
val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
- def assoc(path: String): Option[(ClassFileLookup[AbstractFile], ClassFileLookup[AbstractFile])] = {
- def origin(lookup: ClassFileLookup[AbstractFile]): Option[String] = lookup match {
- case cp: ClassPath[_] => cp.origin
+ def assoc(path: String): Option[(ClassPath, ClassPath)] = {
+ def origin(lookup: ClassPath): Option[String] = lookup match {
case cp: JFileDirectoryLookup[_] => Some(cp.dir.getPath)
case cp: ZipArchiveFileLookup[_] => Some(cp.zipFile.getPath)
case _ => None
}
- def entries(lookup: ClassFileLookup[AbstractFile]): Seq[ClassFileLookup[AbstractFile]] = lookup match {
- case cp: ClassPath[_] => cp.entries
- case cp: AggregateFlatClassPath => cp.aggregates
- case cp: FlatClassPath => Seq(cp)
+ def entries(lookup: ClassPath): Seq[ClassPath] = lookup match {
+ case cp: AggregateClassPath => cp.aggregates
+ case cp: ClassPath => Seq(cp)
}
val dir = AbstractFile.getDirectory(path) // if path is a `jar`, this is a FileZipArchive (isDirectory is true)
val canonical = dir.canonicalPath // this is the canonical path of the .jar
- def matchesCanonical(e: ClassFileLookup[AbstractFile]) = origin(e) match {
+ def matchesCanonical(e: ClassPath) = origin(e) match {
case Some(opath) =>
AbstractFile.getDirectory(opath).canonicalPath == canonical
case None =>
@@ -839,7 +820,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
entries(classPath) find matchesCanonical match {
case Some(oldEntry) =>
- Some(oldEntry -> ClassFileLookup.createForFile(dir, classPath, settings))
+ Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings))
case None =>
error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
None
@@ -849,19 +830,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (subst.nonEmpty) {
platform updateClassPath subst
informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
- def mkClassPath(elems: Iterable[ClassFileLookup[AbstractFile]]): ClassFileLookup[AbstractFile] =
+ def mkClassPath(elems: Iterable[ClassPath]): ClassPath =
if (elems.size == 1) elems.head
- else ClassFileLookup.createAggregate(elems, classPath)
+ else AggregateClassPath.createAggregate(elems.toSeq: _*)
val oldEntries = mkClassPath(subst.keys)
val newEntries = mkClassPath(subst.values)
classPath match {
- case rcp: ClassPath[_] => mergeNewEntriesRecursive(
- newEntries.asInstanceOf[ClassPath[AbstractFile]], RootClass, Some(rcp), Some(oldEntries.asInstanceOf[ClassPath[AbstractFile]]),
- invalidated, failed)
-
- case fcp: FlatClassPath => mergeNewEntriesFlat(
+ case cp: ClassPath => mergeNewEntries(
RootClass, "",
- oldEntries.asInstanceOf[FlatClassPath], newEntries.asInstanceOf[FlatClassPath], fcp,
+ oldEntries, newEntries, cp,
invalidated, failed)
}
}
@@ -872,69 +849,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
show("could not invalidate system packages", failed)
}
- /** Merges new classpath entries into the symbol table
- *
- * @param newEntries The new classpath entries
- * @param root The root symbol to be resynced (a package class)
- * @param allEntries Optionally, the corresponding package in the complete current classpath
- * @param oldEntries Optionally, the corresponding package in the old classpath entries
- * @param invalidated A listbuffer collecting the invalidated package classes
- * @param failed A listbuffer collecting system package classes which could not be invalidated
- *
- * The merging strategy is determined by the absence or presence of classes and packages.
- *
- * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
- * exists in allEntries. Otherwise it is removed.
- * Otherwise, the action is determined by the following matrix, with columns:
- *
- * old sym action
- * + + recurse into all child packages of newEntries
- * - + invalidate root
- * - - create and enter root
- *
- * Here, old means classpath, and sym means symboltable. + is presence of an entry in its column, - is absence.
- */
- private def mergeNewEntriesRecursive(newEntries: ClassPath[AbstractFile], root: ClassSymbol,
- allEntries: Option[ClassPath[AbstractFile]], oldEntries: Option[ClassPath[AbstractFile]],
- invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
- ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
-
- val getPackageName: ClassPath[AbstractFile] => String = _.name
- def hasClasses(cp: Option[ClassPath[AbstractFile]]) = cp.isDefined && cp.get.classes.nonEmpty
- def invalidateOrRemove(root: ClassSymbol) = {
- allEntries match {
- case Some(cp) => root setInfo new loaders.PackageLoader(cp)
- case None => root.owner.info.decls unlink root.sourceModule
- }
- invalidated += root
- }
- def subPackage(cp: ClassPath[AbstractFile], name: String): Option[ClassPath[AbstractFile]] =
- cp.packages find (cp1 => getPackageName(cp1) == name)
-
- val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
- if (classesFound && !isSystemPackageClass(root)) {
- invalidateOrRemove(root)
- } else {
- if (classesFound) {
- if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
- else failed += root
- }
- if (oldEntries.isEmpty) invalidateOrRemove(root)
- else
- for (pstr <- newEntries.packages.map(getPackageName)) {
- val pname = newTermName(pstr)
- val pkg = (root.info decl pname) orElse {
- // package does not exist in symbol table, create symbol to track it
- assert(subPackage(oldEntries.get, pstr).isEmpty)
- loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
- }
- mergeNewEntriesRecursive(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
- subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
- invalidated, failed)
- }
- }
- }
-
/**
* Merges new classpath entries into the symbol table
*
@@ -953,20 +867,19 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* Otherwise, sub-packages in newEntries are looked up in the symbol table (created if
* non-existent) and the merge function is called recursively.
*/
- private def mergeNewEntriesFlat(
- packageClass: ClassSymbol, fullPackageName: String,
- oldEntries: FlatClassPath, newEntries: FlatClassPath, fullClasspath: FlatClassPath,
- invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]): Unit = {
+ private def mergeNewEntries(packageClass: ClassSymbol, fullPackageName: String,
+ oldEntries: ClassPath, newEntries: ClassPath, fullClasspath: ClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]): Unit = {
ifDebug(informProgress(s"syncing $packageClass, $oldEntries -> $newEntries"))
- def packageExists(cp: FlatClassPath): Boolean = {
+ def packageExists(cp: ClassPath): Boolean = {
val (parent, _) = PackageNameUtils.separatePkgAndClassNames(fullPackageName)
cp.packages(parent).exists(_.name == fullPackageName)
}
def invalidateOrRemove(pkg: ClassSymbol) = {
if (packageExists(fullClasspath))
- pkg setInfo new loaders.PackageLoaderUsingFlatClassPath(fullPackageName, fullClasspath)
+ pkg setInfo new loaders.PackageLoader(fullPackageName, fullClasspath)
else
pkg.owner.info.decls unlink pkg.sourceModule
invalidated += pkg
@@ -984,9 +897,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val (_, subPackageName) = PackageNameUtils.separatePkgAndClassNames(p.name)
val subPackage = packageClass.info.decl(newTermName(subPackageName)) orElse {
// package does not exist in symbol table, create a new symbol
- loaders.enterPackage(packageClass, subPackageName, new loaders.PackageLoaderUsingFlatClassPath(p.name, fullClasspath))
+ loaders.enterPackage(packageClass, subPackageName, new loaders.PackageLoader(p.name, fullClasspath))
}
- mergeNewEntriesFlat(
+ mergeNewEntries(
subPackage.moduleClass.asClass, p.name,
oldEntries, newEntries, fullClasspath,
invalidated, failed)
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index bf93ad30bc..1f66657d8d 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -8,10 +8,8 @@ package tools.nsc
import io.{ AbstractFile, Directory, File, Path }
import java.io.IOException
-import scala.tools.nsc.classpath.DirectoryFlatClassPath
+import scala.tools.nsc.classpath.DirectoryClassPath
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
@@ -115,10 +113,7 @@ class ScriptRunner extends HasCompileSocket {
}
def hasClassToRun(d: Directory): Boolean = {
- val cp = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d))
- case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile)
- }
+ val cp = DirectoryClassPath(d.jfile)
cp.findClass(mainClass).isDefined
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 0e2f059a36..dc63b335cc 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,11 +7,9 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import scala.tools.nsc.classpath.{AggregateFlatClassPath, FlatClassPath}
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.{ClassFileLookup, ClassPath, MergedClassPath}
-import scala.tools.util.FlatClassPathResolver
+import scala.tools.nsc.classpath.AggregateClassPath
import scala.tools.util.PathResolver
+import scala.tools.nsc.util.ClassPath
trait JavaPlatform extends Platform {
val global: Global
@@ -19,38 +17,20 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
-
- def classPath: ClassPath[AbstractFile] = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
- "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.")
+ private[nsc] var currentClassPath: Option[ClassPath] = None
+ private[nsc] def classPath: ClassPath = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
- private[nsc] var currentFlatClassPath: Option[FlatClassPath] = None
-
- private[nsc] def flatClassPath: FlatClassPath = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
- "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.")
-
- if (currentFlatClassPath.isEmpty) currentFlatClassPath = Some(new FlatClassPathResolver(settings).result)
- currentFlatClassPath.get
- }
-
/** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassFileLookup[AbstractFile], ClassFileLookup[AbstractFile]]) = global.classPath match {
- case cp: ClassPath[AbstractFile] =>
- val s = subst.asInstanceOf[Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]]
- currentClassPath = Some(new MergedClassPath(cp.entries map (e => s.getOrElse(e, e)), cp.context))
-
- case AggregateFlatClassPath(entries) =>
- val s = subst.asInstanceOf[Map[FlatClassPath, FlatClassPath]]
- currentFlatClassPath = Some(AggregateFlatClassPath(entries map (e => s.getOrElse(e, e))))
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = global.classPath match {
+ case AggregateClassPath(entries) =>
+ currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e))))
- case cp: FlatClassPath =>
- currentFlatClassPath = Some(subst.getOrElse(cp, cp).asInstanceOf[FlatClassPath])
+ case cp: ClassPath =>
+ currentClassPath = Some(subst.getOrElse(cp, cp))
}
def platformPhases = List(
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 369bcc44ed..e464768bb3 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -6,9 +6,8 @@
package scala.tools.nsc
package backend
-import util.{ClassFileLookup, ClassPath}
import io.AbstractFile
-import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.util.ClassPath
/** The platform dependent pieces of Global.
*/
@@ -16,14 +15,11 @@ trait Platform {
val symbolTable: symtab.SymbolTable
import symbolTable._
- /** The old, recursive implementation of compiler classpath. */
- def classPath: ClassPath[AbstractFile]
-
/** The new implementation of compiler classpath. */
- private[nsc] def flatClassPath: FlatClassPath
+ private[nsc] def classPath: ClassPath
/** Update classpath with a substitution that maps entries to entries */
- def updateClassPath(subst: Map[ClassFileLookup[AbstractFile], ClassFileLookup[AbstractFile]])
+ def updateClassPath(subst: Map[ClassPath, ClassPath])
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
index 01206aa6eb..4287c24dc8 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
@@ -42,15 +42,15 @@ object BackendReporting {
def assertionError(message: String): Nothing = throw new AssertionError(message)
implicit class RightBiasedEither[A, B](val v: Either[A, B]) extends AnyVal {
- def map[U](f: B => U) = v.right.map(f)
- def flatMap[BB](f: B => Either[A, BB]) = v.right.flatMap(f)
+ def map[C](f: B => C): Either[A, C] = v.right.map(f)
+ def flatMap[C](f: B => Either[A, C]): Either[A, C] = v.right.flatMap(f)
def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match {
case Left(_) => v
case Right(e) => if (f(e)) v else Left(empty) // scalaz.\/ requires an implicit Monoid m to get m.empty
}
- def foreach[U](f: B => U) = v.right.foreach(f)
+ def foreach[U](f: B => U): Unit = v.right.foreach(f)
- def getOrElse[BB >: B](alt: => BB): BB = v.right.getOrElse(alt)
+ def getOrElse[C >: B](alt: => C): C = v.right.getOrElse(alt)
/**
* Get the value, fail with an assertion if this is an error.
@@ -101,11 +101,14 @@ object BackendReporting {
else ""
}
- case MethodNotFound(name, descriptor, ownerInternalName, missingClasses) =>
- val (javaDef, others) = missingClasses.partition(_.definedInJavaSource)
- s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." +
- (if (others.isEmpty) "" else others.map(_.internalName).mkString("\nNote that the following parent classes could not be found on the classpath: ", ", ", "")) +
- (if (javaDef.isEmpty) "" else javaDef.map(_.internalName).mkString("\nNote that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: ", ",", ""))
+ case MethodNotFound(name, descriptor, ownerInternalName, missingClass) =>
+ val missingClassWarning = missingClass match {
+ case None => ""
+ case Some(c) =>
+ if (c.definedInJavaSource) s"\nNote that the parent class ${c.internalName} is defined in a Java source (mixed compilation), no bytecode is available."
+ else s"\nNote that the parent class ${c.internalName} could not be found on the classpath."
+ }
+ s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." + missingClassWarning
case FieldNotFound(name, descriptor, ownerInternalName, missingClass) =>
s"The field node $name$descriptor could not be found because the classfile $ownerInternalName cannot be found on the classpath." +
@@ -127,7 +130,7 @@ object BackendReporting {
}
case class ClassNotFound(internalName: InternalName, definedInJavaSource: Boolean) extends MissingBytecodeWarning
- case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClasses: List[ClassNotFound]) extends MissingBytecodeWarning {
+ case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning {
def isArrayMethod = ownerInternalNameOrArrayDescriptor.charAt(0) == '['
}
case class FieldNotFound(name: String, descriptor: String, ownerInternalName: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning
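The `RightBiasedEither` wrapper above supplies the `map`/`flatMap`/`foreach` that pre-2.12 `Either` lacks, so it can drive for-comprehensions (as `CallGraph` does further below). The following is a minimal, self-contained sketch of that pattern only; the demo object and `parseInt` are illustrative stand-ins, not compiler code.

```scala
// Minimal sketch of the right-biased Either pattern (pre-2.12 Either is unbiased).
// The wrapper mirrors the one above; parseInt is a made-up example function.
object RightBiasedEitherSketch {
  implicit class RightBiased[A, B](val v: Either[A, B]) extends AnyVal {
    def map[C](f: B => C): Either[A, C] = v.right.map(f)
    def flatMap[C](f: B => Either[A, C]): Either[A, C] = v.right.flatMap(f)
    def foreach[U](f: B => U): Unit = v.right.foreach(f)
  }

  def parseInt(s: String): Either[String, Int] =
    try Right(s.toInt) catch { case _: NumberFormatException => Left(s"not a number: $s") }

  def main(args: Array[String]): Unit = {
    // Desugars to parseInt("1").flatMap(a => parseInt("41").map(b => a + b)),
    // short-circuiting on the first Left.
    val sum = for { a <- parseInt("1"); b <- parseInt("41") } yield a + b
    println(sum) // Right(42)
  }
}
```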
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
index 0b53ea2fb1..1feca56923 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
@@ -107,6 +107,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
lazy val juHashMapRef : ClassBType = classBTypeFromSymbol(JavaUtilHashMap) // java/util/HashMap
lazy val sbScalaBeanInfoRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.beans.ScalaBeanInfo])
lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda])
+ lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandle])
lazy val jliMethodHandlesRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodHandles])
lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(exitingPickler(getRequiredClass("java.lang.invoke.MethodHandles.Lookup"))) // didn't find a reliable non-stringly-typed way that works for inner classes in the backend
lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType])
@@ -320,6 +321,7 @@ trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] {
def jliCallSiteRef : ClassBType
def jliMethodTypeRef : ClassBType
def jliSerializedLambdaRef : ClassBType
+ def jliMethodHandleRef : ClassBType
def jliMethodHandlesLookupRef : ClassBType
def srBoxesRunTimeRef : ClassBType
def srBoxedUnitRef : ClassBType
@@ -383,6 +385,7 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes:
def juHashMapRef : ClassBType = _coreBTypes.juHashMapRef
def sbScalaBeanInfoRef : ClassBType = _coreBTypes.sbScalaBeanInfoRef
def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef
+ def jliMethodHandleRef : ClassBType = _coreBTypes.jliMethodHandleRef
def jliMethodHandlesRef : ClassBType = _coreBTypes.jliMethodHandlesRef
def jliMethodHandlesLookupRef : ClassBType = _coreBTypes.jliMethodHandlesLookupRef
def jliMethodTypeRef : ClassBType = _coreBTypes.jliMethodTypeRef
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index 340fdc849a..3520d57599 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -135,7 +135,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
return
}
else {
- try { withCurrentUnit(item.cunit)(visit(item)) }
+ try { withCurrentUnitNoLog(item.cunit)(visit(item)) }
catch {
case ex: Throwable =>
ex.printStackTrace()
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
index eaf82f5c65..16590ec75c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
@@ -10,11 +10,10 @@ package opt
import scala.tools.asm
import asm.tree._
import scala.collection.JavaConverters._
-import scala.collection.concurrent
+import scala.collection.{concurrent, mutable}
import scala.tools.asm.Attribute
import scala.tools.nsc.backend.jvm.BackendReporting._
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.ClassFileLookup
+import scala.tools.nsc.util.ClassPath
import BytecodeUtils._
import ByteCodeRepository._
import BTypes.InternalName
@@ -26,7 +25,7 @@ import java.util.concurrent.atomic.AtomicLong
*
* @param classPath The compiler classpath where classfiles are searched and read from.
*/
-class ByteCodeRepository[BT <: BTypes](val classPath: ClassFileLookup[AbstractFile], val btypes: BT) {
+class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) {
import btypes._
/**
@@ -132,38 +131,135 @@ class ByteCodeRepository[BT <: BTypes](val classPath: ClassFileLookup[AbstractFi
* The method node for a method matching `name` and `descriptor`, accessed in class `ownerInternalNameOrArrayDescriptor`.
* The declaration of the method may be in one of the parents.
*
- * TODO: make sure we always return the right method, the one being invoked. write tests.
- * - if there's an abstract and a concrete one. could possibly somehow the abstract be returned?
- * - with traits and default methods, if there is more than one default method inherited and
- * no override: what should be returned? We should not just inline one of the two.
+ * Note that the JVM spec performs method lookup in two steps: resolution and selection.
+ *
+ * Method resolution, defined in jvms-5.4.3.3 and jvms-5.4.3.4, is the first step and is identical
+ * for all invocation styles (virtual, interface, special, static). If C is the receiver class
+ * in the invocation instruction:
+ * 1 find a matching method (name and descriptor) in C
+ * 2 then in C's superclasses
+ * 3 then find the maximally-specific matching superinterface methods, succeed if there's a
+ * single non-abstract one. static and private methods in superinterfaces are not considered.
+ * 4 then pick a random non-static, non-private superinterface method.
+ * 5 then fail.
+ *
+ * Note that for an `invokestatic` instruction, a method reference `B.m` may resolve to `A.m`, if
+ * class `B` doesn't specify a matching method `m`, but the parent `A` does.
+ *
+ * Selection depends on the invocation style and is defined in jvms-6.5.
+ * - invokestatic: invokes the resolved method
+ * - invokevirtual / invokeinterface: searches for an override of the resolved method starting
+ * at the dynamic receiver type. the search procedure is basically the same as in resolution,
+ * but it fails at 4 instead of picking a superinterface method at random.
+ * - invokespecial: if C is the receiver in the invocation instruction, searches for an override
+ * of the resolved method starting at
+ * - the superclass of the current class, if C is a superclass of the current class
+ * - C otherwise
+ * again, the search procedure is the same.
+ *
+ * This method implements method *resolution*. Whether or not the returned method is
+ * actually invoked at runtime depends on the invocation instruction and the class hierarchy, so
+ * the users (e.g. the inliner) have to be aware of method selection.
+ *
+ * Note that the returned method may be abstract (ACC_ABSTRACT), native (ACC_NATIVE) or signature
+ * polymorphic (methods `invoke` and `invokeExact` in class `MethodHandle`).
*
* @return The [[MethodNode]] of the requested method and the [[InternalName]] of its declaring
- * class, or an error message if the method could not be found.
+ * class, or an error message if the method could not be found. An error message is also
+ * returned if method resolution results in multiple default methods.
*/
def methodNode(ownerInternalNameOrArrayDescriptor: String, name: String, descriptor: String): Either[MethodNotFound, (MethodNode, InternalName)] = {
- // on failure, returns a list of class names that could not be found on the classpath
- def methodNodeImpl(ownerInternalName: InternalName): Either[List[ClassNotFound], (MethodNode, InternalName)] = {
- classNode(ownerInternalName) match {
- case Left(e) => Left(List(e))
- case Right(c) =>
- c.methods.asScala.find(m => m.name == name && m.desc == descriptor) match {
- case Some(m) => Right((m, ownerInternalName))
- case None => findInParents(Option(c.superName) ++: c.interfaces.asScala.toList, Nil)
- }
+ def findMethod(c: ClassNode): Option[MethodNode] = c.methods.asScala.find(m => m.name == name && m.desc == descriptor)
+
+ // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.9: "In Java SE 8, the only
+ // signature polymorphic methods are the invoke and invokeExact methods of the class MethodHandle."
+ def isSignaturePolymorphic(owner: InternalName) = owner == coreBTypes.jliMethodHandleRef.internalName && (name == "invoke" || name == "invokeExact")
+
+ // Note: if `owner` is an interface, in the first iteration we search for a matching member in the interface itself.
+ // If that fails, the recursive invocation checks in the superclass (which is Object) with `publicInstanceOnly == true`.
+ // This is specified in jvms-5.4.3.4: interface method resolution only returns public, non-static methods of Object.
+ def findInSuperClasses(owner: ClassNode, publicInstanceOnly: Boolean = false): Either[ClassNotFound, Option[(MethodNode, InternalName)]] = {
+ findMethod(owner) match {
+ case Some(m) if !publicInstanceOnly || (isPublicMethod(m) && !isStaticMethod(m)) => Right(Some((m, owner.name)))
+ case None =>
+ if (isSignaturePolymorphic(owner.name)) Right(Some((owner.methods.asScala.find(_.name == name).get, owner.name)))
+ else if (owner.superName == null) Right(None)
+ else classNode(owner.superName).flatMap(findInSuperClasses(_, isInterface(owner)))
}
}
- // find the MethodNode in one of the parent classes
- def findInParents(parents: List[InternalName], failedClasses: List[ClassNotFound]): Either[List[ClassNotFound], (MethodNode, InternalName)] = parents match {
- case x :: xs => methodNodeImpl(x).left.flatMap(failed => findInParents(xs, failed ::: failedClasses))
- case Nil => Left(failedClasses)
+ def findInInterfaces(initialOwner: ClassNode): Either[ClassNotFound, Option[(MethodNode, InternalName)]] = {
+ val visited = mutable.Set.empty[InternalName]
+ val found = mutable.ListBuffer.empty[(MethodNode, ClassNode)]
+
+ def findIn(owner: ClassNode): Option[ClassNotFound] = {
+ for (i <- owner.interfaces.asScala if !visited(i)) classNode(i) match {
+ case Left(e) => return Some(e)
+ case Right(c) =>
+ visited += i
+ // private and static methods are excluded, see jvms-5.4.3.3
+ for (m <- findMethod(c) if !isPrivateMethod(m) && !isStaticMethod(m)) found += ((m, c))
+ val recursionResult = findIn(c)
+ if (recursionResult.isDefined) return recursionResult
+ }
+ None
+ }
+
+ findIn(initialOwner)
+
+ val result =
+ if (found.size <= 1) found.headOption
+ else {
+ val maxSpecific = found.filterNot({
+ case (method, owner) =>
+ isAbstractMethod(method) || {
+ val ownerTp = classBTypeFromClassNode(owner)
+ found exists {
+ case (other, otherOwner) =>
+ (other ne method) && {
+ val otherTp = classBTypeFromClassNode(otherOwner)
+ otherTp.isSubtypeOf(ownerTp).get
+ }
+ }
+ }
+ })
+ // (*) note that if there's no single, non-abstract, maximally-specific method, the jvm
+ // method resolution (jvms-5.4.3.3) returns any of the non-private, non-static parent
+ // methods at random (abstract or concrete).
+ // we chose not to do this here, to prevent the inliner from potentially inlining the
+ // wrong method. in other words, we guarantee that a concrete method is only returned if
+ // it resolves deterministically.
+ // however, there may be multiple abstract methods inherited. in this case we *do* want
+ // to return a result to allow performing accessibility checks in the inliner. note that
+ // for accessibility it does not matter which of these methods is returned, as they are all
+ // non-private (i.e., public, protected is not possible, jvms-4.1).
+ // the remaining case (when there's no max-specific method, but some non-abstract one)
+ // does not occur in bytecode generated by scalac or javac. we return no result in this
+ // case. this may at worst prevent some optimizations from happening.
+ if (maxSpecific.size == 1) maxSpecific.headOption
+ else if (found.forall(p => isAbstractMethod(p._1))) found.headOption // (*)
+ else None
+ }
+ Right(result.map(p => (p._1, p._2.name)))
}
// In a MethodInsnNode, the `owner` field may be an array descriptor, for example when invoking `clone`. We don't have a method node to return in this case.
- if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[')
- Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, Nil))
- else
- methodNodeImpl(ownerInternalNameOrArrayDescriptor).left.map(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, _))
+ if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[') {
+ Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, None))
+ } else {
+ def notFound(cnf: Option[ClassNotFound]) = Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, cnf))
+ val res: Either[ClassNotFound, Option[(MethodNode, InternalName)]] = classNode(ownerInternalNameOrArrayDescriptor).flatMap(c =>
+ findInSuperClasses(c) flatMap {
+ case None => findInInterfaces(c)
+ case res => Right(res)
+ }
+ )
+ res match {
+ case Left(e) => notFound(Some(e))
+ case Right(None) => notFound(None)
+ case Right(Some(res)) => Right(res)
+ }
+ }
}
private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = {
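The resolution order documented in `methodNode` above (the owner class, then its superclasses, then superinterface candidates, succeeding only when a single concrete candidate remains) can be illustrated with a toy model. The sketch below is not the repository's implementation and omits the narrowing to maximally-specific owners; all names in it are hypothetical.

```scala
// Toy model of the lookup order described in methodNode above: search the class
// and its superclasses first, then superinterface candidates, succeeding only if
// a single non-abstract candidate remains (the real code additionally narrows the
// candidates to maximally-specific owners). Hypothetical types, not compiler code.
object ResolutionOrderSketch {
  case class Meth(owner: String, name: String, isAbstract: Boolean)
  case class Cls(name: String, superClass: Option[Cls], interfaces: List[Cls], methods: List[Meth])

  def findInSuperClasses(c: Cls, name: String): Option[Meth] =
    c.methods.find(_.name == name).orElse(c.superClass.flatMap(findInSuperClasses(_, name)))

  def findInInterfaces(c: Cls, name: String): Option[Meth] = {
    def allInterfaces(x: Cls): List[Cls] =
      x.interfaces ++ x.interfaces.flatMap(allInterfaces) ++ x.superClass.toList.flatMap(allInterfaces)
    val candidates = allInterfaces(c).distinct.flatMap(_.methods.find(_.name == name))
    candidates.filterNot(_.isAbstract) match {
      case single :: Nil => Some(single)          // a unique concrete (default) method
      case Nil           => candidates.headOption // only abstract candidates: any one suffices
      case _             => None                  // ambiguous concrete candidates: give up
    }
  }

  def resolve(c: Cls, name: String): Option[Meth] =
    findInSuperClasses(c, name).orElse(findInInterfaces(c, name))

  def main(args: Array[String]): Unit = {
    val i = Cls("I", None, Nil, List(Meth("I", "m", isAbstract = true)))
    val j = Cls("J", None, Nil, List(Meth("J", "m", isAbstract = false)))
    val a = Cls("A", None, List(i, j), Nil)
    println(resolve(a, "m")) // Some(Meth(J,m,false)): the single concrete interface method
  }
}
```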
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
index f8c16e34bd..63906d80e5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -99,6 +99,10 @@ object BytecodeUtils {
methodNode.name == INSTANCE_CONSTRUCTOR_NAME || methodNode.name == CLASS_CONSTRUCTOR_NAME
}
+ def isPublicMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_PUBLIC) != 0
+
+ def isPrivateMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_PRIVATE) != 0
+
def isStaticMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_STATIC) != 0
def isAbstractMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_ABSTRACT) != 0
@@ -107,10 +111,12 @@ object BytecodeUtils {
def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_NATIVE) != 0
- def hasCallerSensitiveAnnotation(methodNode: MethodNode) = methodNode.visibleAnnotations != null && methodNode.visibleAnnotations.asScala.exists(_.desc == "Lsun/reflect/CallerSensitive;")
+ def hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = methodNode.visibleAnnotations != null && methodNode.visibleAnnotations.asScala.exists(_.desc == "Lsun/reflect/CallerSensitive;")
def isFinalClass(classNode: ClassNode): Boolean = (classNode.access & ACC_FINAL) != 0
+ def isInterface(classNode: ClassNode): Boolean = (classNode.access & ACC_INTERFACE) != 0
+
def isFinalMethod(methodNode: MethodNode): Boolean = (methodNode.access & (ACC_FINAL | ACC_PRIVATE | ACC_STATIC)) != 0
def isStrictfpMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_STRICT) != 0
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index 156c80d5a1..d241acf7b1 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -131,19 +131,19 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
(method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)]
(declarationClassNode, source) <- byteCodeRepository.classNodeAndSource(declarationClass): Either[OptimizerWarning, (ClassNode, Source)]
} yield {
- val declarationClassBType = classBTypeFromClassNode(declarationClassNode)
- val info = analyzeCallsite(method, declarationClassBType, call, source)
- import info._
- Callee(
- callee = method,
- calleeDeclarationClass = declarationClassBType,
- safeToInline = safeToInline,
- canInlineFromSource = canInlineFromSource,
- annotatedInline = annotatedInline,
- annotatedNoInline = annotatedNoInline,
- samParamTypes = info.samParamTypes,
- calleeInfoWarning = warning)
- }
+ val declarationClassBType = classBTypeFromClassNode(declarationClassNode)
+ val info = analyzeCallsite(method, declarationClassBType, call, source)
+ import info._
+ Callee(
+ callee = method,
+ calleeDeclarationClass = declarationClassBType,
+ safeToInline = safeToInline,
+ canInlineFromSource = canInlineFromSource,
+ annotatedInline = annotatedInline,
+ annotatedNoInline = annotatedNoInline,
+ samParamTypes = info.samParamTypes,
+ calleeInfoWarning = warning)
+ }
val argInfos = computeArgInfos(callee, call, prodCons)
@@ -388,12 +388,11 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
* @param calleeInfoWarning An inliner warning if some information was not available while
* gathering the information about this callee.
*/
- final case class Callee(
- callee: MethodNode, calleeDeclarationClass: btypes.ClassBType,
- safeToInline: Boolean, canInlineFromSource: Boolean,
- annotatedInline: Boolean, annotatedNoInline: Boolean,
- samParamTypes: IntMap[btypes.ClassBType],
- calleeInfoWarning: Option[CalleeInfoWarning]) {
+ final case class Callee(callee: MethodNode, calleeDeclarationClass: btypes.ClassBType,
+ safeToInline: Boolean, canInlineFromSource: Boolean,
+ annotatedInline: Boolean, annotatedNoInline: Boolean,
+ samParamTypes: IntMap[btypes.ClassBType],
+ calleeInfoWarning: Option[CalleeInfoWarning]) {
override def toString = s"Callee($calleeDeclarationClass.${callee.name})"
}
diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala
index f97d97548e..6b435542a3 100644
--- a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala
@@ -12,15 +12,16 @@ import scala.tools.nsc.util.ClassRepresentation
/**
* A classpath unifying multiple class- and sourcepath entries.
- * Flat classpath can obtain entries for classes and sources independently
+ * The classpath can obtain entries for classes and sources independently,
* so it tries to perform operations efficiently, iterating only those collections
* that are needed at a given moment and only as far as necessary.
+ *
* @param aggregates classpath instances containing entries which this class processes
*/
-case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath {
+case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath {
override def findClassFile(className: String): Option[AbstractFile] = {
@tailrec
- def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] =
+ def find(aggregates: Seq[ClassPath]): Option[AbstractFile] =
if (aggregates.nonEmpty) {
val classFile = aggregates.head.findClassFile(className)
if (classFile.isDefined) classFile
@@ -30,22 +31,24 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
find(aggregates)
}
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
- val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
-
+ override def findClass(className: String): Option[ClassRepresentation] = {
@tailrec
- def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] =
+ def findEntry(aggregates: Seq[ClassPath], isSource: Boolean): Option[ClassRepresentation] =
if (aggregates.nonEmpty) {
- val entry = getEntries(aggregates.head).find(_.name == simpleClassName)
+ val entry = aggregates.head.findClass(className) match {
+ case s @ Some(_: SourceFileEntry) if isSource => s
+ case s @ Some(_: ClassFileEntry) if !isSource => s
+ case _ => None
+ }
if (entry.isDefined) entry
- else findEntry(aggregates.tail, getEntries)
+ else findEntry(aggregates.tail, isSource)
} else None
- val classEntry = findEntry(aggregates, classesGetter(pkg))
- val sourceEntry = findEntry(aggregates, sourcesGetter(pkg))
+ val classEntry = findEntry(aggregates, isSource = false)
+ val sourceEntry = findEntry(aggregates, isSource = true)
(classEntry, sourceEntry) match {
- case (Some(c), Some(s)) => Some(ClassAndSourceFilesEntry(c.file, s.file))
+ case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file))
case (c @ Some(_), _) => c
case (_, s) => s
}
@@ -63,16 +66,16 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
}
override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] =
- getDistinctEntries(classesGetter(inPackage))
+ getDistinctEntries(_.classes(inPackage))
override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] =
- getDistinctEntries(sourcesGetter(inPackage))
+ getDistinctEntries(_.sources(inPackage))
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ override private[nsc] def list(inPackage: String): ClassPathEntries = {
val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip
val distinctPackages = packages.flatten.distinct
val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
- FlatClassPathEntries(distinctPackages, distinctClassesAndSources)
+ ClassPathEntries(distinctPackages, distinctClassesAndSources)
}
/**
@@ -80,11 +83,11 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
* creates an entry containing both of them. If there is more than one class or source entry
* for the same class, the first entry of each type found on the classpath is used.
*/
- private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = {
+ private def mergeClassesAndSources(entries: Seq[ClassRepresentation]*): Seq[ClassRepresentation] = {
// based on the implementation from MergedClassPath
var count = 0
val indices = collection.mutable.HashMap[String, Int]()
- val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024)
+ val mergedEntries = new ArrayBuffer[ClassRepresentation](1024)
for {
partOfEntries <- entries
@@ -109,7 +112,7 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
mergedEntries.toIndexedSeq
}
- private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = {
+ private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = {
val seenNames = collection.mutable.HashSet[String]()
val entriesBuffer = new ArrayBuffer[EntryType](1024)
for {
@@ -121,19 +124,16 @@ case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatCl
}
entriesBuffer.toIndexedSeq
}
-
- private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg)
- private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg)
}
-object AggregateFlatClassPath {
- def createAggregate(parts: FlatClassPath*): FlatClassPath = {
- val elems = new ArrayBuffer[FlatClassPath]()
+object AggregateClassPath {
+ def createAggregate(parts: ClassPath*): ClassPath = {
+ val elems = new ArrayBuffer[ClassPath]()
parts foreach {
- case AggregateFlatClassPath(ps) => elems ++= ps
+ case AggregateClassPath(ps) => elems ++= ps
case p => elems += p
}
if (elems.size == 1) elems.head
- else AggregateFlatClassPath(elems.toIndexedSeq)
+ else AggregateClassPath(elems.toIndexedSeq)
}
}
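The merge policy of `mergeClassesAndSources` above (one entry per class name, the first class file and the first source file win, and a name seen as both kinds becomes a combined entry) can be sketched with simplified stand-in types; the snippet below is illustrative only, not the compiler's code.

```scala
// Sketch of the merge policy described above, with simplified stand-in types:
// one entry per name; the first class file and the first source file win,
// and a name present as both becomes a combined entry.
object MergeSketch {
  sealed trait Entry { def name: String }
  case class ClassEntry(name: String) extends Entry
  case class SourceEntry(name: String) extends Entry
  case class Both(name: String) extends Entry

  def merge(entries: Seq[Entry]*): Seq[Entry] = {
    val merged = collection.mutable.LinkedHashMap.empty[String, Entry]
    for (part <- entries; e <- part) merged.get(e.name) match {
      case None => merged(e.name) = e
      case Some(ClassEntry(n)) if e.isInstanceOf[SourceEntry] => merged(n) = Both(n)
      case Some(SourceEntry(n)) if e.isInstanceOf[ClassEntry] => merged(n) = Both(n)
      case _ => () // same kind already seen: keep the first occurrence
    }
    merged.values.toList
  }

  def main(args: Array[String]): Unit = {
    println(merge(Seq(ClassEntry("A"), ClassEntry("B")), Seq(SourceEntry("A"), ClassEntry("B"))))
    // List(Both(A), ClassEntry(B))
  }
}
```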
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala
new file mode 100644
index 0000000000..08bd98b1d8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassRepresentation
+
+case class ClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepresentation])
+
+object ClassPathEntries {
+ import scala.language.implicitConversions
+ // to have working unzip method
+ implicit def entry2Tuple(entry: ClassPathEntries): (Seq[PackageEntry], Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources)
+}
+
+trait ClassFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepresentation {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[nsc] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[nsc] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
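The implicit `entry2Tuple` conversion exists so that a sequence of `ClassPathEntries` can be `unzip`ped directly, which is what `AggregateClassPath.list` relies on above. A small self-contained sketch of the same trick, using a stand-in case class:

```scala
// Sketch of the unzip-via-implicit-conversion trick used by ClassPathEntries.
// `Entries` stands in for ClassPathEntries; the names are illustrative only.
object UnzipSketch {
  case class Entries(packages: Seq[String], classes: Seq[String])

  import scala.language.implicitConversions
  // Seq#unzip needs an implicit A => (A1, A2); this conversion provides it.
  implicit def entry2Tuple(e: Entries): (Seq[String], Seq[String]) = (e.packages, e.classes)

  def main(args: Array[String]): Unit = {
    val perEntry = Seq(Entries(Seq("p"), Seq("A")), Entries(Seq("q"), Seq("B", "C")))
    val (packages, classes) = perEntry.unzip
    println(packages.flatten) // List(p, q)
    println(classes.flatten)  // List(A, B, C)
  }
}
```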
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
index 9bf4e3f779..3a29f1ba11 100644
--- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
@@ -3,47 +3,49 @@
*/
package scala.tools.nsc.classpath
-import scala.reflect.io.AbstractFile
+import scala.reflect.io.{AbstractFile, VirtualDirectory}
+import scala.tools.nsc.Settings
+import FileUtils.AbstractFileOps
import scala.tools.nsc.util.ClassPath
/**
- * A trait that contains factory methods for classpath elements of type T.
- *
- * The logic has been abstracted from ClassPath#ClassPathContext so it's possible
- * to have common trait that supports both recursive and flat classpath representations.
- *
- * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath.
+ * Provides factory methods for classpaths. When creating a classpath instance for a given path,
+ * it chooses the appropriate classpath type based on the kinds of files (sources or classes) found there.
*/
-trait ClassPathFactory[T] {
-
+class ClassPathFactory(settings: Settings) {
/**
- * Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): T
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings)
/**
- * Creators for sub classpaths which preserve this context.
- */
- def sourcesInPath(path: String): List[T]
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[ClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
- def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar)
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = scala.tools.nsc.util.ClassPath.expandPath(path, expandStar)
- def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir)
+ def expandDir(extdir: String): List[String] = scala.tools.nsc.util.ClassPath.expandDir(extdir)
- def contentsOfDirsInPath(path: String): List[T] =
+ def contentsOfDirsInPath(path: String): List[ClassPath] =
for {
dir <- expandPath(path, expandStar = false)
name <- expandDir(dir)
entry <- Option(AbstractFile.getDirectory(name))
} yield newClassPath(entry)
- def classesInExpandedPath(path: String): IndexedSeq[T] =
+ def classesInExpandedPath(path: String): IndexedSeq[ClassPath] =
classesInPathImpl(path, expand = true).toIndexedSeq
def classesInPath(path: String) = classesInPathImpl(path, expand = false)
def classesInManifest(useManifestClassPath: Boolean) =
- if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ if (useManifestClassPath) scala.tools.nsc.util.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
else Nil
// Internal
@@ -52,4 +54,25 @@ trait ClassPathFactory[T] {
file <- expandPath(path, expand)
dir <- Option(AbstractFile.getDirectory(file))
} yield newClassPath(dir)
+
+ private def createSourcePath(file: AbstractFile): ClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarSourcePathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectorySourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
+
+object ClassPathFactory {
+ def newClassPath(file: AbstractFile, settings: Settings): ClassPath = file match {
+ case vd: VirtualDirectory => VirtualDirectoryClassPath(vd)
+ case _ =>
+ if (file.isJarOrZip)
+ ZipAndJarClassPathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+ }
}
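Taken together, `ClassPathFactory.newClassPath` and `AggregateClassPath.createAggregate` are how the patched code assembles the single unified classpath (compare `extendCompilerClassPath` in `Global` above). A rough usage sketch, assuming this version of `scala-compiler` is on the classpath; the directory paths and class name are placeholders and the directories are assumed to exist:

```scala
// Rough sketch: building a unified classpath from two directories with the
// post-patch API. Paths are placeholders; this mirrors extendCompilerClassPath.
import scala.reflect.io.AbstractFile
import scala.tools.nsc.Settings
import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory}

object ClassPathSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    // Assumes both directories exist; AbstractFile.getDirectory returns null otherwise.
    val entries = Seq("/tmp/classes-a", "/tmp/classes-b").map { path =>
      ClassPathFactory.newClassPath(AbstractFile.getDirectory(path), settings)
    }
    // createAggregate flattens nested aggregates and collapses a single entry to itself.
    val classPath = AggregateClassPath.createAggregate(entries: _*)
    println(classPath.asClassPathString)
    println(classPath.findClassFile("com.example.Main").isDefined)
  }
}
```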
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
index e3964dfa78..aba941e043 100644
--- a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
@@ -5,9 +5,8 @@ package scala.tools.nsc.classpath
import java.io.File
import java.net.URL
-import scala.reflect.io.AbstractFile
-import scala.reflect.io.PlainFile
-import scala.tools.nsc.util.ClassRepresentation
+import scala.reflect.io.{AbstractFile, PlainFile}
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
import FileUtils._
/**
@@ -17,7 +16,7 @@ import FileUtils._
* when we have a name of a package.
* It abstracts over the file representation to work with both JFile and AbstractFile.
*/
-trait DirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
type F
val dir: F
@@ -33,7 +32,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClass
protected def isMatchingFile(f: F): Boolean
private def getDirectory(forPackage: String): Option[F] = {
- if (forPackage == FlatClassPath.RootPackage) {
+ if (forPackage == ClassPath.RootPackage) {
Some(dir)
} else {
val packageDirName = FileUtils.dirPath(forPackage)
@@ -60,7 +59,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClass
files.map(f => createFileEntry(toAbstractFile(f)))
}
- private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ private[nsc] def list(inPackage: String): ClassPathEntries = {
val dirForPackage = getDirectory(inPackage)
val files: Array[F] = dirForPackage match {
case None => emptyFiles
@@ -75,11 +74,11 @@ trait DirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClass
else if (isMatchingFile(file))
fileBuf += createFileEntry(toAbstractFile(file))
}
- FlatClassPathEntries(packageBuf, fileBuf)
+ ClassPathEntries(packageBuf, fileBuf)
}
}
-trait JFileDirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends DirectoryLookup[FileEntryType] {
+trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] {
type F = File
protected def emptyFiles: Array[File] = Array.empty
@@ -102,8 +101,8 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepClassPathEntry] extends Dire
def asClassPathStrings: Seq[String] = Seq(dir.getPath)
}
-case class DirectoryFlatClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
+case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
def findClassFile(className: String): Option[AbstractFile] = {
val relativePath = FileUtils.dirPath(className)
@@ -121,13 +120,13 @@ case class DirectoryFlatClassPath(dir: File) extends JFileDirectoryLookup[ClassF
private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
}
-case class DirectoryFlatSourcePath(dir: File) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths {
+case class DirectorySourcePath(dir: File) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths {
def asSourcePathString: String = asClassPathString
protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
protected def isMatchingFile(f: File): Boolean = endsScalaOrJava(f.getName)
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findSourceFile(className) map SourceFileEntryImpl
+ override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl
private def findSourceFile(className: String): Option[AbstractFile] = {
val relativePath = FileUtils.dirPath(className)
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
deleted file mode 100644
index e95ffe02e3..0000000000
--- a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.classpath
-
-import scala.reflect.io.AbstractFile
-import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation }
-
-/**
- * A base trait for the particular flat classpath representation implementations.
- *
- * We call this variant of a classpath representation flat because it's possible to
- * query the whole classpath using just single instance extending this trait.
- *
- * This is an alternative design compared to scala.tools.nsc.util.ClassPath
- */
-trait FlatClassPath extends ClassFileLookup[AbstractFile] {
- /** Empty string represents root package */
- private[nsc] def packages(inPackage: String): Seq[PackageEntry]
- private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
- private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
-
- /** Allows to get entries for packages and classes merged with sources possibly in one pass. */
- private[nsc] def list(inPackage: String): FlatClassPathEntries
-
- // A default implementation which should be overridden, if we can create the more efficient
- // solution for a given type of FlatClassPath
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
- val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
-
- val foundClassFromClassFiles = classes(pkg).find(_.name == simpleClassName)
- def findClassInSources = sources(pkg).find(_.name == simpleClassName)
-
- foundClassFromClassFiles orElse findClassInSources
- }
-
- override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
- def asClassPathStrings: Seq[String]
-}
-
-object FlatClassPath {
- val RootPackage = ""
-}
-
-case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry])
-
-object FlatClassPathEntries {
- import scala.language.implicitConversions
- // to have working unzip method
- implicit def entry2Tuple(entry: FlatClassPathEntries): (Seq[PackageEntry], Seq[ClassRepClassPathEntry]) = (entry.packages, entry.classesAndSources)
-}
-
-sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile]
-
-trait ClassFileEntry extends ClassRepClassPathEntry {
- def file: AbstractFile
-}
-
-trait SourceFileEntry extends ClassRepClassPathEntry {
- def file: AbstractFile
-}
-
-trait PackageEntry {
- def name: String
-}
-
-private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
- override def name = FileUtils.stripClassExtension(file.name) // class name
-
- override def binary: Option[AbstractFile] = Some(file)
- override def source: Option[AbstractFile] = None
-}
-
-private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
- override def name = FileUtils.stripSourceExtension(file.name)
-
- override def binary: Option[AbstractFile] = None
- override def source: Option[AbstractFile] = Some(file)
-}
-
-private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry {
- override def name = FileUtils.stripClassExtension(classFile.name)
-
- override def binary: Option[AbstractFile] = Some(classFile)
- override def source: Option[AbstractFile] = Some(srcFile)
-}
-
-private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
-
-private[nsc] trait NoSourcePaths {
- def asSourcePathString: String = ""
- private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
-}
-
-private[nsc] trait NoClassPaths {
- def findClassFile(className: String): Option[AbstractFile] = None
- private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
-}
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
deleted file mode 100644
index 463301696e..0000000000
--- a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.classpath
-
-import scala.reflect.io.VirtualDirectory
-import scala.tools.nsc.Settings
-import scala.tools.nsc.io.AbstractFile
-import FileUtils.AbstractFileOps
-
-/**
- * Provides factory methods for flat classpath. When creating classpath instances for a given path,
- * it uses proper type of classpath depending on a types of particular files containing sources or classes.
- */
-class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] {
- def newClassPath(file: AbstractFile): FlatClassPath = FlatClassPathFactory.newClassPath(file, settings)
-
- def sourcesInPath(path: String): List[FlatClassPath] =
- for {
- file <- expandPath(path, expandStar = false)
- dir <- Option(AbstractFile getDirectory file)
- } yield createSourcePath(dir)
-
- private def createSourcePath(file: AbstractFile): FlatClassPath =
- if (file.isJarOrZip)
- ZipAndJarFlatSourcePathFactory.create(file, settings)
- else if (file.isDirectory)
- new DirectoryFlatSourcePath(file.file)
- else
- sys.error(s"Unsupported sourcepath element: $file")
-}
-
-object FlatClassPathFactory {
- def newClassPath(file: AbstractFile, settings: Settings): FlatClassPath = file match {
- case vd: VirtualDirectory => VirtualDirectoryFlatClassPath(vd)
- case _ =>
- if (file.isJarOrZip)
- ZipAndJarFlatClassPathFactory.create(file, settings)
- else if (file.isDirectory)
- new DirectoryFlatClassPath(file.file)
- else
- sys.error(s"Unsupported classpath element: $file")
- }
-}
diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
index c907d565d2..39b0d78135 100644
--- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
+++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
@@ -3,7 +3,7 @@
*/
package scala.tools.nsc.classpath
-import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+import scala.tools.nsc.util.ClassPath.RootPackage
/**
* Common methods related to package names represented as String
diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
index 06cdab583c..8df0c3743d 100644
--- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryFlatClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
@@ -4,8 +4,9 @@ import scala.tools.nsc.util.ClassRepresentation
import scala.reflect.io.{Path, PlainFile, VirtualDirectory, AbstractFile}
import FileUtils._
import java.net.URL
+import scala.tools.nsc.util.ClassPath
-case class VirtualDirectoryFlatClassPath(dir: VirtualDirectory) extends FlatClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
+case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
type F = AbstractFile
protected def emptyFiles: Array[AbstractFile] = Array.empty
@@ -23,7 +24,7 @@ case class VirtualDirectoryFlatClassPath(dir: VirtualDirectory) extends FlatClas
def asURLs: Seq[URL] = Seq(new URL(dir.name))
def asClassPathStrings: Seq[String] = Seq(dir.path)
- override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
+ override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
def findClassFile(className: String): Option[AbstractFile] = {
val relativePath = FileUtils.dirPath(className)
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
index 6ec3805d8b..fe74e5f874 100644
--- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -6,7 +6,8 @@ package scala.tools.nsc.classpath
import java.io.File
import java.net.URL
import scala.annotation.tailrec
-import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources }
+import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources}
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.Settings
import FileUtils._
@@ -19,16 +20,16 @@ import FileUtils._
* when there are a lot of projects having a lot of common dependencies.
*/
sealed trait ZipAndJarFileLookupFactory {
- private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath]
+ private val cache = collection.mutable.Map.empty[AbstractFile, ClassPath]
- def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = {
+ def create(zipFile: AbstractFile, settings: Settings): ClassPath = {
if (settings.YdisableFlatCpCaching) createForZipFile(zipFile)
else createUsingCache(zipFile, settings)
}
- protected def createForZipFile(zipFile: AbstractFile): FlatClassPath
+ protected def createForZipFile(zipFile: AbstractFile): ClassPath
- private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized {
+ private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = cache.synchronized {
def newClassPathInstance = {
if (settings.verbose || settings.Ylogcp)
println(s"$zipFile is not yet in the classpath cache")
@@ -39,11 +40,11 @@ sealed trait ZipAndJarFileLookupFactory {
}
/**
- * Manages creation of flat classpath for class files placed in zip and jar files.
+ * Manages creation of classpath for class files placed in zip and jar files.
* It should be the only way of creating them as it provides caching.
*/
-object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
- private case class ZipArchiveFlatClassPath(zipFile: File)
+object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveClassPath(zipFile: File)
extends ZipArchiveFileLookup[ClassFileEntryImpl]
with NoSourcePaths {
@@ -65,7 +66,7 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
* with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry:
* Name: scala/Function2$mcFJD$sp.class
*/
- private case class ManifestResourcesFlatClassPath(file: ManifestResources) extends FlatClassPath with NoSourcePaths {
+ private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths {
override def findClassFile(className: String): Option[AbstractFile] = {
val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
classes(pkg).find(_.name == simpleClassName).map(_.file)
@@ -75,8 +76,8 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
override def asURLs: Seq[URL] = file.toURLs()
- import ManifestResourcesFlatClassPath.PackageFileInfo
- import ManifestResourcesFlatClassPath.PackageInfo
+ import ManifestResourcesClassPath.PackageFileInfo
+ import ManifestResourcesClassPath.PackageInfo
/**
* A cache mapping package name to abstract file for package directory and subpackages of given package.
@@ -114,8 +115,8 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
}
val subpackages = getSubpackages(file)
- packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
- traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages.put(ClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue())
packages
}
@@ -132,21 +133,21 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
(for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
}
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage))
+ override private[nsc] def list(inPackage: String): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage))
}
- private object ManifestResourcesFlatClassPath {
+ private object ManifestResourcesClassPath {
case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
}
- override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath =
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath =
if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
- else ZipArchiveFlatClassPath(zipFile.file)
+ else ZipArchiveClassPath(zipFile.file)
private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
case manifestRes: ManifestResources =>
- ManifestResourcesFlatClassPath(manifestRes)
+ ManifestResourcesClassPath(manifestRes)
case _ =>
val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
throw new IllegalArgumentException(errorMsg)
@@ -154,11 +155,11 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
}
/**
- * Manages creation of flat classpath for source files placed in zip and jar files.
+ * Manages creation of classpath for source files placed in zip and jar files.
* It should be the only way of creating them as it provides caching.
*/
-object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
- private case class ZipArchiveFlatSourcePath(zipFile: File)
+object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory {
+ private case class ZipArchiveSourcePath(zipFile: File)
extends ZipArchiveFileLookup[SourceFileEntryImpl]
with NoClassPaths {
@@ -170,5 +171,5 @@ object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
}
- override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file)
+ override protected def createForZipFile(zipFile: AbstractFile): ClassPath = ZipArchiveSourcePath(zipFile.file)
}
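A minimal usage sketch of the caching factories above (not from the patch; the jar path is illustrative): repeated lookups of the same archive share one ClassPath instance unless `-YdisableFlatCpCaching` is set.

```scala
import scala.tools.nsc.Settings
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.classpath.ZipAndJarClassPathFactory

object JarClassPathSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    val jar      = AbstractFile.getFile("lib/scala-library.jar") // assumed to exist
    val cp1      = ZipAndJarClassPathFactory.create(jar, settings)
    val cp2      = ZipAndJarClassPathFactory.create(jar, settings)
    println(cp1 eq cp2) // true with caching enabled (the default)
  }
}
```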
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
index a24d989306..9c147cf8cc 100644
--- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
+++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
@@ -9,13 +9,14 @@ import scala.collection.Seq
import scala.reflect.io.AbstractFile
import scala.reflect.io.FileZipArchive
import FileUtils.AbstractFileOps
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
/**
* A trait allowing to look for classpath entries of given type in zip and jar files.
* It provides common logic for classes handling class and source files.
* It's aware of things like e.g. META-INF directory which is correctly skipped.
*/
-trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
val zipFile: File
assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
@@ -39,7 +40,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends Flat
entry <- dirEntry.iterator if isRequiredFileType(entry)
} yield createFileEntry(entry)
- override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ override private[nsc] def list(inPackage: String): ClassPathEntries = {
val foundDirEntry = findDirEntry(inPackage)
foundDirEntry map { dirEntry =>
@@ -53,8 +54,8 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends Flat
else if (isRequiredFileType(entry))
fileBuf += createFileEntry(entry)
}
- FlatClassPathEntries(pkgBuf, fileBuf)
- } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty)
+ ClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse ClassPathEntries(Seq.empty, Seq.empty)
}
private def findDirEntry(pkg: String): Option[archive.DirEntry] = {
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 7b98011759..9a0d86a94d 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -200,7 +200,6 @@ trait ScalaSettings extends AbsScalaSettings
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
- val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Flat)
val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
@@ -389,8 +388,3 @@ trait ScalaSettings extends AbsScalaSettings
None
}
}
-
-object ClassPathRepresentationType {
- val Flat = "flat"
- val Recursive = "recursive"
-}
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 310025a336..7ef606b6ef 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -59,6 +59,7 @@ trait Warnings {
val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.")
val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.")
val StarsAlign = LintWarning("stars-align", "Pattern sequence wildcard must align with sequence component.")
+ val Constant = LintWarning("constant", "Evaluation of a constant arithmetic expression results in an error.")
def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]]
}
@@ -80,6 +81,7 @@ trait Warnings {
def warnPackageObjectClasses = lint contains PackageObjectClasses
def warnUnsoundMatch = lint contains UnsoundMatch
def warnStarsAlign = lint contains StarsAlign
+ def warnConstant = lint contains Constant
// Lint warnings that are currently -Y, but deprecated in that usage
@deprecated("Use warnAdaptedArgs", since="2.11.2")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 4f5589fd7c..b36d5d4ef1 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,8 @@ import classfile.ClassfileParser
import java.io.IOException
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
-import scala.reflect.io.{ AbstractFile, NoAbstractFile }
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.{ ClassPath, ClassRepresentation }
+import scala.reflect.io.{AbstractFile, NoAbstractFile}
+import scala.tools.nsc.util.{ClassPath, ClassRepresentation}
/** This class ...
*
@@ -154,7 +152,7 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
@@ -247,41 +245,11 @@ abstract class SymbolLoaders {
}
/**
- * Load contents of a package
- */
- class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
- protected def description = s"package loader ${classpath.name}"
-
- protected def doComplete(root: Symbol) {
- assert(root.isPackageClass, root)
- // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule`
- // creates a module symbol and invokes invokes `companionModule` while the `infos` field is
- // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
- enteringPhase(phaseBeforeRefchecks) {
- root.setInfo(new PackageClassInfoType(newScope, root))
-
- if (!root.isRoot) {
- for (classRep <- classpath.classes) {
- initializeFromClassPath(root, classRep)
- }
- }
- if (!root.isEmptyPackageClass) {
- for (pkg <- classpath.packages) {
- enterPackage(root, pkg.name, new PackageLoader(pkg))
- }
-
- openPackageModule(root)
- }
- }
- }
- }
-
- /**
* Loads contents of a package
*/
- class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter {
+ class PackageLoader(packageName: String, classPath: ClassPath) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = {
- val shownPackageName = if (packageName == FlatClassPath.RootPackage) "<root package>" else packageName
+ val shownPackageName = if (packageName == ClassPath.RootPackage) "<root package>" else packageName
s"package loader $shownPackageName"
}
@@ -298,9 +266,9 @@ abstract class SymbolLoaders {
val fullName = pkg.name
val name =
- if (packageName == FlatClassPath.RootPackage) fullName
+ if (packageName == ClassPath.RootPackage) fullName
else fullName.substring(packageName.length + 1)
- val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath)
+ val packageLoader = new PackageLoader(fullName, classPath)
enterPackage(root, name, packageLoader)
}
@@ -329,10 +297,7 @@ abstract class SymbolLoaders {
val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
- override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Recursive => platform.classPath
- case ClassPathRepresentationType.Flat => platform.flatClassPath
- }
+ override def classPath: ClassPath = platform.classPath
}
protected def description = "class file "+ classfile.toString
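For illustration only, the name-shortening step in the new PackageLoader (entries carry fully qualified names; the loader strips the enclosing package prefix before entering the symbol) amounts to this stand-alone sketch:

```scala
object PackageNameSketch {
  val RootPackage = "" // mirrors ClassPath.RootPackage

  def shortName(enclosing: String, fullName: String): String =
    if (enclosing == RootPackage) fullName
    else fullName.substring(enclosing.length + 1)

  def main(args: Array[String]): Unit = {
    println(shortName("scala.collection", "scala.collection.immutable")) // immutable
    println(shortName(RootPackage, "scala"))                             // scala
  }
}
```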
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index fffd48d145..0533d420cd 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -8,16 +8,16 @@ package tools.nsc
package symtab
package classfile
-import java.io.{ File, IOException }
+import java.io.{File, IOException}
import java.lang.Integer.toHexString
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+import scala.collection.{immutable, mutable}
+import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.annotation.switch
-import scala.reflect.internal.{ JavaAccFlags }
-import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
+import scala.reflect.internal.JavaAccFlags
+import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer}
import scala.reflect.io.NoAbstractFile
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.ClassFileLookup
/** This abstract class implements a class file parser.
*
@@ -43,8 +43,8 @@ abstract class ClassfileParser {
*/
protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
- /** The way of the class file lookup used by the compiler. */
- def classFileLookup: ClassFileLookup[AbstractFile]
+ /** The compiler classpath. */
+ def classPath: ClassPath
import definitions._
import scala.reflect.internal.ClassfileConstants._
@@ -357,7 +357,7 @@ abstract class ClassfileParser {
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = classFileLookup findClassFile name.toString getOrElse {
+ val file = classPath findClassFile name.toString getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
@@ -1079,7 +1079,7 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classFileLookup.findClassFile(entry.externalName.toString)
+ val file = classPath.findClassFile(entry.externalName.toString)
enterClassAndModule(entry, file.getOrElse(NoAbstractFile))
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 0301e06c87..bc614dfc31 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -344,7 +344,7 @@ abstract class Erasure extends AddInterfaces
buf.toString
case RefinedType(parents, decls) =>
- boxedSig(intersectionDominator(parents))
+ jsig(intersectionDominator(parents), primitiveOK = primitiveOK)
case ClassInfoType(parents, _, _) =>
superSig(parents)
case AnnotatedType(_, atp) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 828a79ac4f..2cd4785fbf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -40,9 +40,10 @@ abstract class ConstantFolder {
if ((x ne null) && x.tag != UnitTag) tree setType ConstantType(x)
else tree
} catch {
- case _: ArithmeticException => tree // the code will crash at runtime,
- // but that is better than the
- // compiler itself crashing
+ case e: ArithmeticException =>
+ if (settings.warnConstant)
+ warning(tree.pos, s"Evaluation of a constant expression results in an arithmetic error: ${e.getMessage}")
+ tree
}
private def foldUnop(op: Name, x: Constant): Constant = (op, x.tag) match {
@@ -158,7 +159,7 @@ abstract class ConstantFolder {
else if (x.isNumeric && y.isNumeric) math.max(x.tag, y.tag)
else NoTag
- try optag match {
+ optag match {
case BooleanTag => foldBooleanOp(op, x, y)
case ByteTag | ShortTag | CharTag | IntTag => foldSubrangeOp(op, x, y)
case LongTag => foldLongOp(op, x, y)
@@ -167,8 +168,5 @@ abstract class ConstantFolder {
case StringTag if op == nme.ADD => Constant(x.stringValue + y.stringValue)
case _ => null
}
- catch {
- case _: ArithmeticException => null
- }
}
}
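For illustration (not part of the patch), a source file like the one below now triggers the new lint warning when compiled with `-Xlint:constant`; the folder still leaves the expression in place, so it continues to throw at run time.

```scala
// scalac -Xlint:constant ConstantDemo.scala
object ConstantDemo {
  def boom: Int = 1 / 0
  // warning: Evaluation of a constant expression results in an arithmetic error: / by zero
}
```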
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index ccdff5c9a1..e190b57017 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -469,6 +469,11 @@ trait ContextErrors {
setError(tree)
}
+ def ConstructorRecursesError(tree: Tree) = {
+ issueNormalTypeError(tree, "constructor invokes itself")
+ setError(tree)
+ }
+
def OnlyDeclarationsError(tree: Tree) = {
issueNormalTypeError(tree, "only declarations allowed here")
setError(tree)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8f5c4b9f6d..329ce8c23b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2992,43 +2992,36 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def includesTargetPos(tree: Tree) =
tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
val localTarget = stats exists includesTargetPos
- def typedStat(stat: Tree): Tree = {
- if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat))
- OnlyDeclarationsError(stat)
- else
- stat match {
- case imp @ Import(_, _) =>
- imp.symbol.initialize
- if (!imp.symbol.isError) {
- context = context.make(imp)
- typedImport(imp)
- } else EmptyTree
- case _ =>
- if (localTarget && !includesTargetPos(stat)) {
- // skip typechecking of statements in a sequence where some other statement includes
- // the targetposition
- stat
- } else {
- val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) {
- this
- } else newTyper(context.make(stat, exprOwner))
- // XXX this creates a spurious dead code warning if an exception is thrown
- // in a constructor, even if it is the only thing in the constructor.
- val result = checkDead(localTyper.typedByValueExpr(stat))
-
- if (treeInfo.isSelfOrSuperConstrCall(result)) {
- context.inConstructorSuffix = true
- if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
- ConstructorsOrderError(stat)
- }
-
- if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
- "a pure expression does nothing in statement position; " +
- "you may be omitting necessary parentheses"
- )
- result
- }
+ def typedStat(stat: Tree): Tree = stat match {
+ case s if context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(s) => OnlyDeclarationsError(s)
+ case imp @ Import(_, _) =>
+ imp.symbol.initialize
+ if (!imp.symbol.isError) {
+ context = context.make(imp)
+ typedImport(imp)
+ } else EmptyTree
+ // skip typechecking of statements in a sequence where some other statement includes the targetposition
+ case s if localTarget && !includesTargetPos(s) => s
+ case _ =>
+ val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
+ else newTyper(context.make(stat, exprOwner))
+ // XXX this creates a spurious dead code warning if an exception is thrown
+ // in a constructor, even if it is the only thing in the constructor.
+ val result = checkDead(localTyper.typedByValueExpr(stat))
+
+ if (treeInfo.isSelfOrSuperConstrCall(result)) {
+ context.inConstructorSuffix = true
+ if (treeInfo.isSelfConstrCall(result)) {
+ if (result.symbol == exprOwner.enclMethod)
+ ConstructorRecursesError(stat)
+ else if (result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
+ ConstructorsOrderError(stat)
+ }
}
+ if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
+ "a pure expression does nothing in statement position; you may be omitting necessary parentheses"
+ )
+ result
}
/* 'accessor' and 'accessed' are so similar it becomes very difficult to
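A small example of source that now hits the new dedicated diagnostic instead of only the constructor-order check:

```scala
class Recursing(x: Int) {
  def this() = this() // error: constructor invokes itself
}
```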
diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
deleted file mode 100644
index 5d8831a607..0000000000
--- a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.util
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.classpath.{AggregateFlatClassPath, FlatClassPath, FlatClassPathFactory}
-import scala.tools.nsc.io.AbstractFile
-import java.net.URL
-
-/**
- * Simple interface that allows us to abstract over how class file lookup is performed
- * in different classpath representations.
- */
-// TODO at the end, after the possible removal of the old classpath representation, this class shouldn't be generic
-// T should be just changed to AbstractFile
-trait ClassFileLookup[T] {
- def findClassFile(name: String): Option[AbstractFile]
-
- /**
- * It returns both classes from class file and source files (as our base ClassRepresentation).
- * So note that it's not so strictly related to findClassFile.
- */
- def findClass(name: String): Option[ClassRepresentation[T]]
-
- /**
- * A sequence of URLs representing this classpath.
- */
- def asURLs: Seq[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClassPathString: String
-
- // for compatibility purposes
- @deprecated("Use asClassPathString instead of this one", "2.11.5")
- def asClasspathString: String = asClassPathString
-
- /** The whole sourcepath in the form of one String.
- */
- def asSourcePathString: String
-}
-
-object ClassFileLookup {
- def createForFile(f: AbstractFile, current: ClassFileLookup[AbstractFile], settings: Settings): ClassFileLookup[AbstractFile] = current match {
- case cp: ClassPath[_] => cp.context.newClassPath(f)
- case _: FlatClassPath => FlatClassPathFactory.newClassPath(f, settings)
- }
-
- def createAggregate(elems: Iterable[ClassFileLookup[AbstractFile]], current: ClassFileLookup[AbstractFile]): ClassFileLookup[AbstractFile] = {
- assert(elems.nonEmpty)
- if (elems.size == 1) elems.head
- else current match {
- case cp: ClassPath[_] =>
- new MergedClassPath(elems.asInstanceOf[Iterable[ClassPath[AbstractFile]]], cp.context)
-
- case _: FlatClassPath =>
- AggregateFlatClassPath.createAggregate(elems.asInstanceOf[Iterable[FlatClassPath]].toSeq : _*)
- }
- }
-}
-
-/**
- * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
- */
-// TODO at the end, after the possible removal of the old classpath implementation, this class shouldn't be generic
-// T should be just changed to AbstractFile
-trait ClassRepresentation[T] {
- def binary: Option[T]
- def source: Option[AbstractFile]
-
- def name: String
-}
-
-object ClassRepresentation {
- def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] =
- Some((classRep.binary, classRep.source))
-}
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index e5af1edadb..cef2fc4bbf 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -7,28 +7,61 @@
package scala.tools.nsc
package util
-import io.{ AbstractFile, Directory, File, Jar }
+import io.{AbstractFile, Directory, File, Jar}
import java.net.MalformedURLException
import java.net.URL
import java.util.regex.PatternSyntaxException
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.StringOps.splitWhere
-import scala.tools.nsc.classpath.FileUtils
import File.pathSeparator
-import FileUtils.endsClass
-import FileUtils.endsScalaOrJava
import Jar.isJarOrZip
-/** <p>
- * This module provides star expansion of '-classpath' option arguments, behaves the same as
- * java, see [[http://docs.oracle.com/javase/6/docs/technotes/tools/windows/classpath.html]]
- * </p>
- *
- * @author Stepan Koltsov
- */
+/**
+ * A representation of the compiler's class- or sourcepath.
+ */
+trait ClassPath {
+ import scala.tools.nsc.classpath._
+ def asURLs: Seq[URL]
+
+ /** Empty string represents root package */
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry]
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
+
+ /** Returns packages and classes (merged with sources), possibly in a single pass. */
+ private[nsc] def list(inPackage: String): ClassPathEntries
+
+ /**
+ * Returns the ClassRepresentation for the given class name, whether it is backed by a
+ * class file or a source file; it is therefore not strictly tied to findClassFile.
+ */
+ def findClass(className: String): Option[ClassRepresentation] = {
+ // A default implementation which should be overridden, if we can create the more efficient
+ // solution for a given type of ClassPath
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg).find(_.name == simpleClassName)
+ def findClassInSources = sources(pkg).find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+ def findClassFile(className: String): Option[AbstractFile]
+
+ def asClassPathStrings: Seq[String]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ // for compatibility purposes
+ @deprecated("Use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
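A minimal sketch of the public surface of the trait above; `cp` is assumed to come from the compiler (for example a PathResolver result), and the class name is only an example.

```scala
import scala.tools.nsc.util.ClassPath

object ClassPathSketch {
  def describe(cp: ClassPath): Unit = {
    cp.findClass("scala.Option") match {
      case Some(rep) => println(s"${rep.name}: binary=${rep.binary.isDefined}, source=${rep.source.isDefined}")
      case None      => println("scala.Option not found on this classpath")
    }
    println(cp.findClassFile("scala.Option").map(_.path)) // class files only, no source fallback
    println(cp.asClassPathString)                         // entries joined with the platform separator
  }
}
```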
+
object ClassPath {
- import scala.language.postfixOps
+ val RootPackage = ""
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
@@ -36,14 +69,14 @@ object ClassPath {
/* Get all subdirectories, jars, zips out of a directory. */
def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
- dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
+ dir.list.filter(x => filt(x.name) && (x.isDirectory || isJarOrZip(x))).map(_.path).toList
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
try {
val regexp = ("^" + pattern.replaceAllLiterally("""\*""", """.*""") + "$").r
- lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+ lsDir(Directory(pattern).parent, regexp.findFirstIn(_).isDefined)
}
catch { case _: PatternSyntaxException => List(pattern) }
}
@@ -51,7 +84,7 @@ object ClassPath {
}
/** Split classpath using platform-dependent path separator */
- def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+ def split(path: String): List[String] = (path split pathSeparator).toList.filterNot(_ == "").distinct
/** Join classpath using platform-dependent path separator */
def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
@@ -68,9 +101,10 @@ object ClassPath {
def expandDir(extdir: String): List[String] = {
AbstractFile getDirectory extdir match {
case null => Nil
- case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
+ case dir => dir.filter(_.isClassContainer).map(x => new java.io.File(dir.file, x.name).getPath).toList
}
}
+
/** Expand manifest jar classpath entries: these are either urls, or paths
* relative to the location of the jar.
*/
@@ -88,301 +122,30 @@ object ClassPath {
try Some(new URL(spec))
catch { case _: MalformedURLException => None }
- /** A class modeling aspects of a ClassPath which should be
- * propagated to any classpaths it creates.
- */
- abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] {
- /** A filter which can be used to exclude entities from the classpath
- * based on their name.
- */
- def isValidName(name: String): Boolean = true
-
- /** Filters for assessing validity of various entities.
- */
- def validClassFile(name: String) = endsClass(name) && isValidName(name)
- def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScalaOrJava(name)
-
- /** From the representation to its identifier.
- */
- def toBinaryName(rep: T): String
-
- def sourcesInPath(path: String): List[ClassPath[T]] =
- for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
- new SourcePath[T](dir, this)
- }
-
def manifests: List[java.net.URL] = {
import scala.collection.JavaConverters._
val resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF")
resources.asScala.filter(_.getProtocol == "jar").toList
}
- class JavaContext extends ClassPathContext[AbstractFile] {
- def toBinaryName(rep: AbstractFile) = {
- val name = rep.name
- assert(endsClass(name), name)
- FileUtils.stripClassExtension(name)
- }
+ @deprecated("Shim for sbt's compiler interface", since = "2.12")
+ sealed abstract class ClassPathContext
- def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
- }
-
- object DefaultJavaContext extends JavaContext
-
- /** From the source file to its identifier.
- */
- def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name)
+ @deprecated("Shim for sbt's compiler interface", since = "2.12")
+ sealed abstract class JavaContext
}
-import ClassPath._
-
-/**
- * Represents a package which contains classes and other packages
- */
-abstract class ClassPath[T] extends ClassFileLookup[T] {
- /**
- * The short name of the package (without prefix)
- */
+trait ClassRepresentation {
def name: String
-
- /**
- * A String representing the origin of this classpath element, if known.
- * For example, the path of the directory or jar.
- */
- def origin: Option[String] = None
-
- /** Info which should be propagated to any sub-classpaths.
- */
- def context: ClassPathContext[T]
-
- /** Lists of entities.
- */
- def classes: IndexedSeq[ClassRepresentation[T]]
- def packages: IndexedSeq[ClassPath[T]]
- def sourcepaths: IndexedSeq[AbstractFile]
-
- /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
- * Subclasses such as `MergedClassPath` typically return lists with more elements.
- */
- def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
-
- /** Merge classpath of `platform` and `urls` into merged classpath */
- def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
- // Collect our new jars/directories and add them to the existing set of classpaths
- val allEntries =
- (entries ++
- urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
- ).distinct
-
- // Combine all of our classpaths (old and new) into one merged classpath
- new MergedClassPath(allEntries, context)
- }
-
- /**
- * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
- */
- case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] {
- def name: String = binary match {
- case Some(x) => context.toBinaryName(x)
- case _ =>
- assert(source.isDefined)
- toSourceName(source.get)
- }
- }
-
- /** Filters for assessing validity of various entities.
- */
- def validClassFile(name: String) = context.validClassFile(name)
- def validPackage(name: String) = context.validPackage(name)
- def validSourceFile(name: String) = context.validSourceFile(name)
-
- /**
- * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
- * Does not support nested classes on .NET
- */
- override def findClass(name: String): Option[ClassRepresentation[T]] =
- splitWhere(name, _ == '.', doDropIndex = true) match {
- case Some((pkg, rest)) =>
- val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
- rep map {
- case x: ClassRepresentation[T] => x
- case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
- }
- case _ =>
- classes find (_.name == name)
- }
-
- override def findClassFile(name: String): Option[AbstractFile] =
- findClass(name) match {
- case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x)
- case _ => None
- }
-
- override def asSourcePathString: String = sourcepaths.mkString(pathSeparator)
-
- def sortString = join(split(asClassPathString).sorted: _*)
- override def equals(that: Any) = that match {
- case x: ClassPath[_] => this.sortString == x.sortString
- case _ => false
- }
- override def hashCode = sortString.hashCode()
-}
-
-/**
- * A Classpath containing source files
- */
-class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
- import FileUtils.AbstractFileOps
-
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.toURLs()
- def asClassPathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
-
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[SourcePath[T]]
- dir foreach { f =>
- if (!f.isDirectory && validSourceFile(f.name))
- classBuf += ClassRep(None, Some(f))
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new SourcePath[T](f, context)
- }
- (packageBuf.result(), classBuf.result())
- }
-
- lazy val (packages, classes) = traverse()
- override def toString() = "sourcepath: "+ dir.toString()
+ def binary: Option[AbstractFile]
+ def source: Option[AbstractFile]
}
-/**
- * A directory (or a .jar file) containing classfiles and packages
- */
-class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
- import FileUtils.AbstractFileOps
-
- def name = dir.name
- override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.toURLs(default = Seq(new URL(name)))
- def asClassPathString = dir.path
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- // calculates (packages, classes) in one traversal.
- private def traverse() = {
- val classBuf = immutable.Vector.newBuilder[ClassRep]
- val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach {
- f =>
- // Optimization: We assume the file was not changed since `dir` called
- // `Path.apply` and categorized existent files as `Directory`
- // or `File` (avoids IO operation JFile.isDirectory()).
- val isDirectory = f match {
- case pf: io.PlainFile => pf.givenPath match {
- case _: io.Directory => true
- case _: io.File => false
- case _ => f.isDirectory
- }
- case _ =>
- f.isDirectory
- }
- if (!isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
- }
- (packageBuf.result(), classBuf.result())
- }
+@deprecated("Shim for sbt's compiler interface", since = "2.12")
+sealed abstract class DirectoryClassPath
- lazy val (packages, classes) = traverse()
- override def toString() = "directory classpath: "+ origin.getOrElse("?")
-}
+@deprecated("Shim for sbt's compiler interface", since = "2.12")
+sealed abstract class MergedClassPath
-/**
- * A classpath unifying multiple class- and sourcepath entries.
- */
-class MergedClassPath[T](
- override val entries: IndexedSeq[ClassPath[T]],
- val context: ClassPathContext[T])
-extends ClassPath[T] {
-
- def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
- this(entries.toIndexedSeq, context)
-
- def name = entries.head.name
- def asURLs = (entries flatMap (_.asURLs)).toList
- lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
-
- override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClassPathString: String = join(entries map (_.asClassPathString) : _*)
-
- lazy val classes: IndexedSeq[ClassRepresentation[T]] = {
- var count = 0
- val indices = mutable.HashMap[String, Int]()
- val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024)
-
- for (e <- entries; c <- e.classes) {
- val name = c.name
- if (indices contains name) {
- val idx = indices(name)
- val existing = cls(idx)
-
- if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = ClassRep(binary = c.binary, source = existing.source)
- if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = ClassRep(binary = existing.binary, source = c.source)
- }
- else {
- indices(name) = count
- cls += c
- count += 1
- }
- }
- cls.toIndexedSeq
- }
-
- lazy val packages: IndexedSeq[ClassPath[T]] = {
- var count = 0
- val indices = mutable.HashMap[String, Int]()
- val pkg = new mutable.ArrayBuffer[ClassPath[T]](256)
-
- for (e <- entries; p <- e.packages) {
- val name = p.name
- if (indices contains name) {
- val idx = indices(name)
- pkg(idx) = addPackage(pkg(idx), p)
- }
- else {
- indices(name) = count
- pkg += p
- count += 1
- }
- }
- pkg.toIndexedSeq
- }
-
- private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = {
- val newEntries: IndexedSeq[ClassPath[T]] = to match {
- case cp: MergedClassPath[_] => cp.entries :+ pkg
- case _ => IndexedSeq(to, pkg)
- }
- new MergedClassPath[T](newEntries, context)
- }
-
- def show() {
- println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClassPathString split ':' foreach (x => println(" " + x))
- }
-
- override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
-}
-
-/**
- * The classpath when compiling with target:jvm. Binary files (classfiles) are represented
- * as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
- */
-class JavaClassPath(
- containers: IndexedSeq[ClassPath[AbstractFile]],
- context: JavaContext)
-extends MergedClassPath[AbstractFile](containers, context) { }
+@deprecated("Shim for sbt's compiler interface", since = "2.12")
+sealed abstract class JavaClassPath
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 8d8418945a..7d82910699 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -5,12 +5,12 @@ import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.util.PathResolverFactory
+import scala.tools.util.PathResolver
object ReflectMain extends Driver {
private def classloaderFromSettings(settings: Settings) = {
- val classPathURLs = PathResolverFactory.create(settings).resultAsURLs
+ val classPathURLs = new PathResolver(settings).resultAsURLs
ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader)
}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 9decc99c8d..c351b6ace1 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -10,12 +10,10 @@ package util
import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath }
-import scala.reflect.io.{ File, Directory, Path, AbstractFile }
-import ClassPath.{ JavaContext, DefaultJavaContext, split }
+import scala.tools.nsc.util.ClassPath
+import scala.reflect.io.{Directory, File, Path}
import PartialFunction.condOpt
-import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory }
-import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.classpath._
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
@@ -40,7 +38,7 @@ object PathResolver {
}
/** pretty print class path */
- def ppcp(s: String) = split(s) match {
+ def ppcp(s: String) = ClassPath.split(s) match {
case Nil => ""
case Seq(x) => x
case xs => xs.mkString(EOL, EOL, "")
@@ -164,13 +162,6 @@ object PathResolver {
|}""".asLines
}
- @deprecated("This method is no longer used be scalap and will be deleted", "2.11.5")
- def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
- val s = new Settings()
- s.classpath.value = path
- new PathResolver(s, context).result
- }
-
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
@@ -182,28 +173,19 @@ object PathResolver {
} else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
- val pr = PathResolverFactory.create(settings)
+ val pr = new PathResolver(settings)
println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
pr.result match {
- case cp: JavaClassPath =>
- cp.show()
- case cp: AggregateFlatClassPath =>
+ case cp: AggregateClassPath =>
println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
}
}
}
-trait PathResolverResult {
- def result: ClassFileLookup[AbstractFile]
-
- def resultAsURLs: Seq[URL] = result.asURLs
-}
-
-abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType]
-(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType])
- extends PathResolverResult {
+final class PathResolver(settings: Settings) {
+ private val classPathFactory = new ClassPathFactory(settings)
import PathResolver.{ AsLines, Defaults, ppcp }
@@ -251,7 +233,7 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
import classPathFactory._
// Assemble the elements!
- def basis = List[Traversable[BaseClassPathType]](
+ def basis = List[Traversable[ClassPath]](
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -282,7 +264,7 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
import PathResolver.MkLines
- def result: ResultClassPathType = {
+ def result: ClassPath = {
val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
@@ -295,34 +277,11 @@ abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFil
cp
}
+ def resultAsURLs: Seq[URL] = result.asURLs
+
@deprecated("Use resultAsURLs instead of this one", "2.11.5")
def asURLs: List[URL] = resultAsURLs.toList
- protected def computeResult(): ResultClassPathType
+ private def computeResult(): ClassPath = AggregateClassPath(containers.toIndexedSeq)
}
-class PathResolver(settings: Settings, context: JavaContext)
- extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) {
-
- def this(settings: Settings) = this(settings, DefaultJavaContext)
-
- override protected def computeResult(): JavaClassPath =
- new JavaClassPath(containers.toIndexedSeq, context)
-}
-
-class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath])
- extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) {
-
- def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings))
-
- override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq)
-}
-
-object PathResolverFactory {
-
- def create(settings: Settings): PathResolverResult =
- settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings)
- case ClassPathRepresentationType.Recursive => new PathResolver(settings)
- }
-}
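A minimal sketch of driving the simplified resolver (the classpath value is illustrative; assumes the jar exists on disk):

```scala
import scala.tools.nsc.Settings
import scala.tools.util.PathResolver

object ResolverSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    settings.classpath.value = "lib/scala-library.jar"
    val cp = new PathResolver(settings).result // an AggregateClassPath
    println(cp.asURLs.mkString("\n"))
    println(cp.findClassFile("scala.Option").isDefined)
  }
}
```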
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 0b98dc67da..22afd65d43 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -9,6 +9,6 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0_M4-1.0.13.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0-M4-1.0.14.jar"/>
<classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index 870d1da61d..b4450df4ef 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -6,8 +6,8 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0_M4-1.0.5.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-parser-combinators_2.12.0_M4-1.0.4.jar"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0_M4-1.0.13.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0-M4-1.0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-parser-combinators_2.12.0-M4-1.0.4.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/partest/scala-partest_2.12.0-M4-1.0.14.jar"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index 881b2b79ca..3635c85112 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -10,7 +10,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0_M4-1.0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/scaladoc/scala-xml_2.12.0-M4-1.0.5.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/intellij/README.md b/src/intellij/README.md
index dcad699d43..41fef04183 100644
--- a/src/intellij/README.md
+++ b/src/intellij/README.md
@@ -1,25 +1,25 @@
-# Building Scala in IntelliJ IDEA
-
-## Requirements
+# Developing Scala in IntelliJ IDEA
Use the latest IntelliJ release and install the Scala plugin from within the IDE.
-## Initial setup
+## Initial Setup
To create the IntelliJ project files:
- Run `sbt intellij`
- Open `src/intellij/scala.ipr` in IntelliJ
- - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry named "1.8" containing the Java 1.8 SDK
+ - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry
+ named "1.8" containing the Java 1.8 SDK (1.6 if you're on the Scala the 2.11.x branch)
-The project files are created by as copies of the `.SAMPLE` files, which are under version control.
-The actual IntelliJ project files are in `.gitignore` so that local changes are ignored.
+The project files are created as copies of the `.SAMPLE` files, which are under version
+control. The actual IntelliJ project files are in `.gitignore` so that local changes
+are ignored.
## Dependencies
For every module in the IntelliJ project there is a corresponding `-deps` library, for example, `compiler-deps` provides `ant.jar` for the compiler codebase.
The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again.
-This is necessary whenever the dependencies in the sbt build change, for example when the STARR version is updated.
+This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated.
Note that this command only patches the dependency lists; all other settings in the IntelliJ project definition are unchanged.
To overwrite the project definition files by copying the `.SAMPLE` files again, run `sbt intellijFromSample`.
@@ -33,17 +33,49 @@ When switching between 2.11.x and 2.12.x, make sure to run `sbt intellij`.
Note that the `Project SDK` is not updated in this process.
If you want to use the Java 1.6 SDK while working on 2.11.x you need to change it manually (`File` → `Project Structure` → `Project` → `Project SDK`).
-## Usage
+If you switch between 2.11.x and 2.12.x often, it makes sense to have a separate clone
+of the repository for each branch.
+
+## Incremental Compilation
+
+Run `Build` → `Make Project` to build all modules of the Scala repository (library,
+compiler, etc). Note that compilation in IntelliJ is performed in a single pass (no
+bootstrap), like the sbt build.
+
+Note that the output directory when compiling in IntelliJ is the same as for the
+sbt and (deprecated) ant builds. This allows you to build incrementally in IntelliJ
+and use the changes directly via the command-line scripts in `build/quick/bin/`.
+
+## Running JUnit Tests
+
+JUnit tests can be executed by right-clicking on a test class or test method and
+selecting "Run" or "Debug". The debugger will allow you to stop at breakpoints
+within the Scala library.
+
+It is possible to invoke the Scala compiler from a JUnit test (passing the source
+code as a string) and inspect the generated bytecode; see, for example,
+`scala.issues.BytecodeTest`. Debugging such a test is an easy way to stop at
+breakpoints within the Scala compiler.
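+
+A minimal sketch of such a test, assuming a hypothetical class and method name
+(the real tests live under `test/junit`); only the compiler entry points shown
+here are taken from the codebase:
+
+```scala
+import org.junit.Test
+import scala.tools.nsc.{Global, Settings}
+import scala.tools.nsc.io.VirtualDirectory
+import scala.reflect.internal.util.BatchSourceFile
+
+class CompileFromStringTest {
+  @Test def compilesASnippet(): Unit = {
+    val settings = new Settings()
+    settings.usejavacp.value = true   // reuse the classpath of the JUnit run
+    // keep the produced class files in memory instead of writing them to disk
+    settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None))
+    val global = new Global(settings)
+    val run = new global.Run
+    run.compileSources(List(new BatchSourceFile("Sample.scala", "class C { def f = 1 }")))
+    assert(!global.reporter.hasErrors)  // breakpoints anywhere in the compiler will be hit
+  }
+}
+```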
+
+## Running the Compiler and REPL
+
+You can create run/debug configurations to run the compiler and REPL directly within
+IntelliJ, which might accelerate development and debugging of the compiler.
-Compiling, running, JUnit tests and debugging should all work.
-You can work on the compiler, the standard library, and other components as well.
+To debug the Scala codebase you can also use a "Remote" debug configuration and pass
+the corresponding arguments to the JVM running the compiler / program.
-Note that compilation within IntelliJ is performed in a single pass.
-The code is compiled using the "STARR" (stable reference) compiler, as specified by `starr.version` in `versions.properties`.
-This is consistent with the sbt build.
+To run the compiler create an "Application" configuration with
+ - Main class: `scala.tools.nsc.Main`
+ - Program arguments: `-usejavacp -cp sandbox -d sandbox sandbox/Test.scala` (an example `Test.scala` is shown below)
+ - Working directory: the path of your checkout
+ - Use classpath of module: `compiler`
-Note that the output directory when compiling in IntelliJ is the same as for the sbt build.
-This allows building incrementally in IntelliJ and directly use the changes using the command-line scripts in `build/quick/bin/`.
+To run the REPL create an "Application" configuration with
+ - Main class: `scala.tools.nsc.MainGenericRunner`
+ - Program arguments: `-usejavacp`
+ - Working directory: the path of your checkout
+ - Use classpath of module: `repl`
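+
+The `sandbox` directory and `Test.scala` used in the compiler configuration above are
+your own scratch files, not part of the repository; any small program will do, for
+example:
+
+```scala
+// sandbox/Test.scala -- an arbitrary program to compile with the configuration above
+object Test {
+  def main(args: Array[String]): Unit =
+    println(List(1, 2, 3).map(_ * 2))
+}
+```
+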
## Updating the `.SAMPLE` files
diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE
index 8184b0e45b..79ad2808f6 100644
--- a/src/intellij/scala.ipr.SAMPLE
+++ b/src/intellij/scala.ipr.SAMPLE
@@ -77,7 +77,7 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12.0-M3-dc9effe/bundles/scala-xml_2.12.0-M3-dc9effe-1.0.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.12.0-M3-dc9effe/bundles/scala-parser-combinators_2.12.0-M3-dc9effe-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.12.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -100,7 +100,7 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12.0-M3-dc9effe/bundles/scala-xml_2.12.0-M3-dc9effe-1.0.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.12.0-M3-dc9effe/bundles/scala-parser-combinators_2.12.0-M3-dc9effe-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.12.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12.0-M3-dc9effe/jars/scala-partest_2.12.0-M3-dc9effe-1.0.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
@@ -126,7 +126,7 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12.0-M3-dc9effe/bundles/scala-xml_2.12.0-M3-dc9effe-1.0.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.12.0-M3-dc9effe/bundles/scala-parser-combinators_2.12.0-M3-dc9effe-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.12.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12.0-M3-dc9effe/jars/scala-partest_2.12.0-M3-dc9effe-1.0.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
@@ -159,7 +159,7 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12.0-M3-dc9effe/bundles/scala-xml_2.12.0-M3-dc9effe-1.0.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.12.0-M3-dc9effe/bundles/scala-parser-combinators_2.12.0-M3-dc9effe-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.12.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -303,7 +303,7 @@
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-asm/bundles/scala-asm-5.0.4-scala-3.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-xml_2.12.0-M3-dc9effe/bundles/scala-xml_2.12.0-M3-dc9effe-1.0.5.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-parser-combinators_2.12.0-M3-dc9effe/bundles/scala-parser-combinators_2.12.0-M3-dc9effe-1.0.4.jar!/" />
- <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.12.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.14.1.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-partest_2.12.0-M3-dc9effe/jars/scala-partest_2.12.0-M3-dc9effe-1.0.13.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/com.googlecode.java-diff-utils/diffutils/jars/diffutils-1.3.0.jar!/" />
<root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
@@ -312,4 +312,4 @@
<SOURCES />
</library>
</component>
-</project> \ No newline at end of file
+</project>
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 9cd38ed148..f3a96fb333 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -19,7 +19,7 @@ package scala
*/
trait Product extends Any with Equals {
/** The n^th^ element of this product, 0-based. In other words, for a
- * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`.
+ * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`.
*
* @param n the index of the element to return
* @throws IndexOutOfBoundsException
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 6bb1f116fe..ecf3326c7f 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -68,6 +68,8 @@ object BitSet extends BitSetFactory[BitSet] {
/** The empty bitset */
val empty: BitSet = new BitSet1(0L)
+ private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b)
+
/** A builder that takes advantage of mutable BitSets. */
def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
private[this] val b = new mutable.BitSet
@@ -84,7 +86,7 @@ object BitSet extends BitSetFactory[BitSet] {
val len = elems.length
if (len == 0) empty
else if (len == 1) new BitSet1(elems(0))
- else if (len == 2) new BitSet2(elems(0), elems(1))
+ else if (len == 2) createSmall(elems(0), elems(1))
else {
val a = new Array[Long](len)
Array.copy(elems, 0, a, 0, len)
@@ -99,7 +101,7 @@ object BitSet extends BitSetFactory[BitSet] {
val len = elems.length
if (len == 0) empty
else if (len == 1) new BitSet1(elems(0))
- else if (len == 2) new BitSet2(elems(0), elems(1))
+ else if (len == 2) createSmall(elems(0), elems(1))
else new BitSetN(elems)
}
@@ -109,7 +111,7 @@ object BitSet extends BitSetFactory[BitSet] {
protected def word(idx: Int) = if (idx == 0) elems else 0L
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet1(w)
- else if (idx == 1) new BitSet2(elems, w)
+ else if (idx == 1) createSmall(elems, w)
else fromBitMaskNoCopy(updateArray(Array(elems), idx, w))
override def head: Int =
if (elems == 0L) throw new NoSuchElementException("Empty BitSet")
@@ -124,7 +126,7 @@ object BitSet extends BitSetFactory[BitSet] {
protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet2(w, elems1)
- else if (idx == 1) new BitSet2(elems0, w)
+ else if (idx == 1) createSmall(elems0, w)
else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w))
override def head: Int =
if (elems0 == 0L) {
@@ -135,7 +137,7 @@ object BitSet extends BitSetFactory[BitSet] {
override def tail: BitSet =
if (elems0 == 0L) {
if (elems1 == 0L) throw new NoSuchElementException("Empty BitSet")
- new BitSet2(elems0, elems1 - java.lang.Long.lowestOneBit(elems1))
+ createSmall(elems0, elems1 - java.lang.Long.lowestOneBit(elems1))
}
else new BitSet2(elems0 - java.lang.Long.lowestOneBit(elems0), elems1)
}
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index e1bcc0711c..589f8bbba9 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala
package collection
package immutable
@@ -15,214 +13,154 @@ package immutable
import generic._
import scala.annotation.tailrec
-/** $factoryInfo
- * @since 1
- * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]]
- * section on `List Maps` for more information.
- *
- * Note that `ListMap` is built in reverse order to canonical traversal order (traversal order is oldest first).
- * Thus, `head` and `tail` are O(n). To rapidly partition a `ListMap` into elements, use `last` and `init` instead. These are O(1).
- *
- * @define Coll immutable.ListMap
- * @define coll immutable list map
- */
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list map with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]]
+ * section on `List Maps` for more information.
+ * @since 1
+ * @define Coll ListMap
+ * @define coll list map
+ */
object ListMap extends ImmutableMapFactory[ListMap] {
- /** $mapCanBuildFromInfo */
+
+ /**
+ * $mapCanBuildFromInfo
+ */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] =
new MapCanBuildFrom[A, B]
+
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
@SerialVersionUID(-8256686706655863282L)
- private object EmptyListMap extends ListMap[Any, Nothing] {
- override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
- override def contains(key: Any) = false
- override def last: (Any, Nothing) = throw new NoSuchElementException("Empty ListMap")
- override def init: ListMap[Any, Nothing] = throw new NoSuchElementException("Empty ListMap")
- }
+ private object EmptyListMap extends ListMap[Any, Nothing]
}
-/** This class implements immutable maps using a list-based data structure, which preserves insertion order.
- * Instances of `ListMap` represent empty maps; they can be either created by
- * calling the constructor directly, or by applying the function `ListMap.empty`.
- *
- * @tparam A the type of the keys in this list map.
- * @tparam B the type of the values associated with the keys.
- *
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.0, 01/01/2007
- * @since 1
- * @define Coll immutable.ListMap
- * @define coll immutable list map
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
+/**
+ * This class implements immutable maps using a list-based data structure. List map iterators and
+ * traversal methods visit key-value pairs in the order they were first inserted.
+ *
+ * Entries are stored internally in reversed insertion order, which means the newest key is at the
+ * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init`
+ * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes
+ * this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListMap` represent empty maps; they can be either created by calling the
+ * constructor directly, or by applying the function `ListMap.empty`.
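+ *
+ * A small usage sketch of the behavior described above (results shown as comments):
+ * {{{
+ * val m = ListMap("a" -> 1, "b" -> 2) + ("c" -> 3)
+ * m.toList  // List((a,1), (b,2), (c,3)) -- traversal order is insertion order
+ * m.last    // (c,3), O(1): the most recently added entry sits at the internal head
+ * m.head    // (a,1), O(n)
+ * }}}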
+ *
+ * @tparam A the type of the keys contained in this list map
+ * @tparam B the type of the values associated with the keys
+ *
+ * @author Matthias Zenger
+ * @author Martin Odersky
+ * @version 2.0, 01/01/2007
+ * @since 1
+ * @define Coll ListMap
+ * @define coll list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
@SerialVersionUID(301002838095710379L)
-sealed class ListMap[A, +B]
-extends AbstractMap[A, B]
- with Map[A, B]
- with MapLike[A, B, ListMap[A, B]]
- with Serializable {
+sealed class ListMap[A, +B] extends AbstractMap[A, B]
+ with Map[A, B]
+ with MapLike[A, B, ListMap[A, B]]
+ with Serializable {
override def empty = ListMap.empty
- /** Returns the number of mappings in this map.
- *
- * @return number of mappings in this map.
- */
override def size: Int = 0
+ override def isEmpty: Boolean = true
- /** Checks if this map maps `key` to a value and return the
- * value if it exists.
- *
- * @param key the key of the mapping of interest
- * @return the value of the mapping, if it exists
- */
def get(key: A): Option[B] = None
- /** This method allows one to create a new map with an additional mapping
- * from `key` to `value`. If the map contains already a mapping for `key`,
- * it will be overridden by this function.
- *
- * @param key the key element of the updated entry.
- * @param value the value element of the updated entry.
- */
- override def updated [B1 >: B] (key: A, value: B1): ListMap[A, B1] =
- new Node[B1](key, value)
-
- /** Add a key/value pair to this map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
- */
- def + [B1 >: B] (kv: (A, B1)): ListMap[A, B1] = updated(kv._1, kv._2)
-
- /** Adds two or more elements to this collection and returns
- * either the collection itself (if it is mutable), or a new collection
- * with the added elements.
- *
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- */
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): ListMap[A, B1] =
- this + elem1 + elem2 ++ elems
-
- /** Adds a number of elements provided by a traversable object
- * and returns a new collection with the added elements.
- *
- * @param xs the traversable object.
- */
+ override def updated[B1 >: B](key: A, value: B1): ListMap[A, B1] = new Node[B1](key, value)
+
+ def +[B1 >: B](kv: (A, B1)): ListMap[A, B1] = new Node[B1](kv._1, kv._2)
+ def -(key: A): ListMap[A, B] = this
+
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
- ((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
-
- /** This creates a new mapping without the given `key`.
- * If the map does not contain a mapping for the given key, the
- * method returns the same map.
- *
- * @param key a map without a mapping for the given key.
- */
- def - (key: A): ListMap[A, B] = this
-
- /** Returns an iterator over key-value pairs.
- */
- def iterator: Iterator[(A,B)] =
- new AbstractIterator[(A,B)] {
- var self: ListMap[A,B] = ListMap.this
- def hasNext = !self.isEmpty
- def next(): (A,B) =
- if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.next; res }
- }.toList.reverseIterator
-
- protected def key: A = throw new NoSuchElementException("empty map")
- protected def value: B = throw new NoSuchElementException("empty map")
- protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
-
- /** This class represents an entry in the `ListMap`.
- */
+ if (xs.isEmpty) this
+ else ((repr: ListMap[A, B1]) /: xs) (_ + _)
+
+ def iterator: Iterator[(A, B)] = {
+ def reverseList = {
+ var curr: ListMap[A, B] = this
+ var res: List[(A, B)] = Nil
+ while (!curr.isEmpty) {
+ res = (curr.key, curr.value) :: res
+ curr = curr.next
+ }
+ res
+ }
+ reverseList.iterator
+ }
+
+ protected def key: A = throw new NoSuchElementException("key of empty map")
+ protected def value: B = throw new NoSuchElementException("value of empty map")
+ protected def next: ListMap[A, B] = throw new NoSuchElementException("next of empty map")
+
+ override def stringPrefix = "ListMap"
+
+ /**
+ * Represents an entry in the `ListMap`.
+ */
@SerialVersionUID(-6453056603889598734L)
protected class Node[B1 >: B](override protected val key: A,
override protected val value: B1) extends ListMap[A, B1] with Serializable {
- /** Returns the number of mappings in this map.
- *
- * @return number of mappings.
- */
- override def size: Int = size0(this, 0)
-
- // to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
-
- /** Is this an empty map?
- *
- * @return true, iff the map is empty.
- */
- override def isEmpty: Boolean = false
- /** Retrieves the value which is associated with the given key. This
- * method throws an exception if there is no mapping from the given
- * key to a value.
- *
- * @param k the key
- * @return the value associated with the given key.
- */
- override def apply(k: A): B1 = apply0(this, k)
-
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
- if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
- else if (k == cur.key) cur.value
- else apply0(cur.next, k)
+ override def size: Int = sizeInternal(this, 0)
+
+ @tailrec private[this] def sizeInternal(cur: ListMap[A, B1], acc: Int): Int =
+ if (cur.isEmpty) acc
+ else sizeInternal(cur.next, acc + 1)
- /** Checks if this map maps `key` to a value and return the
- * value if it exists.
- *
- * @param k the key of the mapping of interest
- * @return the value of the mapping, if it exists
- */
- override def get(k: A): Option[B1] = get0(this, k)
+ override def isEmpty: Boolean = false
+
+ override def apply(k: A): B1 = applyInternal(this, k)
- @tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
- if (k == cur.key) Some(cur.value)
- else if (cur.next.nonEmpty) get0(cur.next, k) else None
+ @tailrec private[this] def applyInternal(cur: ListMap[A, B1], k: A): B1 =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
+ else if (k == cur.key) cur.value
+ else applyInternal(cur.next, k)
+ override def get(k: A): Option[B1] = getInternal(this, k)
- override def contains(key: A): Boolean = contains0(this, key)
+ @tailrec private[this] def getInternal(cur: ListMap[A, B1], k: A): Option[B1] =
+ if (cur.isEmpty) None
+ else if (k == cur.key) Some(cur.value)
+ else getInternal(cur.next, k)
- @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
- if (k == cur.key) true
- else if (cur.next.nonEmpty) contains0(cur.next, k)
- else false
+ override def contains(k: A): Boolean = containsInternal(this, k)
+ @tailrec private[this] def containsInternal(cur: ListMap[A, B1], k: A): Boolean =
+ if (cur.isEmpty) false
+ else if (k == cur.key) true
+ else containsInternal(cur.next, k)
- /** This method allows one to create a new map with an additional mapping
- * from `key` to `value`. If the map contains already a mapping for `key`,
- * it will be overridden by this function.
- */
- override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
+ override def updated[B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
val m = this - k
new m.Node[B2](k, v)
}
+ override def +[B2 >: B1](kv: (A, B2)): ListMap[A, B2] = {
+ val m = this - kv._1
+ new m.Node[B2](kv._1, kv._2)
+ }
- /** Creates a new mapping without the given `key`.
- * If the map does not contain a mapping for the given key, the
- * method returns the same map.
- */
- override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil)
+ override def -(k: A): ListMap[A, B1] = removeInternal(k, this, Nil)
- @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
- if (cur.isEmpty)
- acc.last
- else if (k == cur.key)
- (cur.next /: acc) {
- case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
- }
- else
- remove0(k, cur.next, cur::acc)
+ @tailrec private[this] def removeInternal(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) }
+ else removeInternal(k, cur.next, cur :: acc)
override protected def next: ListMap[A, B1] = ListMap.this
override def last: (A, B1) = (key, value)
-
override def init: ListMap[A, B1] = next
}
}
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index d20e7bc6d2..d9795e9161 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -12,174 +12,125 @@ package immutable
import generic._
import scala.annotation.tailrec
-import mutable.{Builder, ReusableBuilder}
-/** $factoryInfo
- * @define Coll immutable.ListSet
- * @define coll immutable list set
- * @since 1
- */
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list set with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @since 1
+ * @define Coll ListSet
+ * @define coll list set
+ */
object ListSet extends ImmutableSetFactory[ListSet] {
- /** setCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A]
- override def newBuilder[A]: Builder[A, ListSet[A]] = new ListSetBuilder[A]
+ /**
+ * $setCanBuildFromInfo
+ */
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] =
+ setCanBuildFrom[A]
- private object EmptyListSet extends ListSet[Any] { }
+ @SerialVersionUID(5010379588739277132L)
+ private object EmptyListSet extends ListSet[Any]
private[collection] def emptyInstance: ListSet[Any] = EmptyListSet
-
- /** A custom builder because forgetfully adding elements one at
- * a time to a list backed set puts the "squared" in N^2. There is a
- * temporary space cost, but it's improbable a list backed set could
- * become large enough for this to matter given its pricy element lookup.
- *
- * This builder is reusable.
- */
- class ListSetBuilder[Elem](initial: ListSet[Elem]) extends ReusableBuilder[Elem, ListSet[Elem]] {
- def this() = this(empty[Elem])
- protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse
- protected val seen = new mutable.HashSet[Elem] ++= initial
-
- def +=(x: Elem): this.type = {
- if (!seen(x)) {
- elems += x
- seen += x
- }
- this
- }
- def clear() = { elems.clear() ; seen.clear() }
- def result() = elems.foldLeft(empty[Elem])(_ unchecked_+ _)
- }
}
-/** This class implements immutable sets using a list-based data
- * structure. Instances of `ListSet` represent
- * empty sets; they can be either created by calling the constructor
- * directly, or by applying the function `ListSet.empty`.
- *
- * @tparam A the type of the elements contained in this list set.
- *
- * @author Matthias Zenger
- * @version 1.0, 09/07/2003
- * @since 1
- * @define Coll immutable.ListSet
- * @define coll immutable list set
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
+/**
+ * This class implements immutable sets using a list-based data structure. List set iterators and
+ * traversal methods visit elements in the order they were first inserted.
+ *
+ * Elements are stored internally in reversed insertion order, which means the newest element is at
+ * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
+ * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
+ * makes this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListSet` represent empty sets; they can be either created by calling the
+ * constructor directly, or by applying the function `ListSet.empty`.
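+ *
+ * A small usage sketch of the behavior described above (results shown as comments):
+ * {{{
+ * val s = ListSet(1, 2) + 3 + 2   // re-adding an existing element leaves the set unchanged
+ * s.toList  // List(1, 2, 3) -- traversal order is insertion order
+ * s.last    // 3, O(1)
+ * s.head    // 1, O(n)
+ * }}}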
+ *
+ * @tparam A the type of the elements contained in this list set
+ *
+ * @author Matthias Zenger
+ * @version 1.0, 09/07/2003
+ * @since 1
+ * @define Coll ListSet
+ * @define coll list set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(-8417059026623606218L)
sealed class ListSet[A] extends AbstractSet[A]
- with Set[A]
- with GenericSetTemplate[A, ListSet]
- with SetLike[A, ListSet[A]]
- with Serializable{ self =>
+ with Set[A]
+ with GenericSetTemplate[A, ListSet]
+ with SetLike[A, ListSet[A]]
+ with Serializable {
+
override def companion: GenericCompanion[ListSet] = ListSet
- /** Returns the number of elements in this set.
- *
- * @return number of set elements.
- */
override def size: Int = 0
override def isEmpty: Boolean = true
- /** Checks if this set contains element `elem`.
- *
- * @param elem the element to check for membership.
- * @return `'''true'''`, iff `elem` is contained in this set.
- */
def contains(elem: A): Boolean = false
- /** This method creates a new set with an additional element.
- */
- def + (elem: A): ListSet[A] = new Node(elem)
-
- /** `-` can be used to remove a single element.
- */
- def - (elem: A): ListSet[A] = this
+ def +(elem: A): ListSet[A] = new Node(elem)
+ def -(elem: A): ListSet[A] = this
- /** If we are bulk adding elements and desire a runtime measured in
- * sub-interstellar time units, we better find a way to avoid traversing
- * the collection on each element. That's what the custom builder does,
- * so we take the easy way out and add ourselves and the argument to
- * a new builder.
- */
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else (new ListSet.ListSetBuilder(this) ++= xs.seq).result()
-
- private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
- private[ListSet] def unchecked_outer: ListSet[A] =
- throw new NoSuchElementException("Empty ListSet has no outer pointer")
-
- /** Creates a new iterator over all elements contained in this set.
- *
- * @throws java.util.NoSuchElementException
- * @return the new iterator
- */
- def iterator: Iterator[A] = new AbstractIterator[A] {
- var that: ListSet[A] = self
- def hasNext = that.nonEmpty
- def next: A =
- if (hasNext) {
- val res = that.head
- that = that.tail
- res
+ else (repr /: xs) (_ + _)
+
+ def iterator: Iterator[A] = {
+ def reverseList = {
+ var curr: ListSet[A] = this
+ var res: List[A] = Nil
+ while (!curr.isEmpty) {
+ res = curr.elem :: res
+ curr = curr.next
}
- else Iterator.empty.next()
+ res
+ }
+ reverseList.iterator
}
- /**
- * @throws java.util.NoSuchElementException
- */
- override def head: A = throw new NoSuchElementException("Set has no elements")
+ protected def elem: A = throw new NoSuchElementException("elem of empty set")
+ protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set")
- /**
- * @throws java.util.NoSuchElementException
- */
- override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
override def stringPrefix = "ListSet"
- /** Represents an entry in the `ListSet`.
- */
- protected class Node(override val head: A) extends ListSet[A] with Serializable {
- override private[ListSet] def unchecked_outer = self
+ /**
+ * Represents an entry in the `ListSet`.
+ */
+ @SerialVersionUID(-787710309854855049L)
+ protected class Node(override protected val elem: A) extends ListSet[A] with Serializable {
- /** Returns the number of elements in this set.
- *
- * @return number of set elements.
- */
override def size = sizeInternal(this, 0)
- @tailrec private def sizeInternal(n: ListSet[A], acc: Int): Int =
+
+ @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int =
if (n.isEmpty) acc
- else sizeInternal(n.unchecked_outer, acc + 1)
+ else sizeInternal(n.next, acc + 1)
- /** Checks if this set is empty.
- *
- * @return true, iff there is no element in the set.
- */
override def isEmpty: Boolean = false
- /** Checks if this set contains element `elem`.
- *
- * @param e the element to check for membership.
- * @return `'''true'''`, iff `elem` is contained in this set.
- */
override def contains(e: A) = containsInternal(this, e)
- @tailrec private def containsInternal(n: ListSet[A], e: A): Boolean =
- !n.isEmpty && (n.head == e || containsInternal(n.unchecked_outer, e))
- /** This method creates a new set with an additional element.
- */
+ @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean =
+ !n.isEmpty && (n.elem == e || containsInternal(n.next, e))
+
override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e)
- /** `-` can be used to remove a single element from a set.
- */
- override def -(e: A): ListSet[A] = if (e == head) self else {
- val tail = self - e; new tail.Node(head)
- }
+ override def -(e: A): ListSet[A] = removeInternal(e, this, Nil)
- override def tail: ListSet[A] = self
- }
+ @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.elem) (cur.next /: acc) { case (t, h) => new t.Node(h.elem) }
+ else removeInternal(k, cur.next, cur :: acc)
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
+ override protected def next: ListSet[A] = ListSet.this
+
+ override def last: A = elem
+ override def init: ListSet[A] = next
+ }
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index d92db68912..8a9df0e862 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -100,11 +100,13 @@ self =>
/** Return all lines in this string in an iterator, including trailing
* line end characters.
*
- * The number of strings returned is one greater than the number of line
- * end characters in this string. For an empty string, a single empty
- * line is returned. A line end character is one of
- * - `LF` - line feed (`0x0A` hex)
- * - `FF` - form feed (`0x0C` hex)
+ * This method is analogous to `s.split(EOL).toIterator`,
+ * except that any existing line endings are preserved in the result strings,
+ * and the empty string yields an empty iterator.
+ *
+ * A line end character is one of
+ * - `LF` - line feed (`0x0A`)
+ * - `FF` - form feed (`0x0C`)
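+ *
+ * A brief sketch of the behavior described above (results shown as comments):
+ * {{{
+ * "a\nb\nc".linesWithSeparators.toList  // List("a\n", "b\n", "c")
+ * "".linesWithSeparators.hasNext        // false
+ * }}}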
*/
def linesWithSeparators: Iterator[String] = new AbstractIterator[String] {
val str = self.toString
@@ -121,14 +123,14 @@ self =>
}
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e., apply `.stripLineEnd` to all lines
+ * end characters; i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
def lines: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e., apply `.stripLineEnd` to all lines
+ * end characters; i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
@deprecated("Use `lines` instead.","2.11.0")
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b4112c03dd..d5b7673c37 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -200,9 +200,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
- def compare(x: A, y: A) = ord.compare(y, x)
- })
+ val revq = new PriorityQueue[A]()(ord.reverse)
for (i <- 1 until resarr.length) revq += resarr(i)
revq
}
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index b10aad0ecc..d9d3d572e8 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -29,8 +29,8 @@ import scala.reflect.ClassTag
* val f: Future[String] = Future {
* s + " future!"
* }
- * f onSuccess {
- * case msg => println(msg)
+ * f foreach {
+ * msg => println(msg)
* }
* }}}
*
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index 8aa4073a51..64f491d7f0 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -13,21 +13,136 @@ package control
import scala.reflect.{ ClassTag, classTag }
import scala.language.implicitConversions
-
/** Classes representing the components of exception handling.
- * Each class is independently composable. Some example usages:
+ *
+ * Each class is independently composable.
+ *
+ * This class differs from [[scala.util.Try]] in that it focuses on composing exception handlers rather than
+ * composing behavior. All behavior should be composed first and fed to a [[Catch]] object using one of the
+ * `opt`, `either` or `withTry` methods. Taken together the classes provide a DSL for composing catch and finally
+ * behaviors.
+ *
+ * === Examples ===
+ *
+ * Create a `Catch` which handles specified exceptions.
* {{{
* import scala.util.control.Exception._
* import java.net._
*
* val s = "http://www.scala-lang.org/"
- * val x1 = catching(classOf[MalformedURLException]) opt new URL(s)
- * val x2 = catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
+ *
+ * // Some(http://www.scala-lang.org/)
+ * val x1: Option[URL] = catching(classOf[MalformedURLException]) opt new URL(s)
+ *
+ * // Right(http://www.scala-lang.org/)
+ * val x2: Either[Throwable,URL] =
+ * catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
+ *
+ * // Success(http://www.scala-lang.org/)
+ * val x3: Try[URL] = catching(classOf[MalformedURLException], classOf[NullPointerException]) withTry new URL(s)
+ *
+ * val defaultUrl = new URL("http://example.com")
+ * // URL(http://example.com) because htt/xx throws MalformedURLException
+ * val x4: URL = failAsValue(classOf[MalformedURLException])(defaultUrl)(new URL("htt/xx"))
+ * }}}
+ *
+ * Create a `Catch` which logs exceptions using `handling` and `by`.
+ * {{{
+ * def log(t: Throwable): Unit = t.printStackTrace
+ *
+ * val withThrowableLogging: Catch[Unit] = handling(classOf[MalformedURLException]) by (log)
+ *
+ * def printUrl(url: String) : Unit = {
+ * val con = new URL(url) openConnection()
+ * val source = scala.io.Source.fromInputStream(con.getInputStream())
+ * source.getLines.foreach(println)
+ * }
+ *
+ * val badUrl = "htt/xx"
+ * // Prints stacktrace,
+ * // java.net.MalformedURLException: no protocol: htt/xx
+ * // at java.net.URL.<init>(URL.java:586)
+ * withThrowableLogging { printUrl(badUrl) }
+ *
+ * val goodUrl = "http://www.scala-lang.org/"
+ * // Prints page content,
+ * // &lt;!DOCTYPE html&gt;
+ * // &lt;html&gt;
+ * withThrowableLogging { printUrl(goodUrl) }
+ * }}}
+ *
+ * Use `unwrapping` to create a `Catch` that unwraps exceptions before rethrowing.
+ * {{{
+ * class AppException(cause: Throwable) extends RuntimeException(cause)
+ *
+ * val unwrappingCatch: Catch[Nothing] = unwrapping(classOf[AppException])
+ *
+ * def calcResult: Int = throw new AppException(new NullPointerException)
+ *
+ * // Throws NPE not AppException,
+ * // java.lang.NullPointerException
+ * // at .calcResult(&lt;console&gt;:17)
+ * val result = unwrappingCatch(calcResult)
* }}}
*
- * This class differs from `scala.util.Try` in that it focuses on composing exception handlers rather than
- * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
- * `opt` or `either` methods.
+ * Use `failAsValue` to provide a default when a specified exception is caught.
+ *
+ * {{{
+ * val inputDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0)
+ * val candidatePick = "seven" // scala.io.StdIn.readLine()
+ *
+ * // Int = 0
+ * val pick = inputDefaulting(candidatePick.toInt)
+ * }}}
+ *
+ * Compose multiple `Catch`s with `or` to build a `Catch` that provides default values varied by exception.
+ * {{{
+ * val formatDefaulting: Catch[Int] = failAsValue(classOf[NumberFormatException])(0)
+ * val nullDefaulting: Catch[Int] = failAsValue(classOf[NullPointerException])(-1)
+ * val otherDefaulting: Catch[Int] = nonFatalCatch withApply(_ => -100)
+ *
+ * val combinedDefaulting: Catch[Int] = formatDefaulting or nullDefaulting or otherDefaulting
+ *
+ * def p(s: String): Int = s.length * s.toInt
+ *
+ * // Int = 0
+ * combinedDefaulting(p("tenty-nine"))
+ *
+ * // Int = -1
+ * combinedDefaulting(p(null: String))
+ *
+ * // Int = -100
+ * combinedDefaulting(throw new IllegalStateException)
+ *
+ * // Int = 22
+ * combinedDefaulting(p("11"))
+ * }}}
+ *
+ * @groupname composition-catch Catch behavior composition
+ * @groupprio composition-catch 10
+ * @groupdesc composition-catch Build Catch objects from exception lists and catch logic
+ *
+ * @groupname composition-finally Finally behavior composition
+ * @groupprio composition-finally 20
+ * @groupdesc composition-finally Build Catch objects from finally logic
+ *
+ * @groupname canned-behavior General purpose catch objects
+ * @groupprio canned-behavior 30
+ * @groupdesc canned-behavior Catch objects with predefined behavior. Use combinator methods to compose additional behavior.
+ *
+ * @groupname dsl DSL behavior composition
+ * @groupprio dsl 40
+ * @groupdesc dsl Expressive Catch behavior composition
+ *
+ * @groupname composition-catch-promiscuously Promiscuous Catch behaviors
+ * @groupprio composition-catch-promiscuously 50
+ * @groupdesc composition-catch-promiscuously Useful if catching `ControlThrowable` or `InterruptedException` is required.
+ *
+ * @groupname logic-container Logic Containers
+ * @groupprio logic-container 60
+ * @groupdesc logic-container Containers for catch and finally behavior.
+ *
+ * @define protectedExceptions `ControlThrowable` or `InterruptedException`
*
* @author Paul Phillips
*/
@@ -51,6 +166,7 @@ object Exception {
/** !!! Not at all sure of every factor which goes into this,
* and/or whether we need multiple standard variations.
+ * @return true if `x` is $protectedExceptions otherwise false.
*/
def shouldRethrow(x: Throwable): Boolean = x match {
case _: ControlThrowable => true
@@ -70,7 +186,9 @@ object Exception {
override def toString() = name + "(" + desc + ")"
}
- /** A container class for finally code. */
+ /** A container class for finally code.
+ * @group logic-container
+ */
class Finally private[Exception](body: => Unit) extends Described {
protected val name = "Finally"
@@ -87,6 +205,7 @@ object Exception {
* @param pf Partial function used when applying catch logic to determine result value
* @param fin Finally logic which if defined will be invoked after catch logic
* @param rethrow Predicate on throwables determining when to rethrow a caught [[Throwable]]
+ * @group logic-container
*/
class Catch[+T](
val pf: Catcher[T],
@@ -153,23 +272,30 @@ object Exception {
final def nonFatalCatcher[T]: Catcher[T] = mkThrowableCatcher({ case NonFatal(_) => true; case _ => false }, throw _)
final def allCatcher[T]: Catcher[T] = mkThrowableCatcher(_ => true, throw _)
- /** The empty `Catch` object. */
+ /** The empty `Catch` object.
+ * @group canned-behavior
+ **/
final val noCatch: Catch[Nothing] = new Catch(nothingCatcher) withDesc "<nothing>"
- /** A `Catch` object which catches everything. */
+ /** A `Catch` object which catches everything.
+ * @group canned-behavior
+ **/
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
- /** A `Catch` object which catches non-fatal exceptions. */
+ /** A `Catch` object which catches non-fatal exceptions.
+ * @group canned-behavior
+ **/
final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
/** Creates a `Catch` object which will catch any of the supplied exceptions.
* Since the returned `Catch` object has no specific logic defined and will simply
- * rethrow the exceptions it catches, you will typically want to call `opt` or
- * `either` on the return value, or assign custom logic by calling "withApply".
+ * rethrow the exceptions it catches, you will typically want to call `opt`,
+ * `either` or `withTry` on the return value, or assign custom logic by calling "withApply".
*
* Note that `Catch` objects automatically rethrow `ControlExceptions` and others
* which should only be caught in exceptional circumstances. If you really want
* to catch exactly what you specify, use `catchingPromiscuously` instead.
+ * @group composition-catch
*/
def catching[T](exceptions: Class[_]*): Catch[T] =
new Catch(pfFromExceptions(exceptions : _*)) withDesc (exceptions map (_.getName) mkString ", ")
@@ -178,42 +304,56 @@ object Exception {
/** Creates a `Catch` object which will catch any of the supplied exceptions.
* Unlike "catching" which filters out those in shouldRethrow, this one will
- * catch whatever you ask of it: `ControlThrowable`, `InterruptedException`,
- * `OutOfMemoryError`, you name it.
+ * catch whatever you ask of it including $protectedExceptions.
+ * @group composition-catch-promiscuously
*/
def catchingPromiscuously[T](exceptions: Class[_]*): Catch[T] = catchingPromiscuously(pfFromExceptions(exceptions : _*))
def catchingPromiscuously[T](c: Catcher[T]): Catch[T] = new Catch(c, None, _ => false)
- /** Creates a `Catch` object which catches and ignores any of the supplied exceptions. */
+ /** Creates a `Catch` object which catches and ignores any of the supplied exceptions.
+ * @group composition-catch
+ */
def ignoring(exceptions: Class[_]*): Catch[Unit] =
catching(exceptions: _*) withApply (_ => ())
- /** Creates a `Catch` object which maps all the supplied exceptions to `None`. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to `None`.
+ * @group composition-catch
+ */
def failing[T](exceptions: Class[_]*): Catch[Option[T]] =
catching(exceptions: _*) withApply (_ => None)
- /** Creates a `Catch` object which maps all the supplied exceptions to the given value. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to the given value.
+ * @group composition-catch
+ */
def failAsValue[T](exceptions: Class[_]*)(value: => T): Catch[T] =
catching(exceptions: _*) withApply (_ => value)
+ class By[T,R](f: T => R) {
+ def by(x: T): R = f(x)
+ }
+
/** Returns a partially constructed `Catch` object, which you must give
- * an exception handler function as an argument to `by`. Example:
+ * an exception handler function as an argument to `by`.
+ * @example
* {{{
- * handling(ex1, ex2) by (_.printStackTrace)
+ * handling(classOf[MalformedURLException], classOf[NullPointerException]) by (_.printStackTrace)
* }}}
+ * @group dsl
*/
- class By[T,R](f: T => R) {
- def by(x: T): R = f(x)
- }
+ // TODO: Add return type
def handling[T](exceptions: Class[_]*) = {
def fun(f: Throwable => T) = catching(exceptions: _*) withApply f
new By[Throwable => T, Catch[T]](fun _)
}
- /** Returns a `Catch` object with no catch logic and the argument as `Finally`. */
+ /** Returns a `Catch` object with no catch logic and the argument as the finally logic.
+ * @group composition-finally
+ */
def ultimately[T](body: => Unit): Catch[T] = noCatch andFinally body
- /** Creates a `Catch` object which unwraps any of the supplied exceptions. */
+ /** Creates a `Catch` object which unwraps any of the supplied exceptions.
+ * @group composition-catch
+ */
def unwrapping[T](exceptions: Class[_]*): Catch[T] = {
def unwrap(x: Throwable): Throwable =
if (wouldMatch(x, exceptions) && x.getCause != null) unwrap(x.getCause)
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 6d3d015b1a..bd55fb5d04 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -182,6 +182,9 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year"
* }}}
*
+ * Group names supplied to the constructor are preferred to inline group names
+ * when retrieving matched groups by name. Not all platforms support inline names.
+ *
* This constructor does not support options as flags, which must be
* supplied as inline flags in the pattern string: `(?idmsux-idmsux)`.
*
@@ -578,6 +581,9 @@ object Regex {
*/
trait MatchData {
+ /** Basically, wraps a platform Matcher. */
+ protected def matcher: Matcher
+
/** The source from which the match originated */
val source: CharSequence
@@ -650,16 +656,25 @@ object Regex {
private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex
- /** Returns the group with given name.
+ /** Returns the group with the given name.
+ *
+ * Uses explicit group names when supplied; otherwise,
+ * queries the underlying implementation for inline named groups.
+ * Not all platforms support inline group names.
*
* @param id The group name
* @return The requested group
- * @throws NoSuchElementException if the requested group name is not defined
+ * @throws IllegalArgumentException if the requested group name is not defined
*/
- def group(id: String): String = nameToIndex.get(id) match {
- case None => throw new NoSuchElementException("group name "+id+" not defined")
- case Some(index) => group(index)
- }
+ def group(id: String): String = (
+ if (groupNames.isEmpty)
+ matcher group id
+ else
+ nameToIndex.get(id) match {
+ case Some(index) => group(index)
+ case None => matcher group id
+ }
+ )
/** The matched string; equivalent to `matched.toString`. */
override def toString = matched
@@ -667,7 +682,7 @@ object Regex {
/** Provides information about a successful match. */
class Match(val source: CharSequence,
- private[matching] val matcher: Matcher,
+ protected[matching] val matcher: Matcher,
val groupNames: Seq[String]) extends MatchData {
/** The index of the first matched character. */
diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 290b7b434e..532dfd2a73 100644
--- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -1,10 +1,10 @@
package scala.tools.partest
-import scala.tools.nsc.util.JavaClassPath
import scala.collection.JavaConverters._
-import scala.tools.asm.{ClassWriter, ClassReader}
+import scala.tools.asm.{ClassReader, ClassWriter}
import scala.tools.asm.tree._
-import java.io.{File => JFile, InputStream}
+import java.io.{InputStream, File => JFile}
+
import AsmNode._
/**
@@ -125,12 +125,16 @@ abstract class BytecodeTest {
cn
}
- protected lazy val classpath: JavaClassPath = {
- import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ protected lazy val classpath: scala.tools.nsc.util.ClassPath = {
+ import scala.tools.nsc.classpath.AggregateClassPath
+ import scala.tools.nsc.classpath.ClassPathFactory
import scala.tools.util.PathResolver.Defaults
+ import scala.tools.nsc.Settings
// logic inspired by scala.tools.util.PathResolver implementation
- val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
- new JavaClassPath(containers, DefaultJavaContext)
+ // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath
+ val factory = new ClassPathFactory(new Settings())
+ val containers = factory.classesInExpandedPath(Defaults.javaUserClassPath)
+ new AggregateClassPath(containers)
}
}
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index c069e2c198..412c49f571 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -113,7 +113,8 @@ trait Erasure {
def apply(tp: Type): Type = tp match {
case ConstantType(ct) =>
- if (ct.tag == ClazzTag) ConstantType(Constant(apply(ct.typeValue)))
+ // erase classOf[List[_]] to classOf[List]. special case for classOf[Unit], avoid erasing to classOf[BoxedUnit].
+ if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != UnitClass) ConstantType(Constant(apply(ct.typeValue)))
else tp
case st: ThisType if st.sym.isPackageClass =>
tp
@@ -165,7 +166,7 @@ trait Erasure {
/** The erasure |T| of a type T. This is:
*
- * - For a constant type, itself.
+ * - For a constant type classOf[T], classOf[|T|], unless T is Unit. For any other constant type, itself.
* - For a type-bounds structure, the erasure of its upper bound.
* - For every other singleton type, the erasure of its supertype.
* - For a typeref scala.Array+[T] where T is an abstract type, AnyRef.
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 37b07ce775..9b0d66f41c 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -613,7 +613,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
loadBytes[String]("scala.reflect.ScalaSignature") match {
case Some(ssig) =>
info(s"unpickling Scala $clazz and $module, owner = ${clazz.owner}")
- val bytes = ssig.getBytes
+ val bytes = ssig.getBytes(java.nio.charset.StandardCharsets.UTF_8)
val len = ByteCodecs.decode(bytes)
assignAssociatedFile(clazz, module, jclazz)
unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
@@ -622,7 +622,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
info(s"unpickling Scala $clazz and $module with long Scala signature")
- val encoded = slsig flatMap (_.getBytes)
+ val encoded = slsig flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8))
val len = ByteCodecs.decode(encoded)
val decoded = encoded.take(len)
assignAssociatedFile(clazz, module, jclazz)
diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
index 1f2b0952e7..95964e18d9 100644
--- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
+++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala
@@ -11,10 +11,9 @@ import java.util.{Collection => JCollection, List => JList}
import _root_.jline.{console => jconsole}
import jline.console.ConsoleReader
-import jline.console.completer.{CompletionHandler, Completer}
+import jline.console.completer.{CandidateListCompletionHandler, Completer, CompletionHandler}
import jconsole.history.{History => JHistory}
-
import scala.tools.nsc.interpreter
import scala.tools.nsc.interpreter.{Completion, NoCompletion}
import scala.tools.nsc.interpreter.Completion.Candidates
@@ -133,32 +132,15 @@ private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter
newCursor
}
}
+ getCompletionHandler match {
+ case clch: CandidateListCompletionHandler => clch.setPrintSpaceAfterFullCompletion(false)
+ }
completion match {
case NoCompletion => ()
case _ => this addCompleter completer
}
- // This is a workaround for https://github.com/jline/jline2/issues/208
- // and should not be necessary once we upgrade to JLine 2.13.1
- ///
- // Test by:
- // scala> {" ".char}<LEFT><TAB>
- //
- // And checking we don't get an extra } on the line.
- ///
- val handler = getCompletionHandler
- setCompletionHandler(new CompletionHandler {
- override def complete(consoleReader: ConsoleReader, list: JList[CharSequence], i: Int): Boolean = {
- try {
- handler.complete(consoleReader, list, i)
- } finally if (getCursorBuffer.cursor != getCursorBuffer.length()) {
- print(" ")
- getCursorBuffer.write(' ')
- backspace()
- }
- }
- })
setAutoprintThreshold(400) // max completion candidates without warning
}
}
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 893bde42ab..8c91242b36 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -11,18 +11,18 @@ import PartialFunction.cond
import scala.language.implicitConversions
import scala.beans.BeanProperty
import scala.collection.mutable
-import scala.concurrent.{ Future, ExecutionContext }
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
-import scala.tools.util.PathResolverFactory
+import scala.concurrent.{ExecutionContext, Future}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
+import scala.tools.nsc.typechecker.{StructuredTypeStrings, TypeStrings}
import scala.tools.nsc.util._
import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
-import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import javax.script.{AbstractScriptEngine, Bindings, Compilable, CompiledScript, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException}
import java.net.URL
+import scala.tools.util.PathResolver
/** An interpreter for Scala code.
*
@@ -91,7 +91,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
- else PathResolverFactory.create(settings).resultAsURLs // the compiler's classpath
+ else new PathResolver(settings).resultAsURLs // the compiler's classpath
)
def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index fdbd93d862..0cda9c4da3 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -127,7 +127,11 @@ trait Imports {
case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
case rh :: rest =>
import rh.handler._
- val newWanted = wanted ++ referencedNames -- definedNames -- importedNames
+ val augment = rh match {
+ case ReqAndHandler(_, _: ImportHandler) => referencedNames // for "import a.b", add "a" to names to be resolved
+ case _ => Nil
+ }
+ val newWanted = wanted ++ augment -- definedNames -- importedNames
rh :: select(rest, newWanted)
}
}
@@ -161,6 +165,8 @@ trait Imports {
val tempValLines = mutable.Set[Int]()
for (ReqAndHandler(req, handler) <- reqsToUse) {
val objName = req.lineRep.readPathInstance
+ if (isReplTrace)
+ code.append(ss"// $objName definedNames ${handler.definedNames}, curImps $currentImps\n")
handler match {
case h: ImportHandler if checkHeader(h) =>
header.clear()
@@ -175,21 +181,20 @@ trait Imports {
currentImps ++= x.importedNames
case x if isClassBased =>
- for (imv <- x.definedNames) {
- if (!currentImps.contains(imv)) {
- x match {
- case _: ClassHandler =>
- code.append("import " + objName + req.accessPath + ".`" + imv + "`\n")
- case _ =>
- val valName = req.lineRep.packageName + req.lineRep.readName
- if (!tempValLines.contains(req.lineRep.lineId)) {
- code.append(s"val $valName: ${objName}.type = $objName\n")
- tempValLines += req.lineRep.lineId
- }
- code.append(s"import $valName${req.accessPath}.`$imv`;\n")
- }
- currentImps += imv
+ for (sym <- x.definedSymbols) {
+ maybeWrap(sym.name)
+ x match {
+ case _: ClassHandler =>
+ code.append(s"import ${objName}${req.accessPath}.`${sym.name}`\n")
+ case _ =>
+ val valName = s"${req.lineRep.packageName}${req.lineRep.readName}"
+ if (!tempValLines.contains(req.lineRep.lineId)) {
+ code.append(s"val $valName: ${objName}.type = $objName\n")
+ tempValLines += req.lineRep.lineId
+ }
+ code.append(s"import ${valName}${req.accessPath}.`${sym.name}`\n")
}
+ currentImps += sym.name
}
// For other requests, import each defined name.
// import them explicitly instead of with _, so that
diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
index e3dc72b717..b9a4054ffc 100644
--- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
+++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala
@@ -7,9 +7,7 @@ package scala.tools.nsc.interpreter
import scala.reflect.internal.util.RangePosition
import scala.reflect.io.AbstractFile
import scala.tools.nsc.backend.JavaPlatform
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-import scala.tools.nsc.util.{ClassPath, MergedClassPath, DirectoryClassPath}
+import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.{interactive, Settings}
import scala.tools.nsc.reporters.StoreReporter
import scala.tools.nsc.classpath._
@@ -58,12 +56,8 @@ trait PresentationCompilation {
*/
def newPresentationCompiler(): interactive.Global = {
def mergedFlatClasspath = {
- val replOutClasspath = FlatClassPathFactory.newClassPath(replOutput.dir, settings)
- AggregateFlatClassPath(replOutClasspath :: global.platform.flatClassPath :: Nil)
- }
- def mergedRecursiveClasspath = {
- val replOutClasspath: DirectoryClassPath = new DirectoryClassPath(replOutput.dir, DefaultJavaContext)
- new MergedClassPath[AbstractFile](replOutClasspath :: global.platform.classPath :: Nil, DefaultJavaContext)
+ val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings)
+ AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil)
}
def copySettings: Settings = {
val s = new Settings(_ => () /* ignores "bad option -nc" errors, etc */)
@@ -74,16 +68,9 @@ trait PresentationCompilation {
val storeReporter: StoreReporter = new StoreReporter
val interactiveGlobal = new interactive.Global(copySettings, storeReporter) { self =>
override lazy val platform: ThisPlatform = {
- if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat) {
- new JavaPlatform {
- val global: self.type = self
- override private[nsc] lazy val flatClassPath: FlatClassPath = mergedFlatClasspath
- }
- } else {
- new JavaPlatform {
- val global: self.type = self
- override def classPath: ClassPath[AbstractFile] = mergedRecursiveClasspath
- }
+ new JavaPlatform {
+ val global: self.type = self
+ override private[nsc] lazy val classPath: ClassPath = mergedFlatClasspath
}
}
}
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 56f1e65376..97b32bfa86 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -88,9 +88,6 @@ package object interpreter extends ReplConfig with ReplStrings {
}
}
- if (filtered.isEmpty)
- return "No implicits have been imported other than those in Predef."
-
filtered foreach {
case (source, syms) =>
p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
@@ -126,7 +123,14 @@ package object interpreter extends ReplConfig with ReplStrings {
}
p("")
}
- ""
+
+ if (filtered.nonEmpty)
+ "" // side-effects above
+ else if (global.settings.nopredef || global.settings.noimports)
+ "No implicits have been imported."
+ else
+ "No implicits have been imported other than those in Predef."
+
}
def kindCommandInternal(expr: String, verbose: Boolean): Unit = {
@@ -198,13 +202,14 @@ package object interpreter extends ReplConfig with ReplStrings {
}
}
- /* debug assist
+ /* An s-interpolator that uses `stringOf(arg)` instead of `String.valueOf(arg)`. */
private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal {
import StringContext._, runtime.ScalaRunTime.stringOf
def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf)
- } debug assist */
+ }
+ /* Try (body) lastly (more) */
private[nsc] implicit class `try lastly`[A](val t: Try[A]) extends AnyVal {
- private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
- def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ private def effect[X](last: => Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: => Unit): Try[A] = t transform (effect(last) _, effect(last) _)
}
}
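
The `ss` interpolator and `lastly` combinator made available above are `private[nsc]` helpers, so the sketch below only shows how they read from inside the `scala.tools.nsc` packages; the values are made up:

    import scala.util.Try

    def traceSketch(): Unit = {
      val imported = Array("a", "b")
      // stringOf renders arrays as Array(a, b) instead of the default [Ljava.lang.String;@...
      val line = ss"// imported $imported"
      // the `lastly` argument runs whether the Try body succeeded or failed, like a lightweight finally
      Try(Console.println(line)) lastly Console.println("// done")
    }
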
diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index b38aadd328..63d3b4ce27 100644
--- a/src/scaladoc/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -14,8 +14,8 @@ import org.apache.tools.ant.Project
import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
+import scala.tools.nsc.ScalaDocReporter
import scala.tools.nsc.doc.Settings
-import scala.tools.nsc.reporters.ConsoleReporter
/** An Ant task to document Scala code.
*
@@ -666,7 +666,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Performs the compilation. */
override def execute() = {
val (docSettings, sourceFiles) = initialize
- val reporter = new ConsoleReporter(docSettings)
+ val reporter = new ScalaDocReporter(docSettings)
try {
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(sourceFiles.map (_.toString))
diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index bd00c27f7b..e266f7beea 100644
--- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -8,7 +8,8 @@ package scala.tools.nsc
import scala.tools.nsc.doc.DocFactory
import scala.tools.nsc.reporters.ConsoleReporter
-import scala.reflect.internal.util.FakePos
+import scala.reflect.internal.Reporter
+import scala.reflect.internal.util.{ FakePos, NoPosition, Position }
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
@@ -38,23 +39,43 @@ class ScalaDoc {
reporter.echo(command.usageMsg)
else
try { new DocFactory(reporter, docSettings) document command.files }
- catch {
- case ex @ FatalError(msg) =>
- if (docSettings.debug.value) ex.printStackTrace()
- reporter.error(null, "fatal error: " + msg)
- }
- finally reporter.printSummary()
+ catch {
+ case ex @ FatalError(msg) =>
+ if (docSettings.debug.value) ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
+ }
+ finally reporter.printSummary()
!reporter.reallyHasErrors
}
}
+/** The Scaladoc reporter adds summary messages to the `ConsoleReporter`.
+ *
+ *  Use the `summaryX` methods to add a unique summarizing message at the end of
+ *  the run.
+ */
class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) {
+ import scala.collection.mutable.LinkedHashMap
// sometimes we need to lie so that the Global instance doesn't
// trash all the symbols just because there was an error
override def hasErrors = false
def reallyHasErrors = super.hasErrors
+
+ private[this] val delayedMessages: LinkedHashMap[(Position, String), () => Unit] =
+ LinkedHashMap.empty
+
+  /** Queues a delayed message; an entry with the same `pos` and `msg` replaces any existing one, so duplicates print only once. */
+ def addDelayedMessage(pos: Position, msg: String, print: () => Unit): Unit =
+ delayedMessages += ((pos, msg) -> print)
+
+ def printDelayedMessages(): Unit = delayedMessages.values.foreach(_.apply())
+
+ override def printSummary(): Unit = {
+ printDelayedMessages()
+ super.printSummary()
+ }
}
object ScalaDoc extends ScalaDoc {
@@ -70,4 +91,20 @@ object ScalaDoc extends ScalaDoc {
def main(args: Array[String]): Unit = sys exit {
if (process(args)) 0 else 1
}
+
+ implicit class SummaryReporter(val rep: Reporter) extends AnyVal {
+    /** Queues the print thunk on a `ScalaDocReporter`; on any other reporter it is executed immediately. */
+ private[this] def summaryMessage(pos: Position, msg: String, print: () => Unit): Unit = rep match {
+ case r: ScalaDocReporter => r.addDelayedMessage(pos, msg, print)
+ case _ => print()
+ }
+
+ def summaryEcho(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.echo(pos, msg))
+ def summaryError(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.error(pos, msg))
+ def summaryWarning(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.warning(pos, msg))
+
+ def summaryEcho(msg: String): Unit = summaryEcho(NoPosition, msg)
+ def summaryError(msg: String): Unit = summaryError(NoPosition, msg)
+ def summaryWarning(msg: String): Unit = summaryWarning(NoPosition, msg)
+ }
}
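
Delayed messages are keyed on `(pos, msg)` in a `LinkedHashMap`, so queuing the same summary twice prints it only once, when `printSummary` runs. A sketch of the intended use, assuming `import ScalaDoc.SummaryReporter` is in scope and `rep` is whatever reporter the run was given:

    import scala.reflect.internal.Reporter
    import scala.reflect.internal.util.NoPosition

    def warnOnce(rep: Reporter): Unit = {
      rep.summaryWarning(NoPosition, "doc-version is too long to display") // queued on a ScalaDocReporter, emitted immediately otherwise
      rep.summaryWarning(NoPosition, "doc-version is too long to display") // same (pos, msg) key, collapsed into a single entry
    }
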
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index fb6c39d7e3..8c646be9c6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package doc
-import scala.util.control.ControlThrowable
import reporters.Reporter
+import scala.util.control.ControlThrowable
import scala.reflect.internal.util.BatchSourceFile
/** A documentation processor controls the process of generating Scala
@@ -105,7 +105,19 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
def generate() = {
import doclet._
val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet
- val docletInstance = docletClass.newInstance().asInstanceOf[Generator]
+ val docletInstance =
+ docletClass
+ .getConstructors
+ .find { constr =>
+ constr.getParameterTypes.length == 1 &&
+ constr.getParameterTypes.apply(0) == classOf[scala.reflect.internal.Reporter]
+ }
+ .map(_.newInstance(reporter))
+ .getOrElse{
+ reporter.warning(null, "Doclets should be created with the Reporter constructor, otherwise logging reporters will not be shared by the creating parent")
+ docletClass.newInstance()
+ }
+ .asInstanceOf[Generator]
docletInstance match {
case universer: Universer =>
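
A custom doclet opts into this by declaring a single-argument `Reporter` constructor; otherwise `DocFactory` warns and falls back to the no-arg constructor. A sketch of such a doclet (the class name and body are illustrative):

    import scala.reflect.internal.Reporter
    import scala.reflect.internal.util.NoPosition
    import scala.tools.nsc.doc.doclet.{Generator, Universer}

    class SharedReporterDoclet(reporter: Reporter) extends Generator with Universer {
      def generateImpl(): Unit =
        // messages go through the parent's reporter, so they share its summary handling
        reporter.echo(NoPosition, s"documenting with doc-version ${universe.settings.docversion.value}")
    }
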
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index 8cd8a7ee09..d3b4bf8ff5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -295,7 +295,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
case line :: ls if (lastTagKey.isDefined) => {
- val newtags = if (!line.isEmpty) {
+ val newtags = if (!line.isEmpty || inCodeBlock) {
val key = lastTagKey.get
val value =
((tags get key): @unchecked) match {
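
With `inCodeBlock` taken into account, a blank line inside a `{{{ ... }}}` block no longer terminates the tag that is being parsed. The kind of comment that now survives intact looks like this (a made-up example):

    /** Adds two numbers.
     *
     *  @example {{{
     *  val a = 1
     *
     *  val b = a + 1   // the blank line above stays inside the @example code block
     *  }}}
     */
    def add(a: Int, b: Int): Int = a + b
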
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
index 541266e4cc..73a854e995 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
@@ -7,14 +7,19 @@ package scala.tools.nsc
package doc
package html
+import scala.reflect.internal.Reporter
import doclet._
/** The default doclet used by the scaladoc command line tool
* when no user-provided doclet is provided. */
-class Doclet extends Generator with Universer {
+class Doclet(reporter: Reporter) extends Generator with Universer {
- def generateImpl() {
- new html.HtmlFactory(universe, new ScalaDocReporter(universe.settings)).generate()
- }
+ @deprecated("Doclets should be created with the Reporter constructor. Otherwise logging reporters will not be shared by the creating parent", "2.12.0")
+ def this() = this(null)
+ def generateImpl() =
+ new html.HtmlFactory(
+ universe,
+ if (reporter != null) reporter else new ScalaDocReporter(universe.settings)
+ ).generate()
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 88b84be65e..62620057cb 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -12,12 +12,13 @@ import java.io.{ File => JFile }
import io.{ Streamable, Directory }
import scala.collection._
import page.diagram._
+import scala.reflect.internal.Reporter
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
* @author Gilles Dubochet */
-class HtmlFactory(val universe: doc.Universe, val reporter: ScalaDocReporter) {
- import page.IndexScript
+class HtmlFactory(val universe: doc.Universe, val reporter: Reporter) {
+ import page.{IndexScript, EntityPage}
/** The character encoding to be used for generated Scaladoc sites.
* This value is currently always UTF-8. */
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 0f37f86b3e..6ad51f4f7e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -13,6 +13,7 @@ import base._
import base.comment._
import model._
+import scala.reflect.internal.Reporter
import scala.xml.NodeSeq
import scala.xml.Elem
import scala.xml.dtd.DocType
@@ -27,7 +28,7 @@ abstract class HtmlPage extends Page { thisPage =>
protected def title: String
/** ScalaDoc reporter for error handling */
- protected def reporter: ScalaDocReporter
+ protected def docletReporter: Reporter
/** The page description */
protected def description: String =
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
index 836d1b4b7d..9dd2c2184d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala
@@ -13,6 +13,7 @@ package page
import base._
import base.comment._
+import scala.reflect.internal.Reporter
import scala.collection.mutable
import scala.xml.{NodeSeq, Text, UnprefixedAttribute}
import scala.language.postfixOps
@@ -22,10 +23,12 @@ import model.diagram._
import diagram._
trait EntityPage extends HtmlPage {
+ import ScalaDoc.SummaryReporter
+
def universe: doc.Universe
def generator: DiagramGenerator
def tpl: DocTemplateEntity
- def reporter: ScalaDocReporter
+ def docletReporter: Reporter
override val path = templateToPath(tpl)
@@ -158,8 +161,7 @@ trait EntityPage extends HtmlPage {
val version = universe.settings.docversion.value
if (version.length > "XX.XX.XX-XXX".length) {
- reporter.warning(null,
- s"doc-version ($version) is too long to be displayed in the webview")
+ docletReporter.summaryWarning(s"doc-version ($version) was too long to be displayed in the webview, and will be left out. The max length is: XX.XX.XX-XXX")
""
} else version
}
@@ -1124,12 +1126,12 @@ object EntityPage {
uni: doc.Universe,
gen: DiagramGenerator,
docTpl: DocTemplateEntity,
- rep: ScalaDocReporter
+ rep: Reporter
): EntityPage = new EntityPage {
def universe = uni
def generator = gen
def tpl = docTpl
- def reporter = rep
+ def docletReporter = rep
}
/* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 3d2bfd7251..6a37bbc270 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -8,17 +8,12 @@
package scala
package tools.scalap
-import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
+import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintStream}
import scala.reflect.NameTransformer
import scala.tools.nsc.Settings
-import scala.tools.nsc.classpath.AggregateFlatClassPath
-import scala.tools.nsc.classpath.FlatClassPathFactory
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.nsc.util.ClassFileLookup
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
-import scala.tools.nsc.util.JavaClassPath
-import scala.tools.util.PathResolverFactory
+import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory}
+import scala.tools.nsc.util.ClassPath
+import scala.tools.util.PathResolver
import scalax.rules.scalasig._
/**The main object used to execute scalap on the command-line.
@@ -101,7 +96,7 @@ class Main {
/** Executes scalap with the given arguments and classpath for the
* class denoted by `classname`.
*/
- def process(args: Arguments, path: ClassFileLookup[AbstractFile])(classname: String): Unit = {
+ def process(args: Arguments, path: ClassPath)(classname: String): Unit = {
// find the classfile
val encName = classname match {
case "scala.AnyRef" => "java.lang.Object"
@@ -145,7 +140,6 @@ object Main extends Main {
val verbose = "-verbose"
val version = "-version"
- val classPathImplType = "-YclasspathImpl"
val disableFlatClassPathCaching = "-YdisableFlatCpCaching"
val logClassPath = "-Ylog-classpath"
}
@@ -183,7 +177,6 @@ object Main extends Main {
val settings = new Settings()
- arguments getArgument opts.classPathImplType foreach settings.YclasspathImpl.tryToSetFromPropertyValue
settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching
settings.Ylogcp.value = arguments contains opts.logClassPath
@@ -205,21 +198,16 @@ object Main extends Main {
.withOption(opts.help)
.withOptionalArg(opts.classpath)
.withOptionalArg(opts.cp)
- // TODO three temporary, hidden options to be able to test different classpath representations
- .withOptionalArg(opts.classPathImplType)
+ // TODO two temporary, hidden options to be able to test different classpath representations
.withOption(opts.disableFlatClassPathCaching)
.withOption(opts.logClassPath)
.parse(args)
private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match {
- case Some(cp) => settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat =>
- AggregateFlatClassPath(new FlatClassPathFactory(settings).classesInExpandedPath(cp))
- case ClassPathRepresentationType.Recursive =>
- new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- }
+ case Some(cp) =>
+ AggregateClassPath(new ClassPathFactory(settings).classesInExpandedPath(cp))
case _ =>
settings.classpath.value = "." // include '.' in the default classpath SI-6669
- PathResolverFactory.create(settings).result
+ new PathResolver(settings).result
}
}
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index ce3c8062d7..9a2162a906 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -353,7 +353,7 @@ defined class Term
scala> def f(e: Exp) = e match { // non-exhaustive warning here
case _:Fact => 3
}
-<console>:22: warning: match may not be exhaustive.
+<console>:18: warning: match may not be exhaustive.
It would fail on the following inputs: Exp(), Term()
def f(e: Exp) = e match { // non-exhaustive warning here
^
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index cb26446f40..1c5dd4828b 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -85,12 +85,12 @@ x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
x equals y: true, y equals x: true
-x = Map(buffers -> 20, layers -> 2, title -> 3)
-y = Map(buffers -> 20, layers -> 2, title -> 3)
+x = ListMap(buffers -> 20, layers -> 2, title -> 3)
+y = ListMap(buffers -> 20, layers -> 2, title -> 3)
x equals y: true, y equals x: true
-x = ListSet(5, 3)
-y = ListSet(5, 3)
+x = ListSet(3, 5)
+y = ListSet(3, 5)
x equals y: true, y equals x: true
x = Queue(a, b, c)
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index cb26446f40..1c5dd4828b 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -85,12 +85,12 @@ x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
x equals y: true, y equals x: true
-x = Map(buffers -> 20, layers -> 2, title -> 3)
-y = Map(buffers -> 20, layers -> 2, title -> 3)
+x = ListMap(buffers -> 20, layers -> 2, title -> 3)
+y = ListMap(buffers -> 20, layers -> 2, title -> 3)
x equals y: true, y equals x: true
-x = ListSet(5, 3)
-y = ListSet(5, 3)
+x = ListSet(3, 5)
+y = ListSet(3, 5)
x equals y: true, y equals x: true
x = Queue(a, b, c)
diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check
index 4f4a12bc13..8a5bd97ae3 100644
--- a/test/files/neg/constrs.check
+++ b/test/files/neg/constrs.check
@@ -7,7 +7,7 @@ constrs.scala:6: error: value u is not a member of object test
constrs.scala:10: error: called constructor's definition must precede calling constructor's definition
def this() = this("abc")
^
-constrs.scala:12: error: called constructor's definition must precede calling constructor's definition
+constrs.scala:12: error: constructor invokes itself
def this(x: Boolean) = this(x)
^
constrs.scala:16: error: type mismatch;
diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check
index b711e7acb1..7a7618a114 100644
--- a/test/files/neg/t4460a.check
+++ b/test/files/neg/t4460a.check
@@ -1,4 +1,4 @@
-t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition
+t4460a.scala:6: error: constructor invokes itself
def this() = this() // was binding to Predef.<init> !!
^
one error found
diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check
index f0e703fd10..9a621dbd5c 100644
--- a/test/files/neg/t4460b.check
+++ b/test/files/neg/t4460b.check
@@ -1,4 +1,4 @@
-t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition
+t4460b.scala:7: error: constructor invokes itself
def this() = this() // was binding to Predef.<init> !!
^
one error found
diff --git a/test/files/neg/t9045.check b/test/files/neg/t9045.check
new file mode 100644
index 0000000000..07d0e2dd74
--- /dev/null
+++ b/test/files/neg/t9045.check
@@ -0,0 +1,7 @@
+t9045.scala:3: error: constructor invokes itself
+ def this(axes: Array[Int]) = this(axes)
+ ^
+t9045.scala:6: error: called constructor's definition must precede calling constructor's definition
+ def this(d: Double) = this(d.toLong)
+ ^
+two errors found
diff --git a/test/files/neg/t9045.scala b/test/files/neg/t9045.scala
new file mode 100644
index 0000000000..e6710ab324
--- /dev/null
+++ b/test/files/neg/t9045.scala
@@ -0,0 +1,8 @@
+
+case class AffineImageShape(axes: Seq[Int]) {
+ def this(axes: Array[Int]) = this(axes)
+}
+class X(i: Int) {
+ def this(d: Double) = this(d.toLong)
+ def this(n: Long) = this(n.toInt)
+}
diff --git a/test/files/pos/constant-warning.check b/test/files/pos/constant-warning.check
new file mode 100644
index 0000000000..f7df2165d1
--- /dev/null
+++ b/test/files/pos/constant-warning.check
@@ -0,0 +1,4 @@
+constant-warning.scala:2: warning: Evaluation of a constant expression results in an arithmetic error: / by zero
+ val fails = 1 + 2 / (3 - 2 - 1)
+ ^
+one warning found
diff --git a/test/files/pos/constant-warning.flags b/test/files/pos/constant-warning.flags
new file mode 100644
index 0000000000..d00cbbe77b
--- /dev/null
+++ b/test/files/pos/constant-warning.flags
@@ -0,0 +1 @@
+-Xlint:constant
diff --git a/test/files/pos/constant-warning.scala b/test/files/pos/constant-warning.scala
new file mode 100644
index 0000000000..c8ca8823e7
--- /dev/null
+++ b/test/files/pos/constant-warning.scala
@@ -0,0 +1,3 @@
+object Test {
+ val fails = 1 + 2 / (3 - 2 - 1)
+}
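
The new test exercises `-Xlint:constant`, under which the constant folder reports arithmetic errors encountered while folding compile-time constants that would otherwise only surface at run time. A slightly expanded sketch of what does and does not trigger the warning:

    object ConstantLint {
      val ok    = 1 + 2 / (3 - 2)     // folds to 3, no diagnostic
      val fails = 1 + 2 / (3 - 2 - 1) // divisor folds to 0: "/ by zero" reported at compile time under -Xlint:constant
    }
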
diff --git a/test/files/pos/t9397.scala b/test/files/pos/t9397.scala
new file mode 100644
index 0000000000..3dbc6591d3
--- /dev/null
+++ b/test/files/pos/t9397.scala
@@ -0,0 +1,12 @@
+package foo.scala
+
+import scala.reflect.runtime.universe._
+
+object Foo {
+
+ def bar[T: TypeTag]() {
+ }
+
+ import foo._
+ bar[String]()
+}
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index c4ade79837..099a353e89 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -6,7 +6,6 @@ import scala.tools.reflect.ToolBox
import scala.reflect.api._
import scala.reflect.api.Trees
import scala.reflect.internal.Types
-import scala.util.matching.Regex
object Test extends App {
//val output = new ByteArrayOutputStream()
diff --git a/test/files/run/repl-classbased.check b/test/files/run/repl-classbased.check
new file mode 100644
index 0000000000..e11fc170e5
--- /dev/null
+++ b/test/files/run/repl-classbased.check
@@ -0,0 +1,23 @@
+
+scala> case class K(s: String)
+defined class K
+
+scala> class C { implicit val k: K = K("OK?"); override def toString = s"C($k)" }
+defined class C
+
+scala> val c = new C
+c: C = C(K(OK?))
+
+scala> import c.k
+import c.k
+
+scala> implicitly[K]
+res0: K = K(OK?)
+
+scala> val k = 42
+k: Int = 42
+
+scala> k // was K(OK?)
+res1: Int = 42
+
+scala> :quit
diff --git a/test/files/run/repl-classbased.scala b/test/files/run/repl-classbased.scala
new file mode 100644
index 0000000000..595e123159
--- /dev/null
+++ b/test/files/run/repl-classbased.scala
@@ -0,0 +1,22 @@
+
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+//SI-9740
+object Test extends ReplTest {
+ override def transformSettings(s: Settings): Settings = {
+ s.Yreplclassbased.value = true
+ s
+ }
+
+ def code =
+ """
+case class K(s: String)
+class C { implicit val k: K = K("OK?"); override def toString = s"C($k)" }
+val c = new C
+import c.k
+implicitly[K]
+val k = 42
+k // was K(OK?)
+ """
+}
diff --git a/test/files/run/repl-implicits-nopredef.check b/test/files/run/repl-implicits-nopredef.check
new file mode 100644
index 0000000000..a849801bb4
--- /dev/null
+++ b/test/files/run/repl-implicits-nopredef.check
@@ -0,0 +1,5 @@
+
+scala> :implicits
+No implicits have been imported.
+
+scala> :quit
\ No newline at end of file
diff --git a/test/files/run/repl-implicits-nopredef.scala b/test/files/run/repl-implicits-nopredef.scala
new file mode 100644
index 0000000000..8a451b0c52
--- /dev/null
+++ b/test/files/run/repl-implicits-nopredef.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+ override def transformSettings(settings: Settings): Settings = {
+ settings.nopredef.value = true
+ settings
+ }
+ def code = ":implicits"
+}
diff --git a/test/files/run/repl-implicits.check b/test/files/run/repl-implicits.check
new file mode 100644
index 0000000000..6e80cc8799
--- /dev/null
+++ b/test/files/run/repl-implicits.check
@@ -0,0 +1,5 @@
+
+scala> :implicits
+No implicits have been imported other than those in Predef.
+
+scala> :quit
\ No newline at end of file
diff --git a/test/files/run/repl-implicits.scala b/test/files/run/repl-implicits.scala
new file mode 100644
index 0000000000..ca8e16e683
--- /dev/null
+++ b/test/files/run/repl-implicits.scala
@@ -0,0 +1,5 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = ":implicits"
+}
diff --git a/test/files/run/t3822.scala b/test/files/run/t3822.scala
deleted file mode 100644
index c35804035e..0000000000
--- a/test/files/run/t3822.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import scala.collection.{ mutable, immutable, generic }
-import immutable.ListSet
-
-object Test {
- def main(args: Array[String]): Unit = {
- val xs = ListSet(-100000 to 100001: _*)
-
- assert(xs.size == 200002)
- assert(xs.sum == 100001)
-
- val ys = ListSet[Int]()
- val ys1 = (1 to 12).grouped(3).foldLeft(ys)(_ ++ _)
- val ys2 = (1 to 12).foldLeft(ys)(_ + _)
-
- assert(ys1 == ys2)
- }
-}
-
-
diff --git a/test/files/run/t6198.scala b/test/files/run/t6198.scala
index 5aa8f1c1cf..65dbaf8160 100644
--- a/test/files/run/t6198.scala
+++ b/test/files/run/t6198.scala
@@ -1,13 +1,6 @@
import scala.collection.immutable._
object Test extends App {
- // test that ListSet.tail does not use a builder
- // we can't test for O(1) behavior, so the best we can do is to
- // check that ls.tail always returns the same instance
- val ls = ListSet.empty[Int] + 1 + 2
-
- if(ls.tail ne ls.tail)
- println("ListSet.tail should not use a builder!")
// class that always causes hash collisions
case class Collision(value:Int) { override def hashCode = 0 }
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
index dffb0e2f98..cb2b3ff449 100644
--- a/test/files/run/t6502.scala
+++ b/test/files/run/t6502.scala
@@ -1,6 +1,5 @@
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ ILoop, replProps }
-import scala.tools.nsc.settings.ClassPathRepresentationType
import scala.tools.partest._
object Test extends StoreReporterDirectTest {
@@ -14,14 +13,6 @@ object Test extends StoreReporterDirectTest {
compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code)
}
- var classPathKind: String = ""
-
- override def settings = {
- val settings = new Settings
- settings.YclasspathImpl.value = classPathKind
- settings
- }
-
def app1 = """
package test
@@ -155,7 +146,7 @@ object Test extends StoreReporterDirectTest {
assert(output.contains("created test6.Z"), output)
}
- def testAll(): Unit = {
+ def show(): Unit = {
test1()
test2()
test3()
@@ -163,11 +154,4 @@ object Test extends StoreReporterDirectTest {
test5()
test6()
}
-
- def show(): Unit = {
- classPathKind = ClassPathRepresentationType.Flat
- testAll()
- classPathKind = ClassPathRepresentationType.Recursive
- testAll()
- }
}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index 4d8429e8f2..31923e7119 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -15,21 +15,21 @@ warning: there was one feature warning; re-run with -feature for details
convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
scala> convert(Some[Int](0))
-<console>:16: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+<console>:15: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
--- because ---
argument expression's type is not compatible with formal parameter type;
found : Some[Int]
required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } }
convert(Some[Int](0))
^
-<console>:16: error: type mismatch;
+<console>:15: error: type mismatch;
found : Some[Int]
required: F[_ <: F[_]]
convert(Some[Int](0))
^
scala> Range(1,2).toArray: Seq[_]
-<console>:15: error: polymorphic expression cannot be instantiated to expected type;
+<console>:14: error: polymorphic expression cannot be instantiated to expected type;
found : [B >: Int]Array[B]
required: Seq[_]
Range(1,2).toArray: Seq[_]
diff --git a/test/files/run/t7445.scala b/test/files/run/t7445.scala
deleted file mode 100644
index e4ffeb8e1a..0000000000
--- a/test/files/run/t7445.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.collection.immutable.ListMap
-
-object Test extends App {
- val a = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5);
- require(a.tail == ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5));
-}
diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala
index e2d0d335b0..1ce8933efb 100644
--- a/test/files/run/t8549.scala
+++ b/test/files/run/t8549.scala
@@ -79,7 +79,7 @@ object Test extends App {
}
}
- // Generated on 20160328-17:47:35 with Scala version 2.12.0-20160328-174205-d46145c)
+ // Generated on 20160515-00:17:51 with Scala version 2.12.0-SNAPSHOT)
overwrite.foreach(updateComment)
check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==")
@@ -145,6 +145,8 @@ object Test extends App {
check(immutable.HashSet(1, 2, 3))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
// TODO provoke HashSetCollision1
+ check(immutable.ListSet())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JEVtcHR5TGlzdFNldCRFiHGwmKwhTAIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0izCZaSia0jYCAAB4cA==")
+ check(immutable.ListSet(1))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JE5vZGX1EX2lizBAdwIAAkwABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0U2V0O0wABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDt4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldIswmWkomtI2AgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldCRFbXB0eUxpc3RTZXQkRYhxsJisIUwCAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==")
check(immutable.ListMap())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJEVtcHR5TGlzdE1hcCSNalsvpBZeDgIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwBC1gfIkUSKsCAAB4cA==")
check(immutable.ListMap(1 -> 2))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJE5vZGWmciM1Yav+8gIAA0wABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0TWFwO0wAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABXZhbHVlcQB+AAJ4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcAQtYHyJFEirAgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcCRFbXB0eUxpc3RNYXAkjWpbL6QWXg4CAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABwAAAAI=")
check(immutable.Queue())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5RdWV1ZZY146W3qSuhAgACTAACaW50ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDtMAANvdXRxAH4AAXhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4cQB+AAQ=")
diff --git a/test/files/run/t8756.check b/test/files/run/t8756.check
new file mode 100644
index 0000000000..9b9dcafe7d
--- /dev/null
+++ b/test/files/run/t8756.check
@@ -0,0 +1,9 @@
+public Bippy<java.lang.Object> Test.f1(long)
+public Bippy<java.lang.Object> Test.f2(long)
+public Bippy<java.lang.Object> Test.i1(Bippy<java.lang.Object>)
+public Bippy<java.lang.Object> Test.i2(Bippy<java.lang.Object>)
+public int Test.g1(long)
+public int Test.g2(long)
+public java.lang.Object Test.h1(long)
+public java.lang.Object Test.h2(long)
+public static void Test.main(java.lang.String[])
diff --git a/test/files/run/t8756.scala b/test/files/run/t8756.scala
new file mode 100644
index 0000000000..edd243473a
--- /dev/null
+++ b/test/files/run/t8756.scala
@@ -0,0 +1,22 @@
+trait Bippy[A]
+
+class Test {
+ type T1 = Long
+ type T2 = Long { type Tag = Nothing }
+
+ def f1(t: T1): Bippy[Object] = ???
+ def f2(t: T2): Bippy[Object] = ???
+ def g1(t: T1): Int = ???
+ def g2(t: T2): Int = ???
+ def h1(t: T1): Object = ???
+ def h2(t: T2): Object = ???
+ def i1(t: Bippy[T1]): Bippy[T1] = ???
+ def i2(t: Bippy[T2]): Bippy[T2] = ???
+
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(classOf[Test].getDeclaredMethods.map(_.toGenericString).toList.sorted.mkString("\n"))
+ }
+}
diff --git a/test/files/run/various-flat-classpath-types.scala b/test/files/run/various-flat-classpath-types.scala
index d39019e885..bc54ffb6cc 100644
--- a/test/files/run/various-flat-classpath-types.scala
+++ b/test/files/run/various-flat-classpath-types.scala
@@ -5,7 +5,7 @@
import java.io.{File => JFile, FileInputStream, FileOutputStream}
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.reflect.io.{Directory, File}
-import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+import scala.tools.nsc.util.ClassPath.RootPackage
import scala.tools.nsc.classpath.PackageNameUtils
import scala.tools.nsc.io.Jar
@@ -80,7 +80,6 @@ object Test {
private val compiler = new scala.tools.nsc.MainClass
private val appRunner = new scala.tools.nsc.MainGenericRunner
- private val classPathImplFlag = "-YclasspathImpl:flat"
private val javaClassPath = sys.props("java.class.path")
// creates a test dir in a temporary dir containing compiled files of this test
@@ -166,13 +165,13 @@ object Test {
val classPath = mkPath(javaClassPath, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
val sourcePath = mkPath(srcDir.path, zipsDir.path + "/Src.zip", jarsDir.path + "/Src.jar")
- compiler.process(Array(classPathImplFlag, "-cp", classPath, "-sourcepath", sourcePath,
+ compiler.process(Array("-cp", classPath, "-sourcepath", sourcePath,
"-d", outDir.path, s"${srcDir.path}/Main.scala"))
}
private def runApp(): Unit = {
val classPath = mkPath(javaClassPath, outDir.path, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
- appRunner.process(Array(classPathImplFlag, "-cp", classPath, "Main"))
+ appRunner.process(Array("-cp", classPath, "Main"))
}
private def createStandardSrcHierarchy(baseFileName: String): Unit =
@@ -200,7 +199,7 @@ object Test {
private def compileSrc(baseFileName: String, destination: JFile = outDir): Unit = {
val srcDirPath = srcDir.path
- compiler.process(Array(classPathImplFlag, "-cp", javaClassPath, "-d", destination.path,
+ compiler.process(Array("-cp", javaClassPath, "-d", destination.path,
s"$srcDirPath/$baseFileName.scala", s"$srcDirPath/nested/Nested$baseFileName.scala"))
}
diff --git a/test/junit/scala/collection/immutable/ListMapTest.scala b/test/junit/scala/collection/immutable/ListMapTest.scala
new file mode 100644
index 0000000000..320a976755
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListMapTest.scala
@@ -0,0 +1,48 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ListMapTest {
+
+ @Test
+ def t7445(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5)
+ assertEquals(ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5), m.tail)
+ }
+
+ @Test
+ def hasCorrectBuilder(): Unit = {
+ val m = ListMap("a" -> "1", "b" -> "2", "c" -> "3", "b" -> "2.2", "d" -> "4")
+ assertEquals(List("a" -> "1", "c" -> "3", "b" -> "2.2", "d" -> "4"), m.toList)
+ }
+
+ @Test
+ def hasCorrectHeadTailLastInit(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+ assertEquals(1 -> 1, m.head)
+ assertEquals(ListMap(2 -> 2, 3 -> 3), m.tail)
+ assertEquals(3 -> 3, m.last)
+ assertEquals(ListMap(1 -> 1, 2 -> 2), m.init)
+ }
+
+ @Test
+ def hasCorrectAddRemove(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4), m + (4 -> 4))
+ assertEquals(ListMap(1 -> 1, 3 -> 3, 2 -> 4), m + (2 -> 4))
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m + (2 -> 2))
+ assertEquals(ListMap(2 -> 2, 3 -> 3), m - 1)
+ assertEquals(ListMap(1 -> 1, 3 -> 3), m - 2)
+ assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m - 4)
+ }
+
+ @Test
+ def hasCorrectIterator(): Unit = {
+ val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4)
+ assertEquals(List(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4), m.iterator.toList)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/ListSetTest.scala b/test/junit/scala/collection/immutable/ListSetTest.scala
new file mode 100644
index 0000000000..395da88c75
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListSetTest.scala
@@ -0,0 +1,53 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ListSetTest {
+
+ @Test
+ def t7445(): Unit = {
+ val s = ListSet(1, 2, 3, 4, 5)
+ assertEquals(ListSet(2, 3, 4, 5), s.tail)
+ }
+
+ @Test
+ def hasCorrectBuilder(): Unit = {
+ val m = ListSet("a", "b", "c", "b", "d")
+ assertEquals(List("a", "b", "c", "d"), m.toList)
+ }
+
+ @Test
+ def hasTailRecursiveDelete(): Unit = {
+ val s = ListSet(1 to 50000: _*)
+ try s - 25000 catch { case e: StackOverflowError => fail("A stack overflow occurred") }
+ }
+
+ @Test
+ def hasCorrectHeadTailLastInit(): Unit = {
+ val m = ListSet(1, 2, 3)
+ assertEquals(1, m.head)
+ assertEquals(ListSet(2, 3), m.tail)
+ assertEquals(3, m.last)
+ assertEquals(ListSet(1, 2), m.init)
+ }
+
+ @Test
+ def hasCorrectAddRemove(): Unit = {
+ val m = ListSet(1, 2, 3)
+ assertEquals(ListSet(1, 2, 3, 4), m + 4)
+ assertEquals(ListSet(1, 2, 3), m + 2)
+ assertEquals(ListSet(2, 3), m - 1)
+ assertEquals(ListSet(1, 3), m - 2)
+ assertEquals(ListSet(1, 2, 3), m - 4)
+ }
+
+ @Test
+ def hasCorrectIterator(): Unit = {
+ val s = ListSet(1, 2, 3, 5, 4)
+ assertEquals(List(1, 2, 3, 5, 4), s.iterator.toList)
+ }
+}
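
Together with the rewritten `ListMap` and `ListSet`, these tests pin down the new contract: iteration follows insertion order and `head`/`tail`/`last`/`init` behave like their `List` counterparts. A condensed sketch of what the suites assert:

    import scala.collection.immutable.{ListMap, ListSet}

    val s = ListSet(1, 2, 3, 5, 4)
    assert(s.iterator.toList == List(1, 2, 3, 5, 4)) // iteration follows insertion order
    assert(ListSet(1, 2, 3).tail == ListSet(2, 3))   // tail drops the first inserted element

    val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3)
    assert(m.head == (1 -> 1) && m.last == (3 -> 3)) // head/last mirror insertion order as well
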
diff --git a/test/junit/scala/collection/mutable/PriorityQueueTest.scala b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
index a14f1bf4c8..faedcf11f0 100644
--- a/test/junit/scala/collection/mutable/PriorityQueueTest.scala
+++ b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
@@ -14,6 +14,12 @@ class PriorityQueueTest {
priorityQueue.enqueue(elements :_*)
@Test
+ def orderingReverseReverse() {
+ val pq = new mutable.PriorityQueue[Nothing]()((_,_)=>42)
+ assert(pq.ord eq pq.reverse.reverse.ord)
+ }
+
+ @Test
def canSerialize() {
val outputStream = new ByteArrayOutputStream()
new ObjectOutputStream(outputStream).writeObject(priorityQueue)
@@ -27,6 +33,7 @@ class PriorityQueueTest {
val objectInputStream = new ObjectInputStream(new ByteArrayInputStream(bytes))
val deserializedPriorityQueue = objectInputStream.readObject().asInstanceOf[PriorityQueue[Int]]
+    // correct sequencing is also tested here:
assert(deserializedPriorityQueue.dequeueAll == elements.sorted.reverse)
}
}
diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala
index cf5c7f9ec3..a720f20718 100644
--- a/test/junit/scala/issues/BytecodeTest.scala
+++ b/test/junit/scala/issues/BytecodeTest.scala
@@ -15,15 +15,9 @@ import scala.tools.asm.tree.ClassNode
import scala.tools.partest.ASMConverters._
import scala.tools.testing.ClearAfterClass
-object BytecodeTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler()
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class BytecodeTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = BytecodeTest
- val compiler = BytecodeTest.compiler
+ val compiler = cached("compiler", () => newCompiler())
@Test
def t8731(): Unit = {
diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/issues/OptimizedBytecodeTest.scala
index 1555e8945a..c69229ae22 100644
--- a/test/junit/scala/issues/OptimizedBytecodeTest.scala
+++ b/test/junit/scala/issues/OptimizedBytecodeTest.scala
@@ -15,17 +15,10 @@ import AsmUtils._
import scala.tools.testing.ClearAfterClass
-object OptimizedBytecodeTest extends ClearAfterClass.Clearable {
- val args = "-Yopt:l:classpath -Yopt-warnings"
- var compiler = newCompiler(extraArgs = args)
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class OptimizedBytecodeTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = OptimizedBytecodeTest
-
- val compiler = OptimizedBytecodeTest.compiler
+ val args = "-Yopt:l:classpath -Yopt-warnings"
+ val compiler = cached("compiler", () => newCompiler(extraArgs = args))
@Test
def t2171(): Unit = {
@@ -127,7 +120,7 @@ class OptimizedBytecodeTest extends ClearAfterClass {
|object Warmup { def filter[A](p: Any => Boolean): Any = filter[Any](p) }
""".stripMargin
val c2 = "class C { def t = warmup.Warmup.filter[Any](x => false) }"
- val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = OptimizedBytecodeTest.args)
+ val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = args)
assertInvoke(getSingleMethod(c, "t"), "warmup/Warmup$", "filter")
}
@@ -268,7 +261,7 @@ class OptimizedBytecodeTest extends ClearAfterClass {
|}
""".stripMargin
- val cls = compileClassesSeparately(List(c1, c2), extraArgs = OptimizedBytecodeTest.args)
+ val cls = compileClassesSeparately(List(c1, c2), extraArgs = args)
val c = cls.find(_.name == "C").get
assertSameSummary(getSingleMethod(c, "t"), List(
GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, // module load and null checks not yet eliminated
diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala
index 781f2ef343..148009c912 100644
--- a/test/junit/scala/issues/RunTest.scala
+++ b/test/junit/scala/issues/RunTest.scala
@@ -9,22 +9,17 @@ import scala.reflect.runtime._
import scala.tools.reflect.ToolBox
import scala.tools.testing.ClearAfterClass
-object RunTest extends ClearAfterClass.Clearable {
- var toolBox = universe.runtimeMirror(getClass.getClassLoader).mkToolBox()
- override def clear(): Unit = { toolBox = null }
-
- // definitions for individual tests
+object RunTest {
class VC(val x: Any) extends AnyVal
}
@RunWith(classOf[JUnit4])
class RunTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = RunTest
+ val toolBox = cached("toolbox", () => universe.runtimeMirror(getClass.getClassLoader).mkToolBox())
def run[T](code: String): T = {
- val tb = RunTest.toolBox
- tb.eval(tb.parse(code)).asInstanceOf[T]
+ toolBox.eval(toolBox.parse(code)).asInstanceOf[T]
}
@Test
@@ -147,4 +142,16 @@ class RunTest extends ClearAfterClass {
assertEquals(run[String](definitions("Object") + runCode), "hi" * 9)
assertEquals(run[String](definitions("String") + runCode), "hi" * 9) // bridge method for clone generated
}
+
+ @Test
+ def classOfUnitConstant(): Unit = {
+ val code =
+ """abstract class A { def f: Class[_] }
+ |class C extends A { final val f = classOf[Unit] }
+ |val c = new C
+ |(c.f, (c: A).f)
+ """.stripMargin
+ val u = Void.TYPE
+ assertEquals(run[(Class[_], Class[_])](code), (u, u))
+ }
}
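
The per-test-class companion objects implementing `ClearAfterClass.Clearable` are folded into a `cached` helper on `ClearAfterClass`, which builds an expensive fixture once per test class and lets the harness release it afterwards. The resulting pattern, as used throughout these suites (`newCompiler` and `compileClasses` come from `CodeGenTools`; the test body is illustrative):

    import org.junit.Test
    import org.junit.runner.RunWith
    import org.junit.runners.JUnit4
    import scala.tools.nsc.backend.jvm.CodeGenTools._
    import scala.tools.testing.ClearAfterClass

    @RunWith(classOf[JUnit4])
    class SomeOptTest extends ClearAfterClass {
      // created once for the whole test class and cleared by ClearAfterClass when the class is done
      val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none"))

      @Test
      def sample(): Unit = {
        val classes = compileClasses(compiler)("class C { def f = 1 }")
        assert(classes.nonEmpty)
      }
    }
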
diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
index 8b8e2b36de..e7bbbb9a4f 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
@@ -10,22 +10,15 @@ import org.junit.Assert._
import scala.tools.nsc.backend.jvm.CodeGenTools._
import scala.tools.testing.ClearAfterClass
-object BTypesTest extends ClearAfterClass.Clearable {
- var compiler = {
+@RunWith(classOf[JUnit4])
+class BTypesTest extends ClearAfterClass {
+ val compiler = cached("compiler", () => {
val comp = newCompiler(extraArgs = "-Yopt:l:none")
new comp.Run() // initializes some of the compiler
comp.exitingDelambdafy(comp.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
comp.exitingDelambdafy(comp.genBCode.bTypes.initializeCoreBTypes())
comp
- }
- def clear(): Unit = { compiler = null }
-}
-
-@RunWith(classOf[JUnit4])
-class BTypesTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = BTypesTest
-
- val compiler = BTypesTest.compiler
+ })
import compiler.genBCode.bTypes._
def classBTFS(sym: compiler.Symbol) = compiler.exitingDelambdafy(classBTypeFromSymbol(sym))
diff --git a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
index fe43ed2f6a..389e5b2ead 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
@@ -206,6 +206,12 @@ object CodeGenTools {
assert(actual == expected, s"\nFound : ${quote(actual)}\nExpected: ${quote(expected)}")
}
+ def assertNoIndy(m: Method): Unit = assertNoIndy(m.instructions)
+ def assertNoIndy(l: List[Instruction]) = {
+ val indy = l collect { case i: InvokeDynamic => i }
+ assert(indy.isEmpty, indy)
+ }
+
def getSingleMethod(classNode: ClassNode, name: String): Method =
convertMethod(classNode.methods.asScala.toList.find(_.name == name).get)
diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala
index 2ce9d21331..7d4ae866fc 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala
@@ -10,14 +10,8 @@ import scala.tools.nsc.backend.jvm.CodeGenTools._
import JavaConverters._
import scala.tools.testing.ClearAfterClass
-object DefaultMethodTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler()
- def clear(): Unit = { compiler = null }
-}
-
class DefaultMethodTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = DefaultMethodTest
- val compiler = DefaultMethodTest.compiler
+ val compiler = cached("compiler", () => newCompiler())
@Test
def defaultMethodsViaGenBCode(): Unit = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
index 0cdc6ead10..e984b75518 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
@@ -9,16 +9,9 @@ import scala.tools.asm.Opcodes._
import scala.tools.partest.ASMConverters._
import scala.tools.testing.ClearAfterClass
-object DirectCompileTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:l:method")
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class DirectCompileTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = DirectCompileTest
-
- val compiler = DirectCompileTest.compiler
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:method"))
@Test
def testCompile(): Unit = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala
index d29f6b0a13..b906942ffa 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala
@@ -10,17 +10,8 @@ import scala.tools.nsc.backend.jvm.CodeGenTools._
import scala.tools.testing.ClearAfterClass
import scala.collection.JavaConverters._
-object IndyLambdaTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler()
-
- def clear(): Unit = {
- compiler = null
- }
-}
-
class IndyLambdaTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = IndyLambdaTest
- val compiler = IndyLambdaTest.compiler
+ val compiler = cached("compiler", () => newCompiler())
@Test def boxingBridgeMethodUsedSelectively(): Unit = {
def implMethodDescriptorFor(code: String): String = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala
index b9e45a7dc9..5c2ab6a2c7 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala
@@ -15,21 +15,13 @@ import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object IndySammyTest extends ClearAfterClass.Clearable {
- var _compiler = newCompiler()
-
- def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] =
- compileClasses(_compiler)(scalaCode, javaCode, allowMessage)
-
- def clear(): Unit = { _compiler = null }
-}
@RunWith(classOf[JUnit4])
class IndySammyTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = IndySammyTest
- import IndySammyTest._
- val compiler = _compiler
+ val compiler = cached("compiler", () => newCompiler())
+ def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] =
+ compileClasses(compiler)(scalaCode, javaCode, allowMessage)
def funClassName(from: String, to: String) = s"Fun$from$to"
def classPrologue(from: String, to: String) =
diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala
index 2a9b8f7198..fc0c96e71a 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala
@@ -14,15 +14,9 @@ import scala.tools.partest.ASMConverters
import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object StringConcatTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler()
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class StringConcatTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = StringConcatTest
- val compiler = StringConcatTest.compiler
+ val compiler = cached("compiler", () => newCompiler())
@Test
def appendOverloadNoBoxing(): Unit = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
index 571d84c872..075f42d18f 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala
@@ -19,18 +19,9 @@ import AsmUtils._
import scala.collection.JavaConverters._
-object NullnessAnalyzerTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none")
-
- def clear(): Unit = {
- noOptCompiler = null
- }
-}
-
@RunWith(classOf[JUnit4])
class NullnessAnalyzerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = NullnessAnalyzerTest
- val noOptCompiler = NullnessAnalyzerTest.noOptCompiler
+ val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
import noOptCompiler.genBCode.bTypes.backendUtils._
def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(noOptCompiler.genBCode.bTypes))
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
index d54b8ac563..8d4bc19ec3 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
@@ -14,18 +14,9 @@ import scala.tools.testing.ClearAfterClass
import CodeGenTools._
import AsmUtils._
-object ProdConsAnalyzerTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none")
-
- def clear(): Unit = {
- noOptCompiler = null
- }
-}
-
@RunWith(classOf[JUnit4])
class ProdConsAnalyzerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = ProdConsAnalyzerTest
- val noOptCompiler = ProdConsAnalyzerTest.noOptCompiler
+ val noOptCompiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
import noOptCompiler.genBCode.bTypes.backendUtils._
def prodToString(producer: AbstractInsnNode) = producer match {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala
index 930f7f2f10..09675870f0 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala
@@ -21,15 +21,9 @@ import BytecodeUtils._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object AnalyzerTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none")
- def clear(): Unit = { noOptCompiler = null }
-}
-
@RunWith(classOf[JUnit4])
class AnalyzerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = AnalyzerTest
- val noOptCompiler = AnalyzerTest.noOptCompiler
+ val noOptCompiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
@Test
def aliasingOfPrimitives(): Unit = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
index 1d30e42e3c..9a27c42cac 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
@@ -24,29 +24,23 @@ import BackendReporting._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object CallGraphTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:inline-global -Yopt-warnings")
- def clear(): Unit = { compiler = null }
-
- // allows inspecting the caches after a compilation run
- val notPerRun: List[Clearable] = List(
- compiler.genBCode.bTypes.classBTypeFromInternalName,
- compiler.genBCode.bTypes.byteCodeRepository.compilingClasses,
- compiler.genBCode.bTypes.byteCodeRepository.parsedClasses,
- compiler.genBCode.bTypes.callGraph.callsites)
- notPerRun foreach compiler.perRunCaches.unrecordCache
-}
-
@RunWith(classOf[JUnit4])
class CallGraphTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = CallGraphTest
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:inline-global -Yopt-warnings")
+ )
+ import compiler.genBCode.bTypes
+ val notPerRun: List[Clearable] = List(
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses,
+ bTypes.callGraph.callsites)
+ notPerRun foreach compiler.perRunCaches.unrecordCache
- val compiler = CallGraphTest.compiler
import compiler.genBCode.bTypes._
import callGraph._
def compile(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
- CallGraphTest.notPerRun.foreach(_.clear())
+ notPerRun.foreach(_.clear())
compileClasses(compiler)(code, allowMessage = allowMessage).map(c => byteCodeRepository.classNode(c.name).get)
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala
index 12bfba71a8..e8530af4e0 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala
@@ -27,16 +27,9 @@ import BackendReporting._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object ClosureOptimizerTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:l:classpath -Yopt-warnings:_")
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class ClosureOptimizerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = ClosureOptimizerTest
-
- val compiler = ClosureOptimizerTest.compiler
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath -Yopt-warnings:_"))
@Test
def nothingTypedClosureBody(): Unit = {
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
index 22aed4207f..6d566c722f 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
@@ -13,21 +13,11 @@ import scala.tools.partest.ASMConverters
import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object EmptyExceptionHandlersTest extends ClearAfterClass.Clearable {
- var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none")
- var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code")
- def clear(): Unit = {
- noOptCompiler = null
- dceCompiler = null
- }
-}
@RunWith(classOf[JUnit4])
class EmptyExceptionHandlersTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = EmptyExceptionHandlersTest
-
- val noOptCompiler = EmptyExceptionHandlersTest.noOptCompiler
- val dceCompiler = EmptyExceptionHandlersTest.dceCompiler
+ val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
+ val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code"))
val exceptionDescriptor = "java/lang/Exception"
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
index 23386bb5ae..5cb1aab4a9 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
@@ -18,25 +18,19 @@ import BackendReporting._
import scala.collection.JavaConverters._
-object InlineInfoTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:l:classpath")
- def clear(): Unit = { compiler = null }
-
- def notPerRun: List[Clearable] = List(
- compiler.genBCode.bTypes.classBTypeFromInternalName,
- compiler.genBCode.bTypes.byteCodeRepository.compilingClasses,
- compiler.genBCode.bTypes.byteCodeRepository.parsedClasses)
- notPerRun foreach compiler.perRunCaches.unrecordCache
-}
-
@RunWith(classOf[JUnit4])
class InlineInfoTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlineInfoTest
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath"))
- val compiler = InlineInfoTest.compiler
+ import compiler.genBCode.bTypes
+ def notPerRun: List[Clearable] = List(
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses)
+ notPerRun foreach compiler.perRunCaches.unrecordCache
def compile(code: String) = {
- InlineInfoTest.notPerRun.foreach(_.clear())
+ notPerRun.foreach(_.clear())
compileClasses(compiler)(code)
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
index 5090e9c83b..6dd0a33289 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
@@ -27,20 +27,12 @@ import BackendReporting._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object InlineWarningTest extends ClearAfterClass.Clearable {
- val argsNoWarn = "-Yopt:l:classpath"
- val args = argsNoWarn + " -Yopt-warnings"
- var compiler = newCompiler(extraArgs = args)
- var compilerWarnAll = newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:_")
- def clear(): Unit = { compiler = null; compilerWarnAll = null }
-}
-
@RunWith(classOf[JUnit4])
class InlineWarningTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlineWarningTest
-
- val compiler = InlineWarningTest.compiler
- val compilerWarnAll = InlineWarningTest.compilerWarnAll
+ val argsNoWarn = "-Yopt:l:classpath"
+ val args = argsNoWarn + " -Yopt-warnings"
+ val compiler = cached("compiler", () => newCompiler(extraArgs = args))
+ val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:_"))
def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false, compiler: Global = compiler): List[ClassNode] = {
compileClasses(compiler)(scalaCode, javaCode, allowMessage)
@@ -103,22 +95,22 @@ class InlineWarningTest extends ClearAfterClass {
val warns = List(
"""failed to determine if bar should be inlined:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin,
+ |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin,
"""B::flop()I is annotated @inline but could not be inlined:
|Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin)
+ |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin)
var c = 0
val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)})
assert(c == 1, c)
// no warnings here
- compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java")))
+ compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java")))
c = 0
- compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)})
+ compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)})
assert(c == 2, c)
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
index 6460158e71..ab1aef47cd 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
@@ -19,16 +19,9 @@ import AsmUtils._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object InlinerIllegalAccessTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:l:none")
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class InlinerIllegalAccessTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlinerIllegalAccessTest
-
- val compiler = InlinerIllegalAccessTest.compiler
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
import compiler.genBCode.bTypes._
def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile)
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
index 1765a355fd..b7641b5ec7 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
@@ -22,35 +22,27 @@ import BackendReporting._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object InlinerTest extends ClearAfterClass.Clearable {
+@RunWith(classOf[JUnit4])
+class InlinerTest extends ClearAfterClass {
val args = "-Yopt:l:classpath -Yopt-warnings"
- var compiler = newCompiler(extraArgs = args)
- var inlineOnlyCompiler = newCompiler(extraArgs = "-Yopt:inline-project")
-
+ val compiler = cached("compiler", () => newCompiler(extraArgs = args))
+ val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-Yopt:inline-project"))
+ import compiler.genBCode.bTypes
// allows inspecting the caches after a compilation run
def notPerRun: List[Clearable] = List(
- compiler.genBCode.bTypes.classBTypeFromInternalName,
- compiler.genBCode.bTypes.byteCodeRepository.compilingClasses,
- compiler.genBCode.bTypes.byteCodeRepository.parsedClasses,
- compiler.genBCode.bTypes.callGraph.callsites)
+ bTypes.classBTypeFromInternalName,
+ bTypes.byteCodeRepository.compilingClasses,
+ bTypes.byteCodeRepository.parsedClasses,
+ bTypes.callGraph.callsites)
notPerRun foreach compiler.perRunCaches.unrecordCache
- def clear(): Unit = { compiler = null; inlineOnlyCompiler = null }
-}
-
-@RunWith(classOf[JUnit4])
-class InlinerTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = InlinerTest
-
- val compiler = InlinerTest.compiler
import compiler.genBCode.bTypes._
import compiler.genBCode.bTypes.backendUtils._
import inlinerHeuristics._
- val inlineOnlyCompiler = InlinerTest.inlineOnlyCompiler
def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
- InlinerTest.notPerRun.foreach(_.clear())
+ notPerRun.foreach(_.clear())
compileClasses(compiler)(scalaCode, javaCode, allowMessage)
// Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same,
// these are created new from the classfile byte array. They are completely separate instances which cannot
@@ -428,7 +420,7 @@ class InlinerTest extends ClearAfterClass {
"""B::flop()I is annotated @inline but could not be inlined:
|Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
|The method bar()I could not be found in the class A or any of its parents.
- |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin
+ |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin
var c = 0
val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn})
@@ -833,11 +825,11 @@ class InlinerTest extends ClearAfterClass {
val warn =
"""failed to determine if <init> should be inlined:
|The method <init>()V could not be found in the class A$Inner or any of its parents.
- |Note that the following parent classes could not be found on the classpath: A$Inner""".stripMargin
+ |Note that the parent class A$Inner could not be found on the classpath.""".stripMargin
var c = 0
- compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-warnings:_"))(
+ compileClasses(newCompiler(extraArgs = args + " -Yopt-warnings:_"))(
scalaCode,
List((javaCode, "A.java")),
allowMessage = i => {c += 1; i.msg contains warn})
@@ -899,7 +891,7 @@ class InlinerTest extends ClearAfterClass {
| def t = System.arraycopy(null, 0, null, 0, 0)
|}
""".stripMargin
- val List(c) = compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-inline-heuristics:everything"))(code)
+ val List(c) = compileClasses(newCompiler(extraArgs = args + " -Yopt-inline-heuristics:everything"))(code)
assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy")
}
@@ -955,18 +947,12 @@ class InlinerTest extends ClearAfterClass {
val List(c, _, _) = compile(code)
val t1 = getSingleMethod(c, "t1")
- assert(t1.instructions forall { // indy is eliminated by push-pop
- case _: InvokeDynamic => false
- case _ => true
- })
+ assertNoIndy(t1)
// the indy call is inlined into t, and the closure elimination rewrites the closure invocation to the body method
assertInvoke(t1, "C", "C$$$anonfun$2")
val t2 = getSingleMethod(c, "t2")
- assert(t2.instructions forall { // indy is eliminated by push-pop
- case _: InvokeDynamic => false
- case _ => true
- })
+ assertNoIndy(t2)
assertInvoke(t2, "M$", "M$$$anonfun$1")
}
@@ -1492,4 +1478,31 @@ class InlinerTest extends ClearAfterClass {
// the forwarder C.f is inlined, so there's no invocation
assertSameSummary(getSingleMethod(c, "f"), List(ICONST_1, IRETURN))
}
+
+ @Test
+ def sd140(): Unit = {
+ val code =
+ """trait T { @inline def f = 0 }
+ |trait U extends T { @inline override def f = 1 }
+ |trait V extends T { def m = 0 }
+ |final class K extends V with U { override def m = super[V].m }
+ |class C { def t = (new K).f }
+ """.stripMargin
+ val c :: _ = compile(code)
+ assertSameSummary(getSingleMethod(c, "t"), List(NEW, "<init>", ICONST_1, IRETURN)) // ICONST_1, U.f is inlined (not T.f)
+ }
+
+ @Test
+ def inlineArrayForeach(): Unit = {
+ val code =
+ """class C {
+ | def consume(x: Int) = ()
+ | def t(a: Array[Int]): Unit = a foreach consume
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ val t = getSingleMethod(c, "t")
+ assertNoIndy(t)
+ assertInvoke(t, "C", "C$$$anonfun$1")
+ }
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala
index dd7fbd9977..003b2d4880 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala
@@ -18,16 +18,9 @@ import ASMConverters._
import scala.tools.testing.ClearAfterClass
import scala.collection.JavaConverters._
-object MethodLevelOptsTest extends ClearAfterClass.Clearable {
- var methodOptCompiler = newCompiler(extraArgs = "-Yopt:l:method")
- def clear(): Unit = { methodOptCompiler = null }
-}
-
@RunWith(classOf[JUnit4])
class MethodLevelOptsTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = MethodLevelOptsTest
-
- val methodOptCompiler = MethodLevelOptsTest.methodOptCompiler
+ val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method"))
def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
index 8dd23ec3ce..6cb3fd3bba 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
@@ -16,15 +16,9 @@ import ASMConverters._
import scala.collection.JavaConverters._
import scala.tools.testing.ClearAfterClass
-object ScalaInlineInfoTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler(extraArgs = "-Yopt:l:none")
- def clear(): Unit = { compiler = null }
-}
-
@RunWith(classOf[JUnit4])
class ScalaInlineInfoTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = ScalaInlineInfoTest
- val compiler = ScalaInlineInfoTest.compiler
+ val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
index 0021a1784d..46f06d1d39 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
@@ -15,27 +15,13 @@ import scala.tools.partest.ASMConverters
import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object UnreachableCodeTest extends ClearAfterClass.Clearable {
- // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks,
- // see comment in BCodeBodyBuilder
- var methodOptCompiler = newCompiler(extraArgs = "-Yopt:l:method")
- var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code")
- var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none")
-
- def clear(): Unit = {
- methodOptCompiler = null
- dceCompiler = null
- noOptCompiler = null
- }
-}
-
@RunWith(classOf[JUnit4])
class UnreachableCodeTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = UnreachableCodeTest
-
- val methodOptCompiler = UnreachableCodeTest.methodOptCompiler
- val dceCompiler = UnreachableCodeTest.dceCompiler
- val noOptCompiler = UnreachableCodeTest.noOptCompiler
+ // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks,
+ // see comment in BCodeBodyBuilder
+ val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method"))
+ val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code"))
+ val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none"))
def assertEliminateDead(code: (Instruction, Boolean)*): Unit = {
val method = genMethod()(code.map(_._1): _*)
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
index 4f71df1822..77e73e64b9 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
@@ -14,16 +14,9 @@ import scala.tools.partest.ASMConverters
import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object UnusedLocalVariablesTest extends ClearAfterClass.Clearable {
- var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code")
- def clear(): Unit = { dceCompiler = null }
-}
-
@RunWith(classOf[JUnit4])
class UnusedLocalVariablesTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = UnusedLocalVariablesTest
-
- val dceCompiler = UnusedLocalVariablesTest.dceCompiler
+ val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code"))
@Test
def removeUnusedVar(): Unit = {
diff --git a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala
index 9a004d5e0e..a7aca31ee3 100644
--- a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/AggregateClassPathTest.scala
@@ -10,6 +10,7 @@ import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.reflect.io.VirtualFile
import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
/**
* Tests whether AggregateFlatClassPath returns correct entries taken from
@@ -17,14 +18,14 @@ import scala.tools.nsc.io.AbstractFile
* (in the case of the repeated entry for a class or a source it returns the first one).
*/
@RunWith(classOf[JUnit4])
-class AggregateFlatClassPathTest {
+class AggregateClassPathTest {
- private class TestFlatClassPath extends FlatClassPath {
+ private abstract class TestClassPathBase extends ClassPath {
override def packages(inPackage: String): Seq[PackageEntry] = unsupported
override def sources(inPackage: String): Seq[SourceFileEntry] = unsupported
override def classes(inPackage: String): Seq[ClassFileEntry] = unsupported
- override def list(inPackage: String): FlatClassPathEntries = unsupported
+ override def list(inPackage: String): ClassPathEntries = unsupported
override def findClassFile(name: String): Option[AbstractFile] = unsupported
override def asClassPathStrings: Seq[String] = unsupported
@@ -32,7 +33,7 @@ class AggregateFlatClassPathTest {
override def asURLs: Seq[URL] = unsupported
}
- private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def classes(inPackage: String): Seq[ClassFileEntry] =
for {
@@ -43,10 +44,10 @@ class AggregateFlatClassPathTest {
override def sources(inPackage: String): Seq[SourceFileEntry] = Nil
// we'll ignore packages
- override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, classes(inPackage))
+ override def list(inPackage: String): ClassPathEntries = ClassPathEntries(Nil, classes(inPackage))
}
- private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def sources(inPackage: String): Seq[SourceFileEntry] =
for {
@@ -57,7 +58,7 @@ class AggregateFlatClassPathTest {
override def classes(inPackage: String): Seq[ClassFileEntry] = Nil
// we'll ignore packages
- override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, sources(inPackage))
+ override def list(inPackage: String): ClassPathEntries = ClassPathEntries(Nil, sources(inPackage))
}
private case class EntryNamesInPackage(inPackage: String)(val names: String*)
@@ -88,7 +89,7 @@ class AggregateFlatClassPathTest {
private def virtualFile(pathPrefix: String, inPackage: String, fileName: String, extension: String) = {
val packageDirs =
- if (inPackage == FlatClassPath.RootPackage) ""
+ if (inPackage == ClassPath.RootPackage) ""
else inPackage.split('.').mkString("/", "/", "")
new VirtualFile(fileName + extension, s"$pathPrefix$packageDirs/$fileName$extension")
}
@@ -101,12 +102,12 @@ class AggregateFlatClassPathTest {
TestSourcePath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
)
- AggregateFlatClassPath(partialClassPaths)
+ AggregateClassPath(partialClassPaths)
}
@Test
def testGettingPackages: Unit = {
- case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestClassPathBase {
override def packages(inPackage: String): Seq[PackageEntry] =
packagesInPackage.find(_.inPackage == inPackage).map(_.names).getOrElse(Nil) map PackageEntryImpl
}
@@ -115,7 +116,7 @@ class AggregateFlatClassPathTest {
ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.c", "pkg1.b", "pkg1.a"),
EntryNamesInPackage(pkg2)("pkg2.d", "pkg2.a", "pkg2.e"))
)
- val cp = AggregateFlatClassPath(partialClassPaths)
+ val cp = AggregateClassPath(partialClassPaths)
val packagesInPkg1 = Seq("pkg1.a", "pkg1.d", "pkg1.f", "pkg1.c", "pkg1.b")
assertEquals(packagesInPkg1, cp.packages(pkg1).map(_.name))
@@ -156,7 +157,7 @@ class AggregateFlatClassPathTest {
TestClassPath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I")),
TestClassPath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
)
- val cp = AggregateFlatClassPath(partialClassPaths)
+ val cp = AggregateClassPath(partialClassPaths)
val sourcesInPkg1 = Seq(sourceFileEntry(dir2, pkg1, "C"),
sourceFileEntry(dir2, pkg1, "B"),
@@ -190,7 +191,7 @@ class AggregateFlatClassPathTest {
)
assertEquals(classesAndSourcesInPkg1, cp.list(pkg1).classesAndSources)
- assertEquals(FlatClassPathEntries(Nil, Nil), cp.list(nonexistingPkg))
+ assertEquals(ClassPathEntries(Nil, Nil), cp.list(nonexistingPkg))
}
@Test
diff --git a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
index 5dee488285..d3d4289d8b 100644
--- a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala
@@ -9,20 +9,17 @@ import org.junit._
import org.junit.rules.TemporaryFolder
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import scala.annotation.tailrec
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.Settings
-import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
@RunWith(classOf[JUnit4])
-class FlatClassPathResolverTest {
+class PathResolverBaseTest {
val tempDir = new TemporaryFolder()
- private val packagesToTest = List(FlatClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io")
- private val classFilesToFind = List("scala.tools.util.FlatClassPathResolver",
+ private val packagesToTest = List(ClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io")
+ private val classFilesToFind = List("scala.tools.util.PathResolver",
"scala.reflect.io.AbstractFile",
"scala.collection.immutable.List",
"scala.Option",
@@ -60,7 +57,7 @@ class FlatClassPathResolverTest {
def deleteTempDir: Unit = tempDir.delete()
private def createFlatClassPath(settings: Settings) =
- new FlatClassPathResolver(settings).result
+ new PathResolver(settings).result
@Test
def testEntriesFromListOperationAgainstSeparateMethods: Unit = {
@@ -70,7 +67,7 @@ class FlatClassPathResolverTest {
val packages = classPath.packages(inPackage)
val classes = classPath.classes(inPackage)
val sources = classPath.sources(inPackage)
- val FlatClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage)
+ val ClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage)
val packageNames = packages.map(_.name).sorted
val packageNamesFromList = packagesFromList.map(_.name).sorted
@@ -96,52 +93,6 @@ class FlatClassPathResolverTest {
}
@Test
- def testCreatedEntriesAgainstRecursiveClassPath: Unit = {
- val flatClassPath = createFlatClassPath(settings)
- val recursiveClassPath = new PathResolver(settings).result
-
- def compareEntriesInPackage(inPackage: String): Unit = {
-
- @tailrec
- def traverseToPackage(packageNameParts: Seq[String], cp: ClassPath[AbstractFile]): ClassPath[AbstractFile] = {
- packageNameParts match {
- case Nil => cp
- case h :: t =>
- cp.packages.find(_.name == h) match {
- case Some(nestedCp) => traverseToPackage(t, nestedCp)
- case _ => throw new Exception(s"There's no package $inPackage in recursive classpath - error when searching for '$h'")
- }
- }
- }
-
- val packageNameParts = if (inPackage == FlatClassPath.RootPackage) Nil else inPackage.split('.').toList
- val recursiveClassPathInPackage = traverseToPackage(packageNameParts, recursiveClassPath)
-
- val flatCpPackages = flatClassPath.packages(inPackage).map(_.name)
- val pkgPrefix = PackageNameUtils.packagePrefix(inPackage)
- val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name)
- assertEquals(s"Packages in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpPackages, flatCpPackages)
-
- val flatCpSources = flatClassPath.sources(inPackage).map(_.name).sorted
- val recursiveCpSources = recursiveClassPathInPackage.classes
- .filter(_.source.nonEmpty)
- .map(_.name).sorted
- assertEquals(s"Source entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpSources, flatCpSources)
-
- val flatCpClasses = flatClassPath.classes(inPackage).map(_.name).sorted
- val recursiveCpClasses = recursiveClassPathInPackage.classes
- .filter(_.binary.nonEmpty)
- .map(_.name).sorted
- assertEquals(s"Class entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
- recursiveCpClasses, flatCpClasses)
- }
-
- packagesToTest foreach compareEntriesInPackage
- }
-
- @Test
def testFindClassFile: Unit = {
val classPath = createFlatClassPath(settings)
classFilesToFind foreach { className =>
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index 812c298c48..8cc7aefdd3 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -3,11 +3,8 @@ package symtab
import scala.reflect.ClassTag
import scala.reflect.internal.{NoPhase, Phase, SomePhase}
-import scala.tools.nsc.classpath.FlatClassPath
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
-import util.{ClassFileLookup, ClassPath}
+import util.ClassPath
import io.AbstractFile
/**
@@ -30,8 +27,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
override def isCompilerUniverse: Boolean = true
- def classPath = platform.classPath
- def flatClassPath: FlatClassPath = platform.flatClassPath
+ def classPath: ClassPath = platform.classPath
object platform extends backend.Platform {
val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
@@ -39,22 +35,12 @@ class SymbolTableForUnitTesting extends SymbolTable {
def platformPhases: List[SubComponent] = Nil
- lazy val classPath: ClassPath[AbstractFile] = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
- "It's not possible to use the recursive classpath representation, when it's not the chosen classpath scanning method")
- new PathResolver(settings).result
- }
-
- private[nsc] lazy val flatClassPath: FlatClassPath = {
- assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
- "It's not possible to use the flat classpath representation, when it's not the chosen classpath scanning method")
- new FlatClassPathResolver(settings).result
- }
+ private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result
def isMaybeBoxed(sym: Symbol): Boolean = ???
def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
def externalEquals: Symbol = ???
- def updateClassPath(subst: Map[ClassFileLookup[AbstractFile], ClassFileLookup[AbstractFile]]): Unit = ???
+ def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = ???
}
object loaders extends symtab.SymbolLoaders {
@@ -69,10 +55,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
class GlobalMirror extends Roots(NoSymbol) {
val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
- def rootLoader: LazyType = settings.YclasspathImpl.value match {
- case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
- case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(classPath)
- }
+ def rootLoader: LazyType = new loaders.PackageLoader(ClassPath.RootPackage, classPath)
override def toString = "compiler mirror"
}
diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
index ac558e2e21..aa83520efb 100644
--- a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
+++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala
@@ -16,18 +16,10 @@ import scala.tools.partest.ASMConverters
import ASMConverters._
import scala.tools.testing.ClearAfterClass
-object PatmatBytecodeTest extends ClearAfterClass.Clearable {
- var compiler = newCompiler()
- var optCompiler = newCompiler(extraArgs = "-Yopt:l:project")
- def clear(): Unit = { compiler = null; optCompiler = null }
-}
-
@RunWith(classOf[JUnit4])
class PatmatBytecodeTest extends ClearAfterClass {
- ClearAfterClass.stateToClear = PatmatBytecodeTest
-
- val compiler = PatmatBytecodeTest.compiler
- val optCompiler = PatmatBytecodeTest.optCompiler
+ val compiler = cached("compiler", () => newCompiler())
+ val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-Yopt:l:project"))
@Test
def t6956(): Unit = {
diff --git a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
deleted file mode 100644
index f2926e3e17..0000000000
--- a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (c) 2014 Contributor. All rights reserved.
- */
-package scala.tools.nsc.util
-
-import scala.reflect.io.AbstractFile
-import scala.tools.nsc.Settings
-import scala.tools.nsc.settings.ClassPathRepresentationType
-import scala.tools.util.PathResolverFactory
-
-/**
- * Simple application to compare efficiency of the recursive and the flat classpath representations
- */
-object ClassPathImplComparator {
-
- private class TestSettings extends Settings {
- val checkClasses = PathSetting("-checkClasses", "Specify names of classes which should be found separated with ;", "")
- val requiredIterations = IntSetting("-requiredIterations",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- val cpCreationRepetitions = IntSetting("-cpCreationRepetitions",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- val cpLookupRepetitions = IntSetting("-cpLookupRepetitions",
- "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
- }
-
- private class DurationStats(name: String) {
- private var sum = 0L
- private var iterations = 0
-
- def noteMeasuredTime(millis: Long): Unit = {
- sum += millis
- iterations += 1
- }
-
- def printResults(): Unit = {
- val avg = if (iterations == 0) 0 else sum.toDouble / iterations
- println(s"$name - total duration: $sum ms; iterations: $iterations; avg: $avg ms")
- }
- }
-
- private lazy val defaultClassesToFind = List(
- "scala.collection.immutable.List",
- "scala.Option",
- "scala.Int",
- "scala.collection.immutable.Vector",
- "scala.util.hashing.MurmurHash3"
- )
-
- private val oldCpCreationStats = new DurationStats("Old classpath - create")
- private val oldCpSearchingStats = new DurationStats("Old classpath - search")
-
- private val flatCpCreationStats = new DurationStats("Flat classpath - create")
- private val flatCpSearchingStats = new DurationStats("Flat classpath - search")
-
- def main(args: Array[String]): Unit = {
-
- if (args contains "-help")
- usage()
- else {
- val oldCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Recursive)
- val flatCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Flat)
-
- val classesToCheck = oldCpSettings.checkClasses.value
- val classesToFind =
- if (classesToCheck.isEmpty) defaultClassesToFind
- else classesToCheck.split(";").toList
-
- def doTest(classPath: => ClassFileLookup[AbstractFile], cpCreationStats: DurationStats, cpSearchingStats: DurationStats,
- cpCreationRepetitions: Int, cpLookupRepetitions: Int)= {
-
- def createClassPaths() = (1 to cpCreationRepetitions).map(_ => classPath).last
- def testClassLookup(cp: ClassFileLookup[AbstractFile]): Boolean = (1 to cpCreationRepetitions).foldLeft(true) {
- case (a, _) => a && checkExistenceOfClasses(classesToFind)(cp)
- }
-
- val cp = withMeasuredTime("Creating classpath", createClassPaths(), cpCreationStats)
- val result = withMeasuredTime("Searching for specified classes", testClassLookup(cp), cpSearchingStats)
- println(s"The end of the test case. All expected classes found = $result \n")
- }
-
- (1 to oldCpSettings.requiredIterations.value) foreach { iteration =>
- if (oldCpSettings.requiredIterations.value > 1)
- println(s"Iteration no $iteration")
-
- println("Recursive (old) classpath representation:")
- doTest(PathResolverFactory.create(oldCpSettings).result, oldCpCreationStats, oldCpSearchingStats,
- oldCpSettings.cpCreationRepetitions.value, oldCpSettings.cpLookupRepetitions.value)
-
- println("Flat classpath representation:")
- doTest(PathResolverFactory.create(flatCpSettings).result, flatCpCreationStats, flatCpSearchingStats,
- flatCpSettings.cpCreationRepetitions.value, flatCpSettings.cpLookupRepetitions.value)
- }
-
- if (oldCpSettings.requiredIterations.value > 1) {
- println("\nOld classpath - summary")
- oldCpCreationStats.printResults()
- oldCpSearchingStats.printResults()
-
- println("\nFlat classpath - summary")
- flatCpCreationStats.printResults()
- flatCpSearchingStats.printResults()
- }
- }
- }
-
- /**
- * Prints usage information
- */
- private def usage(): Unit =
- println("""Use classpath and sourcepath options like in the case of e.g. 'scala' command.
- | There are also two additional options:
- | -checkClasses <semicolon separated class names> Specify names of classes which should be found
- | -requiredIterations <int value> Repeat tests specified count of times (to check e.g. impact of caches)
- | Note: Option -YclasspathImpl will be set automatically for each case.
- """.stripMargin.trim)
-
- private def loadSettings(args: List[String], implType: String) = {
- val settings = new TestSettings()
- settings.processArguments(args, processAll = true)
- settings.YclasspathImpl.value = implType
- if (settings.classpath.isDefault)
- settings.classpath.value = sys.props("java.class.path")
- settings
- }
-
- private def withMeasuredTime[T](operationName: String, f: => T, durationStats: DurationStats): T = {
- val startTime = System.currentTimeMillis()
- val res = f
- val elapsed = System.currentTimeMillis() - startTime
- durationStats.noteMeasuredTime(elapsed)
- println(s"$operationName - elapsed $elapsed ms")
- res
- }
-
- private def checkExistenceOfClasses(classesToCheck: Seq[String])(classPath: ClassFileLookup[AbstractFile]): Boolean =
- classesToCheck.foldLeft(true) {
- case (res, classToCheck) =>
- val found = classPath.findClass(classToCheck).isDefined
- if (!found)
- println(s"Class $classToCheck not found") // of course in this case the measured time will be affected by IO operation
- found
- }
-}
diff --git a/test/junit/scala/tools/testing/ClearAfterClass.java b/test/junit/scala/tools/testing/ClearAfterClass.java
index 232d459c4e..95e170ec13 100644
--- a/test/junit/scala/tools/testing/ClearAfterClass.java
+++ b/test/junit/scala/tools/testing/ClearAfterClass.java
@@ -1,20 +1,53 @@
package scala.tools.testing;
-import org.junit.AfterClass;
+import org.junit.ClassRule;
+import org.junit.rules.TestRule;
+import org.junit.runners.model.Statement;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
/**
- * Extend this class to use JUnit's @AfterClass. This annotation only works on static methods,
+ * Extend this class to use JUnit's @ClassRule. This annotation only works on static methods,
* which cannot be written in Scala.
*
* Example: {@link scala.tools.nsc.backend.jvm.opt.InlinerTest}
*/
public class ClearAfterClass {
- public static interface Clearable {
- void clear();
+ private static Map<Class<?>, Map<String, Object>> cache = new ConcurrentHashMap<>();
+
+ @ClassRule
+ public static TestRule clearClassCache() {
+ return (statement, desc) -> new Statement() {
+ @Override
+ public void evaluate() throws Throwable {
+ ConcurrentHashMap<String, Object> perClassCache = new ConcurrentHashMap<>();
+ cache.put(desc.getTestClass(), perClassCache);
+ try {
+ statement.evaluate();
+ } finally {
+ perClassCache.values().forEach(ClearAfterClass::closeIfClosable);
+ cache.remove(desc.getTestClass());
+ }
+ }
+ };
}
- public static Clearable stateToClear;
+ private static void closeIfClosable(Object o) {
+ if (o instanceof Closeable) {
+ try {
+ ((Closeable) o).close();
+ } catch (IOException e) {
+ // ignore
+ }
+ }
+ }
+
+ public <T> T cached(String key, scala.Function0<T> t) {
+ Map<String, Object> perClassCache = cache.get(getClass());
+ return (T) perClassCache.computeIfAbsent(key, s -> t.apply());
+ }
- @AfterClass
- public static void clearState() { stateToClear.clear(); }
}
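The rewritten rule keeps one value cache per test class: cached(key, f) computes the value on first use, later calls with the same key return that same instance, and when the class rule unwinds the per-class map is dropped, closing any cached value that implements java.io.Closeable; non-Closeable values (such as the compilers above) are simply dereferenced so they can be garbage collected. A sketch of that lifecycle from the Scala side, using a hypothetical UsesTempDirTest and TempOutputDir purely for illustration:

import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.tools.testing.ClearAfterClass

@RunWith(classOf[JUnit4])
class UsesTempDirTest extends ClearAfterClass {
  // Closeable, so ClearAfterClass closes it after the last test in this class
  class TempOutputDir extends java.io.Closeable {
    val dir = java.nio.file.Files.createTempDirectory("clear-after-class-demo")
    def close(): Unit = java.nio.file.Files.deleteIfExists(dir)
  }

  val out = cached("out", () => new TempOutputDir)

  @Test def sharedDirExists(): Unit =
    assert(java.nio.file.Files.exists(out.dir))
}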
diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala
index 5b13397d6a..06d0445e1c 100644
--- a/test/junit/scala/util/matching/RegexTest.scala
+++ b/test/junit/scala/util/matching/RegexTest.scala
@@ -6,6 +6,8 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil._
+
@RunWith(classOf[JUnit4])
class RegexTest {
@Test def t8022CharSequence(): Unit = {
@@ -44,4 +46,66 @@ class RegexTest {
}
assertEquals(List((1,2),(3,4),(5,6)), z)
}
+
+ @Test def `SI-9666: use inline group names`(): Unit = {
+ val r = new Regex("a(?<Bee>b*)c")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertFalse(ms.hasNext)
+ }
+
+ @Test def `SI-9666: use explicit group names`(): Unit = {
+ val r = new Regex("a(b*)c", "Bee")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertFalse(ms.hasNext)
+ }
+
+ @Test def `SI-9666: fall back to explicit group names`(): Unit = {
+ val r = new Regex("a(?<Bar>b*)c", "Bee")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ assertEquals("abbbc", ms.next())
+ assertEquals("bbb", ms group "Bee")
+ assertEquals("bbb", ms group "Bar")
+ assertTrue(ms.hasNext)
+ assertEquals("abc", ms.next())
+ assertEquals("b", ms group "Bee")
+ assertEquals("b", ms group "Bar")
+ assertFalse(ms.hasNext)
+ }
+
+ //type NoGroup = NoSuchElementException
+ type NoGroup = IllegalArgumentException
+
+ @Test def `SI-9666: throw on bad name`(): Unit = {
+ assertThrows[NoGroup] {
+ val r = new Regex("a(?<Bar>b*)c")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ assertThrows[NoGroup] {
+ val r = new Regex("a(?<Bar>b*)c", "Bar")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ assertThrows[NoGroup] {
+ val r = new Regex("a(b*)c", "Bar")
+ val ms = r findAllIn "stuff abbbc more abc and so on"
+ assertTrue(ms.hasNext)
+ ms group "Bee"
+ }
+ }
}
diff --git a/test/scaladoc/run/t9752.check b/test/scaladoc/run/t9752.check
new file mode 100644
index 0000000000..daeafb8ecc
--- /dev/null
+++ b/test/scaladoc/run/t9752.check
@@ -0,0 +1,5 @@
+List(Body(List(Paragraph(Chain(List(Summary(Text())))), Code(class A
+
+
+class B))))
+Done.
diff --git a/test/scaladoc/run/t9752.scala b/test/scaladoc/run/t9752.scala
new file mode 100644
index 0000000000..b11c7f5c32
--- /dev/null
+++ b/test/scaladoc/run/t9752.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = s"""
+ /**
+ * Foo
+ *
+ * @example
+ * {{{
+ * class A
+ *
+ *
+ * class B
+ * }}}
+ */
+ object Foo
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val obj = root._object("Foo")
+ println(obj.comment.get.example)
+ }
+}
diff --git a/versions.properties b/versions.properties
index d4112325d2..0d80d5b1cf 100644
--- a/versions.properties
+++ b/versions.properties
@@ -26,11 +26,11 @@ scala-xml.version.number=1.0.5
scala-parser-combinators.version.number=1.0.4
scala-swing.version.number=2.0.0-M2
scala-swing.version.osgi=2.0.0.M2
-jline.version=2.12.1
+jline.version=2.14.1
scala-asm.version=5.0.4-scala-3
# external modules, used internally (not shipped)
-partest.version.number=1.0.13
+partest.version.number=1.0.14
scalacheck.version.number=1.11.6
# TODO: modularize the compiler