author     Lukas Rytz <lukas.rytz@gmail.com>  2015-09-18 10:33:28 +0200
committer  Lukas Rytz <lukas.rytz@gmail.com>  2015-09-18 10:33:28 +0200
commit     133e7d053cc62ce0703d611e34fa750175cc3b48 (patch)
tree       918e2dd3d8523f41ad8b1da4cf09b5580a13bd25
parent     91cd6d1a3db422c576f15eceb0715c572ec44081 (diff)
parent     76269ca7a63848aee1f141da75be8ca436bf9e6c (diff)
Merge remote-tracking branch 'upstream/2.12.x' into opt/heuristics
-rw-r--r--  CONTRIBUTING.md | 4
-rw-r--r--  README.md | 76
-rw-r--r--  bincompat-forward.whitelist.conf | 9
-rw-r--r--  docs/TODO | 2
-rwxr-xr-x  scripts/jobs/integrate/bootstrap | 6
-rw-r--r--  spec/01-lexical-syntax.md | 42
-rw-r--r--  spec/05-classes-and-objects.md | 2
-rw-r--r--  spec/06-expressions.md | 2
-rw-r--r--  spec/07-implicits.md | 2
-rw-r--r--  spec/12-the-scala-standard-library.md | 6
-rw-r--r--  spec/13-syntax-summary.md | 5
-rw-r--r--  spec/15-changelog.md | 4
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/io/Jar.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 180
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 88
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 137
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 35
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Tags.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 4
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectGlobal.scala | 16
-rw-r--r--  src/library-aux/scala/Any.scala | 2
-rw-r--r--  src/library/scala/AnyVal.scala | 2
-rw-r--r--  src/library/scala/Equals.scala | 5
-rw-r--r--  src/library/scala/Option.scala | 2
-rw-r--r--  src/library/scala/Predef.scala | 2
-rw-r--r--  src/library/scala/annotation/implicitAmbiguous.scala | 34
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 9
-rw-r--r--  src/library/scala/collection/immutable/MapLike.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/Set.scala | 38
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 250
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 15
-rw-r--r--  src/library/scala/concurrent/SyncVar.scala | 2
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 14
-rw-r--r--  src/library/scala/concurrent/util/Unsafe.java | 38
-rw-r--r--  src/library/scala/io/Codec.scala | 2
-rw-r--r--  src/library/scala/reflect/ClassManifestDeprecatedApis.scala | 4
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 8
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 2
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 2
-rw-r--r--  src/library/scala/sys/process/package.scala | 3
-rw-r--r--  src/library/scala/util/Sorting.scala | 2
-rw-r--r--  src/library/scala/util/control/Exception.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ClassfileConstants.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/Kinds.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 11
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/transform/UnCurry.scala | 22
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 9
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala | 31
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala | 2
-rw-r--r--  test/files/jvm/interpreter.check | 6
-rw-r--r--  test/files/neg/implicit-ambiguous-2.check | 4
-rw-r--r--  test/files/neg/implicit-ambiguous-2.scala | 11
-rw-r--r--  test/files/neg/implicit-ambiguous-invalid.check | 7
-rw-r--r--  test/files/neg/implicit-ambiguous-invalid.flags | 1
-rw-r--r--  test/files/neg/implicit-ambiguous-invalid.scala | 6
-rw-r--r--  test/files/neg/implicit-ambiguous.check | 4
-rw-r--r--  test/files/neg/implicit-ambiguous.scala | 11
-rw-r--r--  test/files/neg/name-lookup-stable.check | 2
-rw-r--r--  test/files/neg/name-lookup-stable.scala | 2
-rw-r--r--  test/files/neg/t5376.scala | 2
-rw-r--r--  test/files/neg/t6666d.check | 4
-rw-r--r--  test/files/neg/t6810.check | 28
-rw-r--r--  test/files/neg/t6810.scala | 26
-rw-r--r--  test/files/neg/t8597b.scala | 2
-rw-r--r--  test/files/neg/t8675b.scala | 2
-rw-r--r--  test/files/neg/virtpatmat_exhaust_compound.scala | 2
-rw-r--r--  test/files/pos/t2405.scala | 4
-rw-r--r--  test/files/pos/t6666d.scala (renamed from test/files/neg/t6666d.scala) | 0
-rw-r--r--  test/files/pos/t8002-nested-scope.scala | 2
-rw-r--r--  test/files/pos/t9442.scala | 14
-rw-r--r--  test/files/run/constrained-types.check | 8
-rw-r--r--  test/files/run/dead-code-elimination.scala | 2
-rw-r--r--  test/files/run/idempotency-case-classes.check | 3
-rw-r--r--  test/files/run/kind-repl-command.check | 2
-rw-r--r--  test/files/run/names-defaults.scala | 2
-rw-r--r--  test/files/run/nothingTypeNoOpt.scala | 2
-rw-r--r--  test/files/run/reify-repl-fail-gracefully.check | 2
-rw-r--r--  test/files/run/reify_newimpl_22.check | 2
-rw-r--r--  test/files/run/reify_newimpl_23.check | 2
-rw-r--r--  test/files/run/reify_newimpl_25.check | 2
-rw-r--r--  test/files/run/reify_newimpl_26.check | 2
-rw-r--r--  test/files/run/repl-bare-expr.check | 12
-rw-r--r--  test/files/run/repl-parens.check | 12
-rw-r--r--  test/files/run/repl-paste-2.check | 2
-rw-r--r--  test/files/run/repl-power.check | 3
-rw-r--r--  test/files/run/repl-power.scala | 5
-rw-r--r--  test/files/run/repl-reset.check | 8
-rw-r--r--  test/files/run/repl-serialization.check | 1
-rw-r--r--  test/files/run/repl-trim-stack-trace.scala | 6
-rw-r--r--  test/files/run/t1931.scala | 43
-rw-r--r--  test/files/run/t4542.check | 2
-rw-r--r--  test/files/run/t4594-repl-settings.scala | 2
-rw-r--r--  test/files/run/t5655.check | 4
-rw-r--r--  test/files/run/t7319.check | 6
-rw-r--r--  test/files/run/t7747-repl.check | 32
-rw-r--r--  test/files/run/t8047.scala | 2
-rw-r--r--  test/files/run/t8346.check | 6
-rw-r--r--  test/files/run/t8346.scala | 34
-rw-r--r--  test/files/run/t9170.scala | 24
-rw-r--r--  test/files/run/t9206.scala | 4
-rw-r--r--  test/files/run/t9375.check | 60
-rw-r--r--  test/files/run/t9375.scala | 276
-rw-r--r--  test/files/run/t9425.scala | 8
-rw-r--r--  test/files/run/toolbox_expand_macro.check | 1
-rw-r--r--  test/files/run/toolbox_expand_macro.scala | 23
-rw-r--r--  test/files/run/xMigration.check | 6
-rw-r--r--  test/files/scalacheck/avl.scala | 112
-rw-r--r--  test/junit/scala/collection/immutable/SetTests.scala | 81
-rw-r--r--  test/junit/scala/collection/immutable/StreamTest.scala | 16
-rw-r--r--  test/junit/scala/runtime/ScalaRunTimeTest.scala | 57
-rw-r--r--  test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala | 2
146 files changed, 1440 insertions, 984 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e6557d78dd..d01a71b9bd 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -86,7 +86,7 @@ A pull request should consist of commits with messages that clearly state what p
Commit logs should be stated in the active, present tense.
-A commit's subject should be 60 characters or less. Overall, think of
+A commit's subject should be 72 characters or less. Overall, think of
the first line of the commit as a description of the action performed
by the commit on the code base, so use the active voice and the
present tense. That also makes the commit subjects easy to reuse in
@@ -113,7 +113,7 @@ Our pull request bot, Scabot, automatically builds all the commits in a PR indiv
Click on the little x next to a commit sha to go to the overview of the PR validation job. To diagnose a failure, consult the console output of the job that failed.
-See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) and [Scabot repo](https://github.com/) for full details on PR validation. One tip you should know is that commenting `/rebuild` on a PR asks validation to be run again on the same commits. This is only necessary when a spurious failure occurred.
+See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) and [Scabot repo](https://github.com/scala/scabot) for full details on PR validation. One tip you should know is that commenting `/rebuild` on a PR asks validation to be run again on the same commits. This is only necessary when a spurious failure occurred.
### Pass code review
diff --git a/README.md b/README.md
index bdb7fcbc28..7ee3cac42b 100644
--- a/README.md
+++ b/README.md
@@ -25,13 +25,14 @@ If you need some help with your PR at any time, please feel free to @-mention an
<img src="https://avatars.githubusercontent.com/dickwall" height="50px" title="Dick Wall"/> | [`@dickwall`](https://github.com/dickwall) | process & community, documentation |
<img src="https://avatars.githubusercontent.com/dragos" height="50px" title="Iulian Dragos"/> | [`@dragos`](https://github.com/dragos) | specialization, back end |
<img src="https://avatars.githubusercontent.com/axel22" height="50px" title="Aleksandr Prokopec"/> | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization |
+ <img src="https://avatars.githubusercontent.com/janekdb" height="50px" title="Janek Bogucki"/> | [`@janekdb`](https://github.com/janekdb) | documentation |
P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list!
# Handy Links
- [A wealth of documentation](http://docs.scala-lang.org)
- [Scala CI](https://scala-ci.typesafe.com/)
- - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/);
+ - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/)
- [(Deprecated) Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/)
- Scala mailing lists:
- [Compiler and standard library development](https://groups.google.com/group/scala-internals)
@@ -78,7 +79,7 @@ To pinpoint bugs, we often use git bisect, which is only effective when we can c
This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one).
-Please do not @mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)).
+Please do not @-mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @-mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)).
## Reviews
@@ -91,13 +92,18 @@ To help you plan your contributions, we communicate our plans on a regular basis
## Reviewing
-Once you've gained some experience with the code base and the process, the logical next step is to offers reviews for others's contributions. The main goal of this whole process, in the end, is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part!
+Once you've gained some experience with the code base and the process, the next step is to review the contributions of others.
+
+The main goal of this whole process is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part!
## [Labels](https://github.com/scala/scala/labels)
- - `reviewed` automatically added by scabot when a comment prefixed with LGTM is posted
- - `welcome` reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes)
- - `release-notes` reviewer / queue curator adds to make sure this PR is highlighted in the release notes
- - `on-hold` added when this PR should not yet be merged, even though CI is green
+
+Label | Description
+--------------- | -----------
+`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted
+`welcome` | reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes)
+`release-notes` | reviewer / queue curator adds to make sure this PR is highlighted in the release notes
+`on-hold` | added when this PR should not yet be merged, even though CI is green
### Tips & Tricks
Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows:
@@ -126,14 +132,23 @@ Use the latest IntelliJ IDEA release and install the Scala plugin from within th
The following steps are required to use IntelliJ IDEA on Scala trunk
- Run `ant init`. This will download some JARs to `./build/deps`, which are included in IntelliJ's classpath.
- - Run src/intellij/setup.sh
- - Open ./src/intellij/scala.ipr in IntelliJ
- - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the Java 1.6 SDK.
+ - Run `./src/intellij/setup.sh`.
+ - Open `./src/intellij/scala.ipr` in IntelliJ.
+ - `File` → `Project Structure` → `Project` → `Project SDK`. Create an SDK entry named "1.6" containing the Java 1.6 SDK.
(You may use a later SDK for local development, but the CI will verify against Java 6.)
-Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built
-directly using the STARR compiler (which is downloaded from maven, according to `starr.version` in `versions.properties`).
+Compilation within IDEA is performed in `-Dlocker.skip=1` mode: the sources are built
+directly using the STARR compiler (which is downloaded from [the Central Repository](http://central.sonatype.org/), according to `starr.version` in `versions.properties`).
+
+## Building with sbt (EXPERIMENTAL)
+The experimental sbt-based build definition has arrived! Run `sbt package`
+to build the compiler. You can run `sbt test` to run unit (JUnit) tests.
+Use `sbt test/it:test` to run integration (partest) tests.
+
+We would like to migrate to sbt build as quickly as possible. If you would
+like to help please use the scala-internals mailing list to discuss your
+ideas and coordinate your effort with others.
## Building with Ant
@@ -144,25 +159,19 @@ Verify your build using `ant test-opt`.
The Scala build system is based on Apache Ant. Most required pre-compiled
libraries are part of the repository (in 'lib/'). The following however is
-assumed to be installed on the build machine:
+assumed to be installed on the build machine: TODO
-## Building with Sbt (EXPERIMENTAL)
-
-The experimental sbt-based build definition has arrived! Run `sbt package`
-to build the compiler. You can run `sbt test` to run unit (JUnit) tests.
-Use `sbt test/it:test` to run integration (partest) tests.
-
-We would like to migrate to sbt build as quickly as possible. If you would
-like to help please use the scala-internals mailing list to discuss your
-ideas and coordinate your effort with others.
-
-### Tips and tricks
+### Ant Tips and tricks
Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant).
- - `./pull-binary-libs.sh` downloads all binary artifacts associated with this commit.
- - `ant -p` prints out information about the commonly used ant targets.
- - `ant` or `ant build`: A quick compilation (to build/quick) of your changes using the locker compiler.
+Command | Description
+----------------------- | -----------
+`./pull-binary-libs.sh` | downloads all binary artifacts associated with this commit.
+`ant -p` | prints out information about the commonly used ant targets.
+`ant` or `ant build` | A quick compilation (to `build/quick`) of your changes using the locker compiler.
+`ant dist` | builds a distribution in 'dists/latest'.
+`ant all.clean` | removes all build files and all distributions.
A typical debug cycle incrementally builds quick, then uses it to compile and run the file
`sandbox/test.scala` as follows:
@@ -174,17 +183,14 @@ We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick
`ant test-opt` tests that your code is working and fit to be committed:
- Runs the test suite and bootstrapping test on quick.
- - You can run the suite only (skipping strap) with 'ant test.suite'.
+ - You can run the suite only (skipping strap) with `ant test.suite`.
`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick.
-Note: on most machines this requires more heap than is allocate by default. You can adjust the parameters with ANT_OPTS. Example command line:
+Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line:
+```sh
+ANT_OPTS="-Xms512M -Xmx2048M -Xss1M" ant docs
```
-ANT_OPTS = "-Xms512M -Xmx2048M -Xss1M" ant docs
-```
-
- - `ant dist` builds a distribution in 'dists/latest'.
- - `ant all.clean` Removes all build files and all distributions.
### Bootstrapping concepts
NOTE: This is somewhat outdated, but the ideas still hold.
@@ -194,7 +200,7 @@ compiles Scala in layers. Each layer is a complete compiled Scala compiler and l
A superior layer is always compiled by the layer just below it. Here is a short
description of the four layers that the build uses, from bottom to top:
- - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from maven central.
+ - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from the Central Repository.
- `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`).
- `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code.
- `strap`: a test layer used to check stability of the build.
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 552c0d85f5..2d32e3e9da 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -469,6 +469,15 @@ filter {
{
matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort"
problemName=MissingMethodProblem
+ },
+ // SI-8362: AbstractPromise extends AtomicReference
+ // It's ok to change a package-protected class in an impl package,
+ // even though it's not clear why it changed -- bug in generic signature generation?
+ // -public class scala.concurrent.impl.Promise$DefaultPromise<T> extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise<T>
+ // +public class scala.concurrent.impl.Promise$DefaultPromise<T extends java.lang.Object> extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise<T>
+ {
+ matchName="scala.concurrent.impl.Promise$DefaultPromise"
+ problemName=MissingTypesProblem
}
]
}
diff --git a/docs/TODO b/docs/TODO
index 094202f53e..558aa87205 100644
--- a/docs/TODO
+++ b/docs/TODO
@@ -53,7 +53,7 @@
The process is about the same for symbols in PolyTypes. The main
difference is that type parameters may be referenced and thus we
- need something like De Bruijn indicies to represent these
+ need something like De Bruijn indices to represent these
references.
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index 706a20daf4..b2ae195dc4 100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -33,7 +33,7 @@
# - Set <MODULE>_VER to override the default, e.g. XML_VER="1.0.4".
# - The git revision is set to <MODULE>_REF="v$<MODULE>_VER". Make sure the tag exists (you can't override <MODULE>_REF).
#
-# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version nubmers for modules.
+# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version numbers for modules.
# - By default the script sets all <MODULE>_REF to "HEAD", override to build a specific revision.
# - The <MODULE>_VER is set to a nightly version, for example "1.0.3-7-g14888a2-nightly" (you can't override <MODULE>_VER)
@@ -311,7 +311,7 @@ scalaVerToBinary() {
# - the suffix starts with "-bin": 2.12.0-bin-M1
# - the patch version is > 0 : 2.12.1-M1, 1.12.3-RC2, 2.12.1-sha-nightly, 2.12.2-SNAPSHOT
#
- # Othwersise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT
+ # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT
#
# Adapted from sbt: https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39
#
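[Editor's note] To make the rule in the comment above concrete, here is a small illustrative sketch in Scala; the script itself implements this in bash, and the object, regex, and names below are hypothetical, not part of the repository.
```scala
object BinaryVersion {
  // "major.minor.patch" optionally followed by a suffix such as "-M1" or "-bin-M1"
  private val Version = """(\d+)\.(\d+)\.(\d+)(-.*)?""".r

  def scalaVerToBinary(ver: String): String = ver match {
    // the binary version is "major.minor" if the suffix starts with "-bin"
    // or the patch version is greater than zero
    case Version(maj, min, patch, suffix)
        if (suffix != null && suffix.startsWith("-bin")) || patch.toInt > 0 =>
      s"$maj.$min"
    // otherwise, the binary version is the full version
    case _ => ver
  }
}

// BinaryVersion.scalaVerToBinary("2.12.0-bin-M1") == "2.12"
// BinaryVersion.scalaVerToBinary("2.12.1-M1")     == "2.12"
// BinaryVersion.scalaVerToBinary("2.12.0-M1")     == "2.12.0-M1"
```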
@@ -513,7 +513,7 @@ bootstrap() {
echo "### Bootstrapping Scala using locker"
- # # TODO: close all open staging repos so that we can be reaonably sure the only open one we see after publishing below is ours
+ # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours
# # the ant call will create a new one
#
# Rebuild Scala with these modules so that all binary versions are consistent.
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
index e26cb796c8..72ae6e8794 100644
--- a/spec/01-lexical-syntax.md
+++ b/spec/01-lexical-syntax.md
@@ -398,40 +398,46 @@ members of type `Boolean`.
### Character Literals
```ebnf
-characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’
+characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’
```
A character literal is a single character enclosed in quotes.
-The character is either a printable unicode character or is described
-by an [escape sequence](#escape-sequences).
+The character can be any Unicode character except the single quote
+delimiter or `\u000A` (LF) or `\u000D` (CR);
+or any Unicode character represented by either a
+[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences).
> ```scala
> 'a' '\u0041' '\n' '\t'
> ```
-Note that `'\u000A'` is _not_ a valid character literal because
-Unicode conversion is done before literal parsing and the Unicode
-character `\u000A` (line feed) is not a printable
-character. One can use instead the escape sequence `'\n'` or
-the octal escape `'\12'` ([see here](#escape-sequences)).
+Note that although Unicode conversion is done early during parsing,
+so that Unicode characters are generally equivalent to their escaped
+expansion in the source text, literal parsing accepts arbitrary
+Unicode escapes, including the character literal `'\u000A'`,
+which can also be written using the escape sequence `'\n'`.
### String Literals
```ebnf
stringLiteral ::= ‘"’ {stringElement} ‘"’
-stringElement ::= printableCharNoDoubleQuote | charEscapeSeq
+stringElement ::= charNoDoubleQuoteOrNewline | UnicodeEscape | charEscapeSeq
```
-A string literal is a sequence of characters in double quotes. The
-characters are either printable unicode character or are described by
-[escape sequences](#escape-sequences). If the string literal
-contains a double quote character, it must be escaped,
-i.e. `"\""`. The value of a string literal is an instance of
-class `String`.
+A string literal is a sequence of characters in double quotes.
+The characters can be any Unicode character except the double quote
+delimiter or `\u000A` (LF) or `\u000D` (CR);
+or any Unicode character represented by either a
+[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences).
+
+If the string literal contains a double quote character, it must be escaped using
+`"\""`.
+
+The value of a string literal is an instance of class `String`.
> ```scala
-> "Hello,\nWorld!"
-> "This string contains a \" character."
+> "Hello, world!\n"
+> "\"Hello,\" replied the world."
> ```
#### Multi-Line String Literals
@@ -443,7 +449,7 @@ multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
A multi-line string literal is a sequence of characters enclosed in
triple quotes `""" ... """`. The sequence of characters is
-arbitrary, except that it may contain three or more consuctive quote characters
+arbitrary, except that it may contain three or more consecutive quote characters
only at the very end. Characters
must not necessarily be printable; newlines or other
control characters are also permitted. Unicode escapes work as everywhere else, but none
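[Editor's note] A few Scala literals that match the revised wording above, for illustration only: a Unicode escape for LF is accepted as a character literal under the new rule, quotes inside ordinary strings must be escaped, and raw newlines are permitted in triple-quoted strings.
```scala
object LiteralExamples {
  val lineFeed: Char = '\u000A'                        // accepted under the new rule, same as '\n'
  val quoted: String = "\"Hello,\" replied the world." // escaped double quote
  val block: String  = """a multi-line string:
may contain "quotes" and raw newlines"""
}
```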
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
index 3a70f2a137..69828ec7fe 100644
--- a/spec/05-classes-and-objects.md
+++ b/spec/05-classes-and-objects.md
@@ -631,7 +631,7 @@ class Outer {
```
Here, accesses to the method `f` can appear anywhere within
-`OuterClass`, but not outside it. Accesses to method
+`Outer`, but not outside it. Accesses to method
`g` can appear anywhere within the package
`outerpkg.innerpkg`, as would be the case for
package-private methods in Java. Finally, accesses to method
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index 85e288bf5f..9cd58ea346 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -1736,7 +1736,7 @@ so `scala.Any` is the type inferred for `a`.
_Eta-expansion_ converts an expression of method type to an
equivalent expression of function type. It proceeds in two steps.
-First, one identifes the maximal sub-expressions of $e$; let's
+First, one identifies the maximal sub-expressions of $e$; let's
say these are $e_1 , \ldots , e_m$. For each of these, one creates a
fresh name $x_i$. Let $e'$ be the expression resulting from
replacing every maximal subexpression $e_i$ in $e$ by the
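[Editor's note] A concrete illustration of the two steps (the object and method names below are made up for the example): the maximal sub-expression is evaluated once and bound to a fresh name before the function is formed.
```scala
object EtaExpansionDemo {
  def mkList(): List[Int] = { println("prefix evaluated"); List(1, 2, 3) }

  // Eta-expansion of `mkList().take _` proceeds roughly as:
  //   { val x$1 = mkList(); (n: Int) => x$1.take(n) }
  // so "prefix evaluated" is printed once, even though `take` is applied twice.
  val take: Int => List[Int] = mkList().take _

  def main(args: Array[String]): Unit = {
    println(take(2)) // List(1, 2)
    println(take(1)) // List(1)
  }
}
```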
diff --git a/spec/07-implicits.md b/spec/07-implicits.md
index 5e10373959..726320ed33 100644
--- a/spec/07-implicits.md
+++ b/spec/07-implicits.md
@@ -84,7 +84,7 @@ The _parts_ of a type $T$ are:
- if $T$ is an abstract type, the parts of its upper bound;
- if $T$ denotes an implicit conversion to a type with a method with argument types $T_1 , \ldots , T_n$ and result type $U$,
the union of the parts of $T_1 , \ldots , T_n$ and $U$;
-- the parts of quantified (existential or univeral) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`);
+- the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`);
- in all other cases, just $T$ itself.
Note that packages are internally represented as classes with companion modules to hold the package members.
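[Editor's note] A small sketch of the rule above (not taken from this diff): because the companion object of a class belongs to the parts of a type, an implicit defined there is found without an import.
```scala
class Meters(val value: Double)

object Meters {
  // Found via the implicit scope of the type Meters (its companion is a "part")
  implicit val ordering: Ordering[Meters] = Ordering.by(_.value)
}

object PartsOfATypeDemo {
  def main(args: Array[String]): Unit = {
    val sorted = List(new Meters(2.0), new Meters(0.5)).sorted // uses Meters.ordering
    println(sorted.map(_.value)) // List(0.5, 2.0)
  }
}
```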
diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md
index e76035f458..8f65191312 100644
--- a/spec/12-the-scala-standard-library.md
+++ b/spec/12-the-scala-standard-library.md
@@ -171,7 +171,7 @@ Any numeric value type $T$ supports the following methods.
evaluated by converting the receiver and its argument to their
operation type and performing the given arithmetic operation of that
type.
- * Parameterless arithmethic methods identity (`+`) and negation
+ * Parameterless arithmetic methods identity (`+`) and negation
(`-`), with result type $T$. The first of these returns the
receiver unchanged, whereas the second returns its negation.
* Conversion methods `toByte`, `toShort`, `toChar`,
@@ -194,7 +194,7 @@ Integer numeric value types support in addition the following operations:
operation of that type.
* A parameterless bit-negation method (`~`). Its result type is
- the reciver type $T$ or `Int`, whichever is larger.
+ the receiver type $T$ or `Int`, whichever is larger.
The operation is evaluated by converting the receiver to the result
type and negating every bit in its value.
* Bit-shift methods left-shift (`<<`), arithmetic right-shift
@@ -745,7 +745,7 @@ object Predef {
def readf2(format: String) = Console.readf2(format)
def readf3(format: String) = Console.readf3(format)
- // Implict conversions ------------------------------------------------
+ // Implicit conversions ------------------------------------------------
...
}
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
index 7f73e107de..a4b4aae570 100644
--- a/spec/13-syntax-summary.md
+++ b/spec/13-syntax-summary.md
@@ -57,11 +57,12 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’
booleanLiteral ::= ‘true’ | ‘false’
-characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’
+characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’
stringLiteral ::= ‘"’ {stringElement} ‘"’
| ‘"""’ multiLineChars ‘"""’
-stringElement ::= (printableChar except ‘"’)
+stringElement ::= charNoDoubleQuoteOrNewline
+ | UnicodeEscape
| charEscapeSeq
multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’}
diff --git a/spec/15-changelog.md b/spec/15-changelog.md
index 3c8739359a..751a571ecc 100644
--- a/spec/15-changelog.md
+++ b/spec/15-changelog.md
@@ -31,7 +31,7 @@ formal parameter types. Added section on
[numeric widening](06-expressions.html#numeric-widening) to support
weak conformance.
-Tightened rules to avoid accidential [overrides](05-classes-and-objects.html#overriding).
+Tightened rules to avoid accidental [overrides](05-classes-and-objects.html#overriding).
Removed class literals.
@@ -53,7 +53,7 @@ has been brought in line with. From now on `+=`, has the same precedence as `=`.
#### Wildcards as function parameters
-A formal parameter to an anonymous fucntion may now be a
+A formal parameter to an anonymous function may now be a
[wildcard represented by an underscore](06-expressions.html#placeholder-syntax-for-anonymous-functions).
> _ => 7 // The function that ignores its argument
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 6c073c0b4c..091d42bb6d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -325,8 +325,7 @@ trait Reshape {
if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
vdef1::Nil
case ddef: DefDef if ddef.symbol.isLazy =>
- def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
- if (hasUnitType(ddef.symbol)) {
+ if (isUnitType(ddef.symbol.info)) {
// since lazy values of type Unit don't have val's
// we need to create them from scratch
toPreTyperLazyVal(ddef) :: Nil
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 6d24b31531..bf93ad30bc 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -16,16 +16,16 @@ import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
*
- * <p>For example, here is a complete Scala script on Unix:</pre>
- * <pre>
+ * For example, here is a complete Scala script on Unix:
+ * {{{
* #!/bin/sh
* exec scala "$0" "$@"
* !#
* Console.println("Hello, world!")
* args.toList foreach Console.println
- * </pre>
- * <p>And here is a batch file example on Windows XP:</p>
- * <pre>
+ * }}}
+ * And here is a batch file example on Windows XP:
+ * {{{
* ::#!
* @echo off
* call scala %0 %*
@@ -33,7 +33,7 @@ import util.Exceptional.unwrap
* ::!#
* Console.println("Hello, world!")
* args.toList foreach Console.println
- * </pre>
+ * }}}
*
* @author Lex Spoon
* @version 1.0, 15/05/2006
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index bf53c47e9a..332acf4a26 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -261,7 +261,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
* Create a method based on a Function
*
* Used both to under `-Ydelambdafy:method` create a lifted function and
- * under `-Ydelamdafy:inline` to create the apply method on the anonymous
+ * under `-Ydelambdafy:inline` to create the apply method on the anonymous
* class.
*
* It creates a method definition with value params cloned from the
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index b76fb3d823..f9e6a12241 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -2043,11 +2043,11 @@ self =>
/** Drop `private` modifier when followed by a qualifier.
* Contract `abstract` and `override` to ABSOVERRIDE
*/
- private def normalizeModifers(mods: Modifiers): Modifiers =
+ private def normalizeModifiers(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
- normalizeModifers(mods &~ Flags.PRIVATE)
+ normalizeModifiers(mods &~ Flags.PRIVATE)
else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
- normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+ normalizeModifiers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
@@ -2092,7 +2092,7 @@ self =>
* AccessModifier ::= (private | protected) [AccessQualifier]
* }}}
*/
- def accessModifierOpt(): Modifiers = normalizeModifers {
+ def accessModifierOpt(): Modifiers = normalizeModifiers {
in.token match {
case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
case _ => NoMods
@@ -2106,7 +2106,7 @@ self =>
* | override
* }}}
*/
- def modifiers(): Modifiers = normalizeModifers {
+ def modifiers(): Modifiers = normalizeModifiers {
def loop(mods: Modifiers): Modifiers = in.token match {
case PRIVATE | PROTECTED =>
loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
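[Editor's note] To illustrate what `normalizeModifiers` handles (a hypothetical snippet, not from this diff): a qualified `private` keeps only its access boundary, and `abstract override` on a stackable trait member is contracted to the single ABSOVERRIDE flag internally.
```scala
package demo

class C {
  private[demo] def f = 1   // the PRIVATE flag is dropped, the access boundary `demo` is kept
}

trait Logger { def log(msg: String): Unit }

trait Timestamped extends Logger {
  // parsed with ABSTRACT | OVERRIDE, contracted to ABSOVERRIDE
  abstract override def log(msg: String): Unit = super.log(s"[ts] $msg")
}
```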
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 92833d647b..51bb0d3c5b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -226,7 +226,7 @@ trait Scanners extends ScannersCommon {
* RPAREN if region starts with '('
* RBRACKET if region starts with '['
* RBRACE if region starts with '{'
- * ARROW if region starts with `case'
+ * ARROW if region starts with 'case'
* STRINGLIT if region is a string interpolation expression starting with '${'
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
@@ -515,7 +515,7 @@ trait Scanners extends ScannersCommon {
charLitOr(getIdentRest)
else if (isOperatorPart(ch) && (ch != '\\'))
charLitOr(getOperatorRest)
- else {
+ else if (!isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) {
getLitChar()
if (ch == '\'') {
nextChar()
@@ -525,6 +525,8 @@ trait Scanners extends ScannersCommon {
syntaxError("unclosed character literal")
}
}
+ else
+ syntaxError("unclosed character literal")
}
fetchSingleQuote()
case '.' =>
@@ -690,7 +692,7 @@ trait Scanners extends ScannersCommon {
private def unclosedStringLit(): Unit = syntaxError("unclosed string literal")
- private def getRawStringLit(): Unit = {
+ @tailrec private def getRawStringLit(): Unit = {
if (ch == '\"') {
nextRawChar()
if (isTripleQuote()) {
@@ -707,7 +709,7 @@ trait Scanners extends ScannersCommon {
}
}
- @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 01eff71057..a927097b62 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -2016,7 +2016,7 @@ abstract class GenICode extends SubComponent {
*
* This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
* handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
- * Later reachability analysis will remove unreacahble code.
+ * Later reachability analysis will remove unreachable code.
*/
def Try(body: Context => Context,
handlers: List[(Symbol, TypeKind, Context => Context)],
@@ -2060,7 +2060,7 @@ abstract class GenICode extends SubComponent {
if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) {
if (finalizer != EmptyTree) {
val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
- this.addActiveHandler(exh) // .. and body aswell
+ this.addActiveHandler(exh) // .. and body as well
val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
exhStartCtx.bb killIf outerCtx.bb.ignore
val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index 843648282b..0f17b5d694 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -20,7 +20,7 @@ abstract class ICodeCheckers {
* </p>
* <ul>
* <li>
- * for primitive operations: the type and numer of operands match
+ * for primitive operations: the type and number of operands match
* the type of the operation
* </li>
* <li>
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 22ac8f84d4..e5eb0b79d5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -92,8 +92,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genThrow(expr: Tree): BType = {
val thrownKind = tpeTK(expr)
- // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable.
- // Similarly for scala.Nothing (again, as defined in src/libray-aux).
+ // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable.
+ // Similarly for scala.Nothing (again, as defined in src/library-aux).
assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference).get)
genLoad(expr, thrownKind)
lineNumber(expr)
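[Editor's note] A minimal example of the `throw null` case mentioned in the comment above, for illustration only: it type-checks because `Null` conforms to every reference type, and at run time the JVM raises a `NullPointerException`.
```scala
object ThrowNullDemo {
  def main(args: Array[String]): Unit =
    try throw null
    catch { case _: NullPointerException => println("caught NPE") }
}
```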
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
index c8ba0d9dbc..aff2d2d8c9 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -1144,7 +1144,7 @@ object BTypes {
* The map is indexed by the string s"$name$descriptor" (to
* disambiguate overloads).
*
- * @param warning Contains an warning message if an error occured when building this
+ * @param warning Contains an warning message if an error occurred when building this
* InlineInfo, for example if some classfile could not be found on
* the classpath. This warning can be reported later by the inliner.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 7096f16d75..7883ce7ffa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -492,8 +492,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
* generic classes or interfaces.
*
* @param superName the internal of name of the super class. For interfaces,
- * the super class is {@link Object}. May be <tt>null</tt>, but
- * only for the {@link Object} class.
+ * the super class is [[Object]]. May be <tt>null</tt>, but
+ * only for the [[Object]] class.
*
* @param interfaces the internal names of the class's interfaces (see
* {@link Type#getInternalName() getInternalName}). May be
@@ -529,6 +529,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") =>
reporter.error(sym.pos,
s"Could not write class $jclassName because it exceeds JVM code size limits. ${e.getMessage}")
+ case e: java.io.IOException if e.getMessage != null && (e.getMessage contains "File name too long") =>
+ reporter.error(sym.pos, e.getMessage + "\n" +
+ "This can happen on some encrypted or legacy file systems. Please see SI-3623 for more details.")
+
}
}
@@ -2693,7 +2697,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self =>
case CMPG =>
(kind: @unchecked) match {
case FLOAT => emit(Opcodes.FCMPG)
- case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
+ case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se6/html/Instructions2.doc3.html
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala
index c50648acd9..f1ac703532 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala
@@ -1,7 +1,7 @@
package scala.tools.nsc.backend.jvm
/**
- * Summary on the ASM ananlyzer framework
+ * Summary on the ASM analyzer framework
* --------------------------------------
*
* Value
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
index 5d3b7ff272..fb7dd16909 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala
@@ -367,7 +367,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) {
}
/**
- * Stores a local varaible index the opcode offset required for operating on that variable.
+ * Stores a local variable index the opcode offset required for operating on that variable.
*
* The xLOAD / xSTORE opcodes are in the following sequence: I, L, F, D, A, so the offset for
* a local variable holding a reference (`A`) is 4. See also method `getOpcode` in [[scala.tools.asm.Type]].
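[Editor's note] The offset of 4 follows directly from the JVM opcode numbering; the values below come from the JVM specification and are shown here only to illustrate the comment, not as code from the optimizer.
```scala
object LoadStoreOffsets {
  // xLOAD opcodes in sequence: ILOAD, LLOAD, FLOAD, DLOAD, ALOAD
  val ILOAD = 21; val LLOAD = 22; val FLOAD = 23; val DLOAD = 24; val ALOAD = 25
  // xSTORE opcodes follow the same sequence: ISTORE (54) ... ASTORE (58)
  val ISTORE = 54; val ASTORE = 58

  assert(ALOAD - ILOAD == 4)   // offset for a reference (`A`) load
  assert(ASTORE - ISTORE == 4) // offset for a reference (`A`) store
}
```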
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index 20256ca63b..baa747492f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -341,7 +341,7 @@ class Inliner[BT <: BTypes](val btypes: BT) {
clonedInstructions.insert(argStores)
- // label for the exit of the inlined functions. xRETURNs are rplaced by GOTOs to this label.
+ // label for the exit of the inlined functions. xRETURNs are replaced by GOTOs to this label.
val postCallLabel = newLabelNode
clonedInstructions.add(postCallLabel)
@@ -704,9 +704,9 @@ class Inliner[BT <: BTypes](val btypes: BT) {
// - a method name+type
//
// execution [3]
- // - resolve the CSP, yielding the boostrap method handle, the static args and the name+type
+ // - resolve the CSP, yielding the bootstrap method handle, the static args and the name+type
// - resolution entails accessibility checking [4]
- // - execute the `invoke` method of the boostrap method handle (which is signature polymorphic, check its javadoc)
+ // - execute the `invoke` method of the bootstrap method handle (which is signature polymorphic, check its javadoc)
// - the descriptor for the call is made up from the actual arguments on the stack:
// - the first parameters are "MethodHandles.Lookup, String, MethodType", then the types of the constant arguments,
// - the return type is CallSite
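[Editor's note] For context, an assumption about typical 2.12 output rather than something shown in this diff: an ordinary Scala function literal compiles to an `invokedynamic` instruction whose bootstrap method is `java.lang.invoke.LambdaMetafactory`, and the resolution steps sketched above apply to that call site.
```scala
object IndyExample {
  // Under -Ydelambdafy:method (the 2.12 default) this closure is emitted as an
  // invokedynamic call site; the bootstrap method builds the Function1 instance.
  val inc: Int => Int = x => x + 1

  def main(args: Array[String]): Unit = println(inc(41)) // 42
}
```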
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index 2967f67e9c..efb026cdff 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -154,7 +154,7 @@ object Jar {
def update(key: Attributes.Name, value: String) = attrs.put(key, value)
}
- // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
+ // See http://docs.oracle.com/javase/7/docs/api/java/nio/file/Path.html
// for some ideas.
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 79776485de..82e7c76409 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -111,7 +111,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
impl setInfo new LazyImplClassType(iface)
}
- /** Return the implementation class of a trait; create a new one of one does not yet exist */
+ /** Return the implementation class of a trait; create a new one if one does not yet exist */
def implClass(iface: Symbol): Symbol = {
iface.info
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 7c66bda46b..48eb878e2a 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -137,7 +137,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* and thus may only be accessed from value or method definitions owned by the current class
* (ie there's no point drilling down into nested classes).
*
- * (d) regarding candidates in (b), they are accesible from all places listed in (c) and in addition
+ * (d) regarding candidates in (b), they are accessible from all places listed in (c) and in addition
* from nested classes (nested at any number of levels).
*
* In all cases, we're done with traversing as soon as all candidates have been ruled out.
@@ -396,9 +396,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
if (stat1 eq stat) {
- assert(ctorParams(genericClazz).length == constrInfo.constrParams.length)
+ assert(ctorParams(genericClazz).length == primaryConstrParams.length)
// this is just to make private fields public
- (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1)
+ (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1)
val stat2 = rewriteArrayUpdate(stat1)
// statements coming from the original class need retyping in the current context
@@ -435,16 +435,16 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// postfix = postfix.tail
// }
- if (shouldGuard && usesSpecializedField && stats.nonEmpty) {
+ if (guardSpecializedFieldInit && intoConstructor.usesSpecializedField && stats.nonEmpty) {
// save them for duplication in the specialized subclass
guardedCtorStats(clazz) = stats
- ctorParams(clazz) = constrInfo.constrParams
+ ctorParams(clazz) = primaryConstrParams
val tree =
If(
Apply(
CODE.NOT (
- Apply(gen.mkAttributedRef(specializedFlag), List())),
+ Apply(gen.mkAttributedRef(hasSpecializedFieldsSym), List())),
List()),
Block(stats, Literal(Constant(()))),
EmptyTree)
@@ -476,27 +476,17 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
val stats = impl.body // the transformed template body
val localTyper = typer.atOwner(impl, clazz)
- val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
- val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
-
- val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
-
- case class ConstrInfo(
- constr: DefDef, // The primary constructor
- constrParams: List[Symbol], // ... and its parameters
- constrBody: Block // ... and its body
- )
- // decompose primary constructor into the three entities above.
- val constrInfo: ConstrInfo = {
- val ddef = (stats find (_.symbol.isPrimaryConstructor))
- ddef match {
- case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
- case x =>
- abort("no constructor in template: impl = " + impl)
- }
- }
- import constrInfo._
+ val hasSpecializedFieldsSym = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+ // The constructor of a non-specialized class that has specialized subclasses
+ // should use `q"${hasSpecializedFieldsSym}()"` to guard the initialization of specialized fields.
+ val guardSpecializedFieldInit = (hasSpecializedFieldsSym != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
+
+ val isDelayedInitSubclass = clazz isSubClass DelayedInitClass
+
+ // find and dissect primary constructor
+ val (primaryConstr, primaryConstrParams, primaryConstrBody) = stats collectFirst {
+ case dd@DefDef(_, _, _, vps :: Nil, _, rhs: Block) if dd.symbol.isPrimaryConstructor => (dd, vps map (_.symbol), rhs)
+ } getOrElse abort("no constructor in template: impl = " + impl)
// The parameter accessor fields which are members of the class
val paramAccessors = clazz.constrParamAccessors
@@ -509,27 +499,26 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
def parameterNamed(name: Name): Symbol = {
def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
- (constrParams filter matchesName) match {
- case Nil => abort(name + " not in " + constrParams)
+ primaryConstrParams filter matchesName match {
+ case Nil => abort(name + " not in " + primaryConstrParams)
case p :: _ => p
}
}
- /*
- * `usesSpecializedField` makes a difference in deciding whether constructor-statements
- * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of
- * one or more specialized sub-classes.
- *
- * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
- * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with.
- * That way, trips to a map in `specializeTypes` are saved.
- */
- var usesSpecializedField: Boolean = false
-
// A transformer for expressions that go into the constructor
- private class IntoCtorTransformer extends Transformer {
-
- private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz)
+ object intoConstructor extends Transformer {
+ /*
+ * `usesSpecializedField` makes a difference in deciding whether constructor-statements
+ * should be guarded in a `guardSpecializedFieldInit` class, ie in a class that's the generic super-class of
+ * one or more specialized sub-classes.
+ *
+ * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
+ * we skip setting `usesSpecializedField` in case the current class isn't `guardSpecializedFieldInit` to start with.
+ * That way, trips to a map in `specializeTypes` are saved.
+ */
+ var usesSpecializedField: Boolean = false
+
+ private def isParamRef(sym: Symbol) = sym.isParamAccessor && sym.owner == clazz
// Terminology: a stationary location is never written after being read.
private def isStationaryParamRef(sym: Symbol) = (
@@ -548,10 +537,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* (b.2) the constructor in the specialized (sub-)class.
* (c) isn't part of a DelayedInit subclass.
*/
- private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym))
+ private def canBeSupplanted(sym: Symbol) = !isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym)
override def transform(tree: Tree): Tree = tree match {
-
case Apply(Select(This(_), _), List()) =>
// references to parameter accessor methods of own class become references to parameters
// outer accessors become references to $outer parameter
@@ -566,7 +554,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// references to parameter accessor field of own class become references to parameters
gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField`
+ case Select(_, _) if guardSpecializedFieldInit => // reasoning behind this guard in the docu of `usesSpecializedField`
if (possiblySpecialized(tree.symbol)) {
usesSpecializedField = true
}
@@ -576,18 +564,10 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
super.transform(tree)
}
- }
-
- private val intoConstructorTransformer = new IntoCtorTransformer
-
- // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
- def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
-
- // Should tree be moved in front of super constructor call?
- def canBeMoved(tree: Tree) = tree match {
- case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
- case _ => false
+ // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
+ def apply(oldowner: Symbol, tree: Tree) =
+ if (tree eq EmptyTree) tree
+ else transform(tree.changeOwner(oldowner -> primaryConstr.symbol))
}
// Create an assignment to class field `to` with rhs `from`
@@ -629,10 +609,10 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
val classInitStatBuf = new ListBuffer[Tree]
// generate code to copy pre-initialized fields
- for (stat <- constrBody.stats) {
+ for (stat <- primaryConstrBody.stats) {
constrStatBuf += stat
stat match {
- case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
+ case ValDef(mods, name, _, _) if mods hasFlag PRESUPER =>
// stat is the constructor-local definition of the field value
val fields = presupers filter (_.getterName == name)
assert(fields.length == 1)
@@ -643,45 +623,48 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
}
- // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
- for (stat <- stats) stat match {
- case DefDef(_,_,_,_,_,rhs) =>
- // methods with constant result type get literals as their body
- // all methods except the primary constructor go into template
- stat.symbol.tpe match {
- case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
- case _ =>
- if (stat.symbol.isPrimaryConstructor) ()
- else if (stat.symbol.isConstructor) auxConstructorBuf += stat
- else defBuf += stat
- }
- case ValDef(mods, _, _, rhs) if !mods.hasStaticFlag =>
+
+ for (stat <- stats) {
+ val statSym = stat.symbol
+
+ stat match {
+ // recurse on class definition, store in defBuf
+ case _: ClassDef => defBuf += new ConstructorTransformer(unit).transform(stat)
+
+ // methods (except primary constructor) go into template
+ // (non-primary ctors --> auxConstructorBuf / regular defs --> defBuf)
+ case _: DefDef if statSym.isPrimaryConstructor => ()
+ case _: DefDef if statSym.isConstructor => auxConstructorBuf += stat
+ case _: DefDef => defBuf += stat
+
// val defs with constant right-hand sides are eliminated.
- // for all other val defs, an empty valdef goes into the template and
- // the initializer goes as an assignment into the constructor
- // if the val def is an early initialized or a parameter accessor, it goes
- // before the superclass constructor call, otherwise it goes after.
- // Lazy vals don't get the assignment in the constructor.
- if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs)
- (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
- stat.symbol, rhs1)
+ case _: ValDef if statSym.info.isInstanceOf[ConstantType] => ()
+
+ // For all other val defs, an empty valdef goes into the template.
+ // Additionally, non-lazy vals are initialized by an assignment in:
+ // - the class initializer (static),
+ // - the constructor, before the super call (early initialized or a parameter accessor),
+ // - the constructor, after the super call (regular val).
+ case ValDef(mods, _, _, rhs) =>
+ val initializingRhs =
+ if (statSym.isLazy) EmptyTree
+ else if (!mods.hasStaticFlag) intoConstructor(statSym, rhs)
+ else rhs
+
+ if (initializingRhs ne EmptyTree) {
+ val initPhase =
+ if (mods hasFlag STATIC) classInitStatBuf
+ else if (mods hasFlag PRESUPER | PARAMACCESSOR) constrPrefixBuf
+ else constrStatBuf
+
+ initPhase += mkAssign(statSym, initializingRhs)
}
+
defBuf += deriveValDef(stat)(_ => EmptyTree)
- }
- case ValDef(_, _, _, rhs) =>
- // Add static initializer statements to classInitStatBuf and remove the rhs from the val def.
- classInitStatBuf += mkAssign(stat.symbol, rhs)
- defBuf += deriveValDef(stat)(_ => EmptyTree)
-
- case ClassDef(_, _, _, _) =>
- // classes are treated recursively, and left in the template
- defBuf += new ConstructorTransformer(unit).transform(stat)
- case _ =>
+
// all other statements go into the constructor
- constrStatBuf += intoConstructor(impl.symbol, stat)
+ case _ => constrStatBuf += intoConstructor(impl.symbol, stat)
+ }
}
populateOmittables()
@@ -716,12 +699,12 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
rewriteDelayedInit()
// Assemble final constructor
- defBuf += deriveDefDef(constr)(_ =>
+ defBuf += deriveDefDef(primaryConstr)(_ =>
treeCopy.Block(
- constrBody,
+ primaryConstrBody,
paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr))
+ primaryConstrBody.expr))
// Followed by any auxiliary constructors
defBuf ++= auxConstructorBuf
@@ -732,6 +715,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// Eliminate all field definitions that can be dropped from template
val templateWithoutOmittables: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustBeKept(stat.symbol)))
+
// Add the static initializers
val transformed: Template = addStaticInits(templateWithoutOmittables, classInitStatBuf, localTyper)
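
For readers skimming the triage above: after this phase the template keeps only empty field definitions, and the initializers are re-emitted as assignments in the class initializer (static vals), before the super call (early definitions and parameter accessors), or after it (regular vals), as the new comment in the hunk spells out. A rough hand-written illustration of that lowering (not compiler output; the class and names are made up):

    // Source, for illustration only.
    class Greeter(val name: String) {
      val greeting: String = "Hello, " + name  // regular val
      println(greeting)                        // arbitrary statement
    }

    // Roughly what the constructors phase leaves behind:
    //   - empty field definitions for `name` and `greeting` in the template
    //   - a primary constructor containing, in order:
    //       this.name = name                  // parameter accessor, placed before the super call
    //       super.<init>()
    //       this.greeting = "Hello, " + name  // regular val, placed after the super call
    //       println(this.greeting)
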
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index ddf003bb98..33fd5d0461 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -281,7 +281,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val parents = addSerializable(abstractFunctionErasedType)
val funOwner = originalFunction.symbol.owner
- // TODO harmonize the naming of delamdafy anon-fun classes with those spun up by Uncurry
+ // TODO harmonize the naming of delambdafy anon-fun classes with those spun up by Uncurry
// - make `anonClass.isAnonymousClass` true.
// - use `newAnonymousClassSymbol` or push the required variations into a similar factory method
// - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash`
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index a079a76ce7..00a994fe87 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -778,7 +778,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
- val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ val rhs: Tree = gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats).changeOwner(currentOwner -> defSym)
val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal))))
defSym
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 72e2174bf8..57639a94c7 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -582,6 +582,7 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
+ val ddSym = dd.symbol
val (newParamss, newRhs): (List[List[ValDef]], Tree) =
if (dependentParamTypeErasure isDependent dd)
dependentParamTypeErasure erase dd
@@ -593,11 +594,22 @@ abstract class UnCurry extends InfoTransform
(vparamss1, rhs0)
}
+ // A no-arg method with ConstantType result type can safely be reduced to the corresponding Literal
+ // (only pure methods are typed as ConstantType). We could also do this for methods with arguments,
+ // after ensuring the arguments are not referenced.
+ val literalRhsIfConst =
+ if (newParamss.head.isEmpty) { // We know newParamss.length == 1 from above
+ ddSym.info.resultType match {
+ case tp@ConstantType(value) => Literal(value) setType tp setPos newRhs.pos // inlining of gen.mkAttributedQualifier(tp)
+ case _ => newRhs
+ }
+ } else newRhs
+
val flatdd = copyDefDef(dd)(
vparamss = newParamss,
- rhs = nonLocalReturnKeys get dd.symbol match {
- case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
- case None => newRhs
+ rhs = nonLocalReturnKeys get ddSym match {
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(literalRhsIfConst, k, ddSym))
+ case None => literalRhsIfConst
}
)
addJavaVarargsForwarders(dd, flatdd)
@@ -691,9 +703,46 @@ abstract class UnCurry extends InfoTransform
// declared type and assign this to a synthetic val. Later, we'll patch
// the method body to refer to this, rather than the parameter.
val tempVal: ValDef = {
+ // SI-9442: using the "uncurry-erased" type (the one after the uncurry phase) can lead to incorrect
+ // tree transformations. For example, compiling:
+ // ```
+ // def foo(c: Ctx)(l: c.Tree): Unit = {
+ // val l2: c.Tree = l
+ // }
+ // ```
+ // Results in the following AST:
+ // ```
+ // def foo(c: Ctx, l: Ctx#Tree): Unit = {
+ // val l$1: Ctx#Tree = l.asInstanceOf[Ctx#Tree]
+ // val l2: c.Tree = l$1 // no, not really, it's not.
+ // }
+ // ```
+ // Of course, this is incorrect, since `l$1` has type `Ctx#Tree`, which is not a subtype of `c.Tree`.
+ //
+ // So what we need to do is to use the pre-uncurry type when creating `l$1`, which is `c.Tree` and is
+ // correct. Now, there are two additional problems:
+ // 1. when varargs and by-name params are involved, the uncurry transformation desugars these special
+ // cases to actual typerefs, eg:
+ // ```
+ // T* ~> Seq[T] (Scala-defined varargs)
+ // T* ~> Array[T] (Java-defined varargs)
+ // =>T ~> Function0[T] (by name params)
+ // ```
+ // we use the DesugaredParameterType object (defined in scala.reflect.internal.transform.UnCurry)
+ // to redo this desugaring manually here
+ // 2. the type needs to be normalized, since `gen.mkCast` checks this (no HK here, just aliases have
+ // to be expanded before handing the type to `gen.mkAttributedCast`, which calls `gen.mkCast`)
+ val info0 =
+ enteringUncurry(p.symbol.info) match {
+ case DesugaredParameterType(desugaredTpe) =>
+ desugaredTpe
+ case tpe =>
+ tpe
+ }
+ val info = info0.normalize
val tempValName = unit freshTermName (p.name + "$")
- val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
- atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), info)))
}
Packed(newParam, tempVal)
}
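
The SI-9442 comment above is easiest to appreciate with a complete snippet in front of you. A minimal shape of the problem, using the same names as the comment (illustration only, not the actual regression test):

    trait Ctx {
      type Tree
    }

    object DependentParams {
      // `l` has a type that depends on the first parameter list; after uncurry flattens
      // the lists, the synthetic temp val for `l` must keep the pre-uncurry type `c.Tree`,
      // not the erased `Ctx#Tree`, or the body below no longer typechecks.
      def foo(c: Ctx)(l: c.Tree): Unit = {
        val l2: c.Tree = l
      }
    }
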
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 8bcd5e6f12..2b6a4c763a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -44,7 +44,7 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
- * We evaluate "X with conform to P" by checking `X <: P_wild, where
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
* P_wild is the result of substituting wildcard types in place of
* pattern type variables. This is intentionally stricter than
* (X matchesPattern P), see SI-8597 for motivating test cases.
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index b0bd9977a8..c9d3b3da96 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -1105,7 +1105,7 @@ trait ContextErrors {
def GetterDefinedTwiceError(getter: Symbol) =
issueSymbolTypeError(getter, getter+" is defined twice")
- def ValOrValWithSetterSuffixError(tree: Tree) =
+ def ValOrVarWithSetterSuffixError(tree: Tree) =
issueNormalTypeError(tree, "Names of vals or vars may not end in `_='")
def PrivateThisCaseClassParameterError(tree: Tree) =
@@ -1212,7 +1212,8 @@ trait ContextErrors {
import definitions._
- def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
+ def AmbiguousImplicitError(info1: ImplicitInfo, tree1: Tree,
+ info2: ImplicitInfo, tree2: Tree,
pre1: String, pre2: String, trailer: String)
(isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
@@ -1248,10 +1249,20 @@ trait ContextErrors {
if (explanation == "") "" else "\n" + explanation
)
}
+
+ def treeTypeArgs(annotatedTree: Tree) = annotatedTree match {
+ case TypeApply(_, args) => args.map(_.toString)
+ case _ => Nil
+ }
+
context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
- if (isView) viewMsg
- else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
- )
+ (tree1.symbol, tree2.symbol) match {
+ case (ImplicitAmbiguousMsg(msg), _) => msg.format(treeTypeArgs(tree1))
+ case (_, ImplicitAmbiguousMsg(msg)) => msg.format(treeTypeArgs(tree2))
+ case (_, _) if isView => viewMsg
+ case (_, _) => s"ambiguous implicit values:\n${coreMsg}match expected type $pt"
+ }
+ ))
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 196b137a3e..2fb3ae41e2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -110,10 +110,10 @@ trait Implicits {
* Ignore their constr field! The list of type constraints returned along with each tree specifies the constraints that
* must be met by the corresponding type parameter in `tpars` (for the returned implicit view to be valid).
*
- * @arg tp from-type for the implicit conversion
- * @arg context search implicits here
- * @arg tpars symbols that should be considered free type variables
- * (implicit search should not try to solve them, just track their constraints)
+ * @param tp from-type for the implicit conversion
+ * @param context search implicits here
+ * @param tpars symbols that should be considered free type variables
+ * (implicit search should not try to solve them, just track their constraints)
*/
def allViewsFrom(tp: Type, context: Context, tpars: List[Symbol]): List[(SearchResult, List[TypeConstraint])] = {
// my untouchable typevars are better than yours (they can't be constrained by them)
@@ -324,8 +324,10 @@ trait Implicits {
*/
class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors {
val searchId = implicitSearchId()
- private def typingLog(what: String, msg: => String) =
- typingStack.printTyping(tree, f"[search #$searchId] $what $msg")
+ private def typingLog(what: String, msg: => String) = {
+ if (printingOk(tree))
+ typingStack.printTyping(f"[search #$searchId] $what $msg")
+ }
import infer._
if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
@@ -885,7 +887,7 @@ trait Implicits {
* - find the most likely one
* - if it matches, forget about all others it improves upon
*/
- @tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
+ @tailrec private def rankImplicits(pending: Infos, acc: List[(SearchResult, ImplicitInfo)]): List[(SearchResult, ImplicitInfo)] = pending match {
case Nil => acc
case firstPending :: otherPending =>
def firstPendingImproves(alt: ImplicitInfo) =
@@ -912,7 +914,7 @@ trait Implicits {
val pendingImprovingBest = undoLog undo {
otherPending filterNot firstPendingImproves
}
- rankImplicits(pendingImprovingBest, firstPending :: acc)
+ rankImplicits(pendingImprovingBest, (newBest, firstPending) :: acc)
}
}
@@ -928,14 +930,14 @@ trait Implicits {
// So if there is any element not improved upon by the first it is an error.
rankImplicits(eligible, Nil) match {
case Nil => ()
- case chosen :: rest =>
- rest find (alt => !improves(chosen, alt)) match {
- case Some(competing) =>
- AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context)
+ case (chosenResult, chosenInfo) :: rest =>
+ rest find { case (_, alt) => !improves(chosenInfo, alt) } match {
+ case Some((competingResult, competingInfo)) =>
+ AmbiguousImplicitError(chosenInfo, chosenResult.tree, competingInfo, competingResult.tree, "both", "and", "")(isView, pt, tree)(context)
return AmbiguousSearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala
case _ =>
- if (isView) chosen.useCountView += 1
- else chosen.useCountArg += 1
+ if (isView) chosenInfo.useCountView += 1
+ else chosenInfo.useCountArg += 1
}
}
@@ -1445,9 +1447,9 @@ trait Implicits {
}
}
- object ImplicitNotFoundMsg {
- def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match {
- case Some(m) => Some(new Message(sym, m))
+ class ImplicitAnnotationMsg(f: Symbol => Option[String], clazz: Symbol, annotationName: String) {
+ def unapply(sym: Symbol): Option[(Message)] = f(sym) match {
+ case Some(m) => Some(new Message(sym, m, annotationName))
case None if sym.isAliasType =>
// perform exactly one step of dealiasing
// this is necessary because ClassManifests are now aliased to ClassTags
@@ -1459,41 +1461,45 @@ trait Implicits {
// check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
- sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match {
- case Some(m) => new Message(sym, m).validate
- case None => Some("Missing argument `msg` on implicitNotFound annotation.")
+ sym.getAnnotation(clazz).flatMap(_.stringArg(0) match {
+ case Some(m) => new Message(sym, m, annotationName).validate
+ case None => Some(s"Missing argument `msg` on $annotationName annotation.")
})
+ }
+
+ object ImplicitNotFoundMsg extends ImplicitAnnotationMsg(_.implicitNotFoundMsg, ImplicitNotFoundClass, "implicitNotFound")
+ object ImplicitAmbiguousMsg extends ImplicitAnnotationMsg(_.implicitAmbiguousMsg, ImplicitAmbiguousClass, "implicitAmbiguous")
+
+ class Message(sym: Symbol, msg: String, annotationName: String) {
// http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html
private val Intersobralator = """\$\{\s*([^}\s]+)\s*\}""".r
- class Message(sym: Symbol, msg: String) {
- private def interpolate(text: String, vars: Map[String, String]) =
- Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
- case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
+ private def interpolate(text: String, vars: Map[String, String]) =
+ Intersobralator.replaceAllIn(text, (_: Regex.Match) match {
+ case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "")
// #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw)
- })
+ })
- private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
- private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
+ private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
+ private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
- def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
+ def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
- def format(typeArgs: List[String]): String =
- interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
+ def format(typeArgs: List[String]): String =
+ interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
- def validate: Option[String] = {
- val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
- val decls = typeParamNames.toSet
+ def validate: Option[String] = {
+ val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet
+ val decls = typeParamNames.toSet
- (refs &~ decls) match {
- case s if s.isEmpty => None
- case unboundNames =>
- val singular = unboundNames.size == 1
- val ess = if (singular) "" else "s"
- val bee = if (singular) "is" else "are"
- Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @implicitNotFound annotation $bee not defined by $sym.")
- }
+ (refs &~ decls) match {
+ case s if s.isEmpty => None
+ case unboundNames =>
+ val singular = unboundNames.size == 1
+ val ess = if (singular) "" else "s"
+ val bee = if (singular) "is" else "are"
+ Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @$annotationName annotation $bee not defined by $sym.")
}
}
}
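
Since the new ImplicitAnnotationMsg machinery generalizes the existing @implicitNotFound support, a small usage sketch of the message interpolation it performs may help (the trait and method names are invented for illustration):

    import scala.annotation.implicitNotFound

    // ${A} and ${B} are replaced with the type arguments found at the error site,
    // exactly the substitution the Message class above implements.
    @implicitNotFound("Cannot prove that ${A} can be converted to ${B}.")
    trait Convertible[A, B]

    object ConvertibleDemo {
      def convert[A, B](a: A)(implicit ev: Convertible[A, B]): B = ???
      // convert[Int, String](1)  // error: Cannot prove that Int can be converted to String.
    }
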
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index bf705e89ad..a5fdbb5148 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1375,7 +1375,7 @@ trait Infer extends Checkable {
* Otherwise, if there is no best alternative, error.
*
* @param argtpes0 contains the argument types. If an argument is named, as
- * "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
+ * "a = 3", the corresponding type is `NamedType("a", Int)`. If the name
* of some NamedType does not exist in an alternative's parameter names,
* the type is replaces by `Unit`, i.e. the argument is treated as an
* assignment expression.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 99dd81c7e2..3ed128cbc5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -55,6 +55,13 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
def globalSettings = global.settings
+ /** Obtains a `ClassLoader` instance used for macro expansion.
+ *
+ * By default a new `ScalaClassLoader` is created using the classpath
+ * from global and the classloader of self as parent.
+ *
+ * Mirrors with runtime definitions (e.g. Repl) need to adjust this method.
+ */
protected def findMacroClassLoader(): ClassLoader = {
val classpath = global.classPath.asURLs
macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
@@ -658,7 +665,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
//
// Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer
// the undetermined type params. Therefore we need to do something ourselves or otherwise this
- // expandee will forever remaing not expanded (see SI-5692). A traditional way out of this conundrum
+ // expandee will forever remain not expanded (see SI-5692). A traditional way out of this conundrum
// is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
// but sometimes, if the inferencer lacks information, it will be forced to approximate.
//
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index f3856db552..f3632b144d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -134,28 +134,28 @@ trait MethodSynthesis {
ImplicitClassWrapper(tree).createAndEnterSymbol()
}
- def enterGetterSetter(tree: ValDef) {
- val ValDef(mods, name, _, _) = tree
- if (nme.isSetterName(name))
- ValOrValWithSetterSuffixError(tree)
-
- tree.symbol = (
- if (mods.isLazy) {
+ // TODO: see if we can link symbol creation & tree derivation by sharing the Field/Getter/Setter factories
+ def enterGetterSetter(tree: ValDef): Unit = {
+ tree.symbol =
+ if (tree.mods.isLazy) {
val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol()
enterLazyVal(tree, lazyValGetter)
} else {
- if (mods.isPrivateLocal)
- PrivateThisCaseClassParameterError(tree)
- val getter = Getter(tree).createAndEnterSymbol()
+ val getter = Getter(tree)
+ val getterSym = getter.createAndEnterSymbol()
+
// Create the setter if necessary.
- if (mods.isMutable)
+ if (getter.needsSetter)
Setter(tree).createAndEnterSymbol()
- // If abstract, the tree gets the getter's symbol. Otherwise, create a field.
- if (mods.isDeferred) getter setPos tree.pos
+ // If the getter is abstract, the tree gets the getter's symbol,
+ // otherwise, create a field (assume the getter requires storage).
+ // NOTE: we cannot look at symbol info, since we're in the process of deriving them
+ // (luckily, they only matter for lazy vals, which we've ruled out in this else branch,
+ // and `doNotDeriveField` will skip them if `!mods.isLazy`)
+ if (Field.noFieldFor(tree)) getterSym setPos tree.pos
else enterStrictVal(tree)
}
- )
enterBeans(tree)
}
@@ -177,11 +177,11 @@ trait MethodSynthesis {
}
def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
- case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
+ case vd @ ValDef(mods, name, tpt, rhs) if deriveAccessors(vd) && !vd.symbol.isModuleVar =>
// If we don't save the annotations, they seem to wander off.
val annotations = stat.symbol.initialize.annotations
val trees = (
- allValDefDerived(vd)
+ (field(vd) ::: standardAccessors(vd) ::: beanAccessors(vd))
map (acc => atPos(vd.pos.focus)(acc derive annotations))
filterNot (_ eq EmptyTree)
)
@@ -221,11 +221,14 @@ trait MethodSynthesis {
stat :: Nil
}
- def standardAccessors(vd: ValDef): List[DerivedFromValDef] = (
- if (vd.mods.isMutable && !vd.mods.isLazy) List(Getter(vd), Setter(vd))
- else if (vd.mods.isLazy) List(LazyValGetter(vd))
- else List(Getter(vd))
- )
+ def standardAccessors(vd: ValDef): List[DerivedFromValDef] =
+ if (vd.mods.isLazy) List(LazyValGetter(vd))
+ else {
+ val getter = Getter(vd)
+ if (getter.needsSetter) List(getter, Setter(vd))
+ else List(getter)
+ }
+
def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
if (vd.symbol hasAnnotation BeanPropertyAttr)
@@ -234,15 +237,8 @@ trait MethodSynthesis {
BooleanBeanGetter(vd) :: setter
else Nil
}
- def allValDefDerived(vd: ValDef) = {
- val field = if (vd.mods.isDeferred || (vd.mods.isLazy && hasUnitType(vd.symbol))) Nil
- else List(Field(vd))
- field ::: standardAccessors(vd) ::: beanAccessors(vd)
- }
- // Take into account annotations so that we keep annotated unit lazy val
- // to get better error message already from the cps plugin itself
- def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
+ def field(vd: ValDef): List[Field] = if (Field.noFieldFor(vd)) Nil else List(Field(vd))
/** This trait assembles what's needed for synthesizing derived methods.
* Important: Typically, instances of this trait are created TWICE for each derived
@@ -260,7 +256,6 @@ trait MethodSynthesis {
def name: TermName
/** The flags that are retained from the original symbol */
-
def flagsMask: Long
/** The flags that the derived symbol has in addition to those retained from
@@ -284,8 +279,9 @@ trait MethodSynthesis {
def enclClass: Symbol
// Final methods to make the rest easier to reason about.
- final def mods = tree.mods
- final def basisSym = tree.symbol
+ final def mods = tree.mods
+ final def basisSym = tree.symbol
+ final def derivedMods = mods & flagsMask | flagsExtra
}
sealed trait DerivedFromClassDef extends DerivedFromMemberDef {
@@ -305,7 +301,6 @@ trait MethodSynthesis {
/* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
- final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
def derivedSym: Symbol = tree.symbol
def derivedTree: Tree = EmptyTree
@@ -314,8 +309,8 @@ trait MethodSynthesis {
def isDeferred = mods.isDeferred
def keepClean = false // whether annotations whose definitions are not meta-annotated should be kept.
def validate() { }
- def createAndEnterSymbol(): Symbol = {
- val sym = owner.newMethod(name, tree.pos.focus, (tree.mods.flags & flagsMask) | flagsExtra)
+ def createAndEnterSymbol(): MethodSymbol = {
+ val sym = owner.newMethod(name, tree.pos.focus, derivedMods.flags)
setPrivateWithin(tree, sym)
enterInScope(sym)
sym setInfo completer(sym)
@@ -333,7 +328,8 @@ trait MethodSynthesis {
}
}
sealed trait DerivedGetter extends DerivedFromValDef {
- // TODO
+ // A getter must be accompanied by a setter if the ValDef is mutable.
+ def needsSetter = mods.isMutable
}
sealed trait DerivedSetter extends DerivedFromValDef {
override def isSetter = true
@@ -341,10 +337,13 @@ trait MethodSynthesis {
case (p :: Nil) :: _ => p
case _ => NoSymbol
}
- private def setterRhs = (
- if (mods.isDeferred || derivedSym.isOverloaded) EmptyTree
+
+ private def setterRhs = {
+ assert(!derivedSym.isOverloaded, s"Unexpected overloaded setter $derivedSym for $basisSym in $enclClass")
+ if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree
else Assign(fieldSelection, Ident(setterParam))
- )
+ }
+
private def setterDef = DefDef(derivedSym, setterRhs)
override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef
}
@@ -363,8 +362,7 @@ trait MethodSynthesis {
context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}")
result
}
- def derivedTree: DefDef =
- factoryMeth(mods & flagsMask | flagsExtra, name, tree)
+ def derivedTree: DefDef = factoryMeth(derivedMods, name, tree)
def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
def flagsMask: Long = AccessFlags
def name: TermName = tree.name.toTermName
@@ -385,8 +383,9 @@ trait MethodSynthesis {
}
}
case class Getter(tree: ValDef) extends BaseGetter(tree) {
- override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getterIn(enclClass)
- private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection
+ override def derivedSym = if (Field.noFieldFor(tree)) basisSym else basisSym.getterIn(enclClass)
+ private def derivedRhs = if (Field.noFieldFor(tree)) tree.rhs else fieldSelection
+
private def derivedTpt = {
// For existentials, don't specify a type for the getter, even one derived
// from the symbol! This leads to incompatible existentials for the field and
@@ -400,19 +399,21 @@ trait MethodSynthesis {
// Range position errors ensue if we don't duplicate this in some
// circumstances (at least: concrete vals with existential types.)
case ExistentialType(_, _) => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus)
- case _ if mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
+ case _ if isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field
case tp => TypeTree(tp)
}
tpt setPos tree.tpt.pos.focus
}
override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt)
}
+
/** Implements lazy value accessors:
- * - for lazy values of type Unit and all lazy fields inside traits,
- * the rhs is the initializer itself
- * - for all other lazy values z the accessor is a block of this form:
- * { z = <rhs>; z } where z can be an identifier or a field.
- */
+ * - for lazy values of type Unit and all lazy fields inside traits,
+ * the rhs is the initializer itself, because we'll just "compute" the result on every access
+ * ("computing" unit / constant type is free -- the side-effect is still only run once, using the init bitmap)
+ * - for all other lazy values z the accessor is a block of this form:
+ * { z = <rhs>; z } where z can be an identifier or a field.
+ */
case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
extends ChangeOwnerTraverser(oldowner, newowner) {
@@ -432,10 +433,10 @@ trait MethodSynthesis {
override def derivedTree: DefDef = {
val ValDef(_, _, tpt0, rhs0) = tree
val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0)
- val body = (
- if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
+ val body =
+ if (tree.symbol.owner.isTrait || Field.noFieldFor(tree)) rhs1 // TODO move tree.symbol.owner.isTrait into noFieldFor
else gen.mkAssignAndReturn(basisSym, rhs1)
- )
+
derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition
val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))
// ValDef will have its position focused whereas DefDef will have original correct rangepos
@@ -454,6 +455,24 @@ trait MethodSynthesis {
override def derivedSym = basisSym.setterIn(enclClass)
}
+
+ object Field {
+ // No field for these vals (either never emitted or eliminated later on):
+ // - abstract vals have no value we could store (until they become concrete, potentially)
+ // - lazy vals of type Unit
+ // - [Emitted, later removed during AddInterfaces/Mixins] concrete vals in traits can't have a field
+ // - [Emitted, later removed during Constructors] a concrete val with a statically known value (Unit / ConstantType)
+ // performs its side effect according to lazy/strict semantics, but doesn't need to store its value
+ // each access will "evaluate" the RHS (a literal) again
+ // We would like to avoid emitting unnecessary fields, but the required knowledge isn't available until after typer.
+ // The only way to avoid emitting & suppressing is to not emit at all until we are sure we need the field, as dotty does.
+ // NOTE: do not look at `vd.symbol` when called from `enterGetterSetter` (luckily, that call-site implies `!mods.isLazy`),
+ // as the symbol info is in the process of being created then.
+ // TODO: harmonize tree & symbol creation
+ // TODO: the `def field` call-site does not tolerate including `|| vd.symbol.owner.isTrait` --> tests break
+ def noFieldFor(vd: ValDef) = vd.mods.isDeferred || (vd.mods.isLazy && isUnitType(vd.symbol.info)) // || vd.symbol.owner.isTrait))
+ }
+
case class Field(tree: ValDef) extends DerivedFromValDef {
def name = tree.localName
def category = FieldTargetClass
@@ -462,11 +481,13 @@ trait MethodSynthesis {
// By default annotations go to the field, except if the field is
// generated for a class parameter (PARAMACCESSOR).
override def keepClean = !mods.isParamAccessor
- override def derivedTree = (
- if (mods.isDeferred) EmptyTree
- else if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus)
+
+ // handle lazy val first for now (we emit a Field even though we probably shouldn't...)
+ override def derivedTree =
+ if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus)
+ else if (Field.noFieldFor(tree)) EmptyTree
else copyValDef(tree)(mods = mods | flagsExtra, name = this.name)
- )
+
}
case class Param(tree: ValDef) extends DerivedFromValDef {
def name = tree.name
@@ -501,12 +522,12 @@ trait MethodSynthesis {
// Derives a tree without attempting to use the original tree's symbol.
override def derivedTree = {
atPos(tree.pos.focus) {
- DefDef(derivedMods, name, Nil, ListOfNil, tree.tpt.duplicate,
+ DefDef(derivedMods mapAnnotations (_ => Nil), name, Nil, ListOfNil, tree.tpt.duplicate,
if (isDeferred) EmptyTree else Select(This(owner), tree.name)
)
}
}
- override def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
+ override def createAndEnterSymbol(): MethodSymbol = enterSyntheticSym(derivedTree).asInstanceOf[MethodSymbol]
}
case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { }
case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { }
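
To make Field.noFieldFor concrete, here are the two source-level shapes it currently recognizes as needing no underlying field (plain user code for illustration; the trait and member names are made up):

    trait NoFieldExamples {
      val deferred: Int                 // abstract val: nothing to store until it becomes concrete
    }

    class WithFields extends NoFieldExamples {
      val deferred: Int = 1             // concrete: a field plus a getter are derived
      lazy val effectOnly: Unit =       // lazy val of type Unit: the init bitmap still guards the
        println("side effect")          // side effect, but no field is needed for the value
    }
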
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 4ad81b60ae..254cb7111c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -115,21 +115,14 @@ trait Namers extends MethodSynthesis {
TypeSigError(tree, ex)
alt
}
- // PRIVATE | LOCAL are fields generated for primary constructor arguments
- // @PP: ...or fields declared as private[this]. PARAMACCESSOR marks constructor arguments.
- // Neither gets accessors so the code is as far as I know still correct.
- def noEnterGetterSetter(vd: ValDef) = !vd.mods.isLazy && (
- !owner.isClass
- || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
- || (vd.name startsWith nme.OUTER)
- || (context.unit.isJava)
- || isEnumConstant(vd)
- )
- def noFinishGetterSetter(vd: ValDef) = (
- (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
- || vd.symbol.isModuleVar
- || isEnumConstant(vd))
+ // All lazy vals need accessors, including those owned by terms (e.g., in a method) or private[this] in a class
+ def deriveAccessors(vd: ValDef) = vd.mods.isLazy || (owner.isClass && deriveAccessorsInClass(vd))
+
+ private def deriveAccessorsInClass(vd: ValDef) =
+ !vd.mods.isPrivateLocal && // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors
+ !(vd.name startsWith nme.OUTER) && // outer accessors are added later, in explicitouter
+ !isEnumConstant(vd) // enums can only occur in classes, so only check here
/** Determines whether this field holds an enum constant.
* To qualify, the following conditions must be met:
@@ -655,11 +648,15 @@ trait Namers extends MethodSynthesis {
}
}
- def enterValDef(tree: ValDef) {
- if (noEnterGetterSetter(tree))
- assignAndEnterFinishedSymbol(tree)
- else
- enterGetterSetter(tree)
+ def enterValDef(tree: ValDef): Unit = {
+ val isScala = !context.unit.isJava
+ if (isScala) {
+ if (nme.isSetterName(tree.name)) ValOrVarWithSetterSuffixError(tree)
+ if (tree.mods.isPrivateLocal && tree.mods.isCaseAccessor) PrivateThisCaseClassParameterError(tree)
+ }
+
+ if (isScala && deriveAccessors(tree)) enterGetterSetter(tree)
+ else assignAndEnterFinishedSymbol(tree)
if (isEnumConstant(tree))
tree.symbol setInfo ConstantType(Constant(tree.symbol))
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 0198529ef7..7252cae0b5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1188,20 +1188,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// set NoType so it will be ignored.
val cdef = ClassDef(module.moduleClass, impl) setType NoType
- // Create the module var unless the immediate owner is a class and
- // the module var already exists there. See SI-5012, SI-6712.
- def findOrCreateModuleVar() = {
- val vsym = (
- if (site.isTerm) NoSymbol
- else site.info decl nme.moduleVarName(moduleName)
- )
- vsym orElse (site newModuleVarSymbol module)
- }
def newInnerObject() = {
- // Create the module var unless it is already in the module owner's scope.
- // The lookup is on module.enclClass and not module.owner lest there be a
- // nullary method between us and the class; see SI-5012.
- val moduleVar = findOrCreateModuleVar()
+ val moduleVar = site newModuleVarSymbol module
val rhs = gen.newModule(module, moduleVar.tpe)
val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
val accessor = DefDef(module, body.changeOwner(moduleVar -> module))
@@ -1217,6 +1205,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
val newTrees = cdef :: (
if (module.isStatic)
+ // trait T { def f: Object }; object O extends T { object f }. Need to generate method f in O.
if (module.isOverridingSymbol) matchingInnerObject() else Nil
else
newInnerObject()
@@ -1468,10 +1457,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case m: MemberDef =>
val sym = m.symbol
applyChecks(sym.annotations)
- // validate implicitNotFoundMessage
- analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
- reporter.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
- }
+
+ def messageWarning(name: String)(warn: String) =
+ reporter.warning(tree.pos, f"Invalid $name message for ${sym}%s${sym.locationString}%s:%n$warn")
+
+ // validate implicitNotFoundMessage and implicitAmbiguousMessage
+ analyzer.ImplicitNotFoundMsg.check(sym) foreach messageWarning("implicitNotFound")
+ analyzer.ImplicitAmbiguousMsg.check(sym) foreach messageWarning("implicitAmbiguous")
case tpt@TypeTree() =>
if(tpt.original != null) {
@@ -1511,7 +1503,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
sym.isSourceMethod &&
sym.isCase &&
sym.name == nme.apply &&
- isClassTypeAccessible(tree)
+ isClassTypeAccessible(tree) &&
+ !tree.tpe.resultType.typeSymbol.primaryConstructor.isLessAccessibleThan(tree.symbol)
if (doTransform) {
tree foreach {
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index ea44b9dc39..92b0719ba3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -132,7 +132,7 @@ trait StdAttachments {
/** Marks the tree as a macro impl reference, which is a naked reference to a method.
*
* This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`),
- * because otherwise typing a naked reference will result in the "follow this method with `_' if you want to
+ * because otherwise typing a naked reference will result in the "follow this method with `_` if you want to
* treat it as a partially applied function" errors.
*
* This mark suppresses adapt except for when the annottee is a macro application.
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index d65d2092ad..4ccc183334 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -304,6 +304,7 @@ trait SyntheticMethods extends ast.TreeDSL {
clazz.isModuleClass
&& clazz.isSerializable
&& !hasConcreteImpl(nme.readResolve)
+ && clazz.isStatic
)
def synthesize(): List[Tree] = {
@@ -335,16 +336,18 @@ trait SyntheticMethods extends ast.TreeDSL {
}
for ((m, impl) <- methods ; if shouldGenerate(m)) yield impl()
}
- def extras = (
+ def extras = {
if (needsReadResolve) {
// Aha, I finally decoded the original comment.
// This method should be generated as private, but apparently if it is, then
// it is name mangled afterward. (Wonder why that is.) So it's only protected.
// For sure special methods like "readResolve" should not be mangled.
- List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => {
+ m setFlag PRIVATE; REF(clazz.sourceModule)
+ }))
}
else Nil
- )
+ }
try impls ++ extras
catch { case _: TypeError if reporter.hasErrors => Nil }
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index 57dc74d2a0..56127f4026 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -36,7 +36,7 @@ trait Tags {
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
* If false then materialization macros are prohibited from running.
*
- * @returns Tree that represents an `scala.reflect.ClassTag` for `tp` if everything is okay.
+ * @return Tree that represents an `scala.reflect.ClassTag` for `tp` if everything is okay.
* EmptyTree if the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
* EmptyTree if `allowMaterialization` is false, and there is no class tag in scope.
*/
@@ -57,7 +57,7 @@ trait Tags {
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
* If false then materialization macros are prohibited from running.
*
- * @returns Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay.
+ * @return Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay.
* EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
* EmptyTree if `allowMaterialization` is false, and there is no array tag in scope.
*/
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index a7d48ceb89..e8db8309f1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -262,7 +262,14 @@ abstract class TreeCheckers extends Analyzer {
checkedTyped(tree, mode, pt)
)
private def checkedTyped(tree: Tree, mode: Mode, pt: Type): Tree = {
- val typed = wrap(tree)(super.typed(tree, mode, pt))
+ val typed = wrap(tree)(super.typed(tree.clearType(), mode, pt))
+
+ // Vlad: super.typed returns null for package defs, why is that?
+ if (typed eq null)
+ return tree
+
+ if (typed.tpe ne null)
+ assert(!typed.tpe.isErroneous, "Tree has erroneous type: " + typed)
if (tree ne typed)
treesDiffer(tree, typed)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index cc15a2485f..f26baf7d93 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2714,7 +2714,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
*
* If 'T' is not fully defined, it is inferred by type checking
* `apply$body` without a result type before type checking the block.
- * The method's inferred result type is used instead of T`. [See test/files/pos/sammy_poly.scala]
+ * The method's inferred result type is used instead of `T`. [See test/files/pos/sammy_poly.scala]
*
* The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
* and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
@@ -3294,7 +3294,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3
//
// One can think of these methods as being infinitely overloaded. We create
- // a ficticious new cloned method symbol for each call site that takes on a signature
+ // a fictitious new cloned method symbol for each call site that takes on a signature
// governed by a) the argument types and b) the expected type
val args1 = typedArgs(args, forArgMode(fun, mode))
val pts = args1.map(_.tpe.deconst)
@@ -4095,7 +4095,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def resultingTypeTree(tpe: Type) = {
// we need symbol-ful originals for reification
- // hence we go the extra mile to hand-craft tis guy
+ // hence we go the extra mile to hand-craft this guy
val original = arg1 match {
case tt @ TypeTree() if tt.original != null => Annotated(ann, tt.original)
// this clause is needed to correctly compile stuff like "new C @D" or "@(inline @getter)"
@@ -4247,7 +4247,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// in the special (though common) case where the types are equal, it pays to pack before comparing
// especially virtpatmat needs more aggressive unification of skolemized types
// this breaks src/library/scala/collection/immutable/TrieIterator.scala
- // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
+ // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
def samePackedTypes = (
!isPastTyper
&& thenp1.tpe.annotations.isEmpty
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
index 550fd4e68d..37fbb73b85 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala
@@ -159,7 +159,7 @@ trait TypersTracking {
// Some trees which are typed with mind-numbing frequency and
// which add nothing by being printed. Did () type to Unit? Let's
// gamble on yes.
- private def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t))
+ def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t))
def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t)
def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || (
(tree1.tpe == tree2.tpe)
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 8d4d07759f..2811520b67 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -22,7 +22,7 @@ import Jar.isJarOrZip
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
- * java, see [http://java.sun.com/javase/6/docs/technotes/tools/windows/classpath.html]
+ * java, see [[http://docs.oracle.com/javase/6/docs/technotes/tools/windows/classpath.html]]
* </p>
*
* @author Stepan Koltsov
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 4ff7067a21..501546b8f6 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -37,7 +37,7 @@ object DocStrings {
/** Returns index of string `str` after `start` skipping longest
* sequence of space and tab characters, possibly also containing
* a single `*` character or the `/``**` sequence.
- * @pre start == str.length || str(start) == `\n'
+ * @pre start == str.length || str(start) == `\n`
*/
def skipLineLead(str: String, start: Int): Int =
if (start == str.length) start
@@ -49,7 +49,7 @@ object DocStrings {
else idx
}
- /** Skips to next occurrence of `\n' or to the position after the `/``**` sequence following index `start`.
+ /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
*/
def skipToEol(str: String, start: Int): Int =
if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index ac63232967..e30d1ed7cd 100644
--- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -1,9 +1,11 @@
package scala.tools
package reflect
+import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Global
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.Settings
+import scala.tools.nsc.typechecker.Analyzer
/** A version of Global that uses reflection to get class
* infos, instead of reading class or source files.
@@ -11,6 +13,20 @@ import scala.tools.nsc.Settings
class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader)
extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
+ override lazy val analyzer = new {
+ val global: ReflectGlobal.this.type = ReflectGlobal.this
+ } with Analyzer {
+ /** Obtains the classLoader used for runtime macro expansion.
+ *
+ * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]].
+ * The [[rootClassLoader]] is used to obtain runtime defined macros.
+ */
+ override protected def findMacroClassLoader(): ClassLoader = {
+ val classpath = global.classPath.asURLs
+ ScalaClassLoader.fromURLs(classpath, rootClassLoader)
+ }
+ }
+
override def transformedType(sym: Symbol) =
postErasure.transformInfo(sym,
erasure.transformInfo(sym,
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index 1be186d114..8caf0c5c0e 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -27,7 +27,7 @@ package scala
* w.print()
* }}}
*
- * See the [[http://docs.scala-lang.org/sips/pending/value-classes.html value classes guide]] for more
+ * See the [[http://docs.scala-lang.org/sips/completed/value-classes.html value classes guide]] for more
* details on the interplay of universal traits and value classes.
*/
abstract class Any {
diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala
index ff62948413..fb3d213e19 100644
--- a/src/library/scala/AnyVal.scala
+++ b/src/library/scala/AnyVal.scala
@@ -49,7 +49,7 @@ package scala
* It's important to note that user-defined value classes are limited, and in some circumstances,
* still must allocate a value class instance at runtime. These limitations and circumstances are
* explained in greater detail in the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes Guide]]
- * as well as in [[http://docs.scala-lang.org/sips/pending/value-classes.html SIP-15: Value Classes]],
+ * as well as in [[http://docs.scala-lang.org/sips/completed/value-classes.html SIP-15: Value Classes]],
* the Scala Improvement Proposal.
*/
abstract class AnyVal extends Any {
diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala
index f2f9ead44c..e06557ccdd 100644
--- a/src/library/scala/Equals.scala
+++ b/src/library/scala/Equals.scala
@@ -13,8 +13,9 @@ package scala
*/
trait Equals extends Any {
/** A method that should be called from every well-designed equals method
- * that is open to be overridden in a subclass. See Programming in Scala,
- * Chapter 28 for discussion and design.
+ * that is open to be overridden in a subclass. See
+ * [[http://www.artima.com/pins1ed/object-equality.html Programming in Scala,
+ * Chapter 28]] for discussion and design.
*
* @param that the value being probed for possible equality
* @return true if this instance can possibly equal `that`, otherwise false
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index f134f5ce3d..7282feebb6 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -270,7 +270,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* // Returns None because the partial function doesn't cover the case.
* Some("ftp") collect {case "http" => "HTTP"}
*
- * // Returns None because None is passed to the collect method.
+ * // Returns None because the option is empty. There is no value to pass to the partial function.
* None collect {case value => value}
* }}}
*
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index fa58015a84..334377e838 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -56,7 +56,7 @@ import scala.io.StdIn
* only contain natural numbers (i.e. non-negative), and that the result returned
* will also be natural. `require` is distinct from `assert` in that if the
* condition fails, then the caller of the function is to blame rather than a
- * logical error having been made within `addNaturals` itself. `ensures` is a
+ * logical error having been made within `addNaturals` itself. `ensuring` is a
* form of `assert` that declares the guarantee the function is providing with
* regards to its return value.
*
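
The corrected sentence refers to the contract helpers in Predef; a compact version of the example that scaladoc is describing (Predef is imported by default, so no explicit import is needed):

    object Contracts {
      def addNaturals(nats: List[Int]): Int = {
        require(nats forall (_ >= 0), "List contains negative numbers")  // blames the caller
        nats.foldLeft(0)(_ + _)
      } ensuring (_ >= 0)                                                // states the guarantee on the result
    }
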
diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala
new file mode 100644
index 0000000000..46eab9ae8f
--- /dev/null
+++ b/src/library/scala/annotation/implicitAmbiguous.scala
@@ -0,0 +1,34 @@
+package scala.annotation
+
+import scala.annotation.meta._
+
+/**
+ * To customize the error message that's emitted when an implicit value of type
+ * C[T1,..., TN] is found more than once, annotate the offending implicit
+ * definition with @implicitAmbiguous. Assuming that definition has type
+ * parameters X1,..., XN, the error message will be the result of replacing all
+ * occurrences of ${Xi} in the string msg with the string representation of the
+ * corresponding type argument Ti.
+ *
+ * If more than one @implicitAmbiguous annotation is collected, the compiler is
+ * free to pick any of them to display.
+ *
+ * Nice errors can direct users to fix imports or even tell them why code
+ * intentionally doesn't compile.
+ *
+ * {{{
+ * trait =!=[C, D]
+ *
+ * implicit def neq[E, F] : E =!= F = null
+ *
+ * @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}")
+ * implicit def neqAmbig1[G, H, J] : J =!= J = null
+ * implicit def neqAmbig2[I] : I =!= I = null
+ *
+ * implicitly[Int =!= Int]
+ * }}}
+ *
+ * @author Brian McKenna
+ * @since 2.12.0
+ */
+final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation {}
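
Beyond the scaladoc example above, the typical payoff of @implicitAmbiguous is turning a deliberately ambiguous implicit into a readable compile error. A small sketch building on the same =!= trick (the object and method names are made up):

    object NotEqual {
      trait =!=[C, D]

      implicit def neq[E, F]: E =!= F = null

      @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}")
      implicit def neqAmbig1[G, H, J]: J =!= J = null
      implicit def neqAmbig2[I]: I =!= I = null

      // Evidence that two types differ; used to forbid duplicate type arguments.
      def distinct[A, B](a: A, b: B)(implicit ev: A =!= B): (A, B) = (a, b)

      // distinct(1, "two")  // compiles: Int =!= String resolves via neq
      // distinct(1, 2)      // ambiguity is reported with the custom message, interpolated for Int
    }
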
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 49b4397cf2..603d97c3ad 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -162,13 +162,6 @@ class HashSet[A] extends AbstractSet[A]
def - (e: A): HashSet[A] =
nullToEmpty(removed0(e, computeHash(e), 0))
- /** Returns this $coll as an immutable set.
- *
- * A new set will not be built; lazy collections will stay lazy.
- */
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
-
override def filter(p: A => Boolean) = {
val buffer = new Array[HashSet[A]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -201,6 +194,7 @@ class HashSet[A] extends AbstractSet[A]
protected def writeReplace(): AnyRef = new HashSet.SerializationProxy(this)
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[HashSet[B]]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index a6e6fba0a5..adc975479a 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -138,13 +138,6 @@ class ListSet[A] extends AbstractSet[A]
override def stringPrefix = "ListSet"
- /** Returns this $coll as an immutable set.
- *
- * A new set will not be built; lazy collections will stay lazy.
- */
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
-
/** Represents an entry in the `ListSet`.
*/
protected class Node(override val head: A) extends ListSet[A] with Serializable {
@@ -186,4 +179,6 @@ class ListSet[A] extends AbstractSet[A]
override def tail: ListSet[A] = self
}
+
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]]
}
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 94a5b7929a..bd5b9c9faf 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -113,6 +113,11 @@ self =>
override def - (elem: A): immutable.Set[A] =
if (this(elem)) immutable.Set[A]() ++ this - elem
else this
+
+ // ImmutableDefaultKeySet is only protected, so we won't warn on override.
+ // Someone could override in a way that makes widening not okay
+ // (e.g. by overriding +, though the version in this class is fine)
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
}
/** This function transforms all the values of mappings contained
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 7725ad9ee3..a115469b83 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -35,7 +35,22 @@ trait Set[A] extends Iterable[A]
override def companion: GenericCompanion[Set] = Set
- override def toSet[B >: A]: Set[B] = to[({type l[a] = immutable.Set[B]})#l] // for bincompat; remove in dev
+ /** Returns this $coll as an immutable set, perhaps accepting a
+ * wider range of elements. Since it already is an
+ * immutable set, it will only be rebuilt if the underlying structure
+ * cannot be expanded to include arbitrary element types.
+ * For instance, `BitSet` and `SortedSet` will be rebuilt, as
+ * they require `Int` and sortable elements respectively.
+ *
+ * When in doubt, the set will be rebuilt. Rebuilt sets never
+ * need to be rebuilt again.
+ */
+ override def toSet[B >: A]: Set[B] = {
+ // This way of building sets typically has the best benchmarks, surprisingly!
+ val sb = Set.newBuilder[B]
+ foreach(sb += _)
+ sb.result()
+ }
override def seq: Set[A] = this
protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
@@ -88,8 +103,10 @@ object Set extends ImmutableSetFactory[Set] {
if (f(elem1)) Some(elem1)
else None
}
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ // Why is Set1 non-final? Need to fix that!
+ @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]]
+
}
/** An optimized representation for immutable sets of size 2 */
@@ -121,8 +138,9 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem2)) Some(elem2)
else None
}
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ // Why is Set2 non-final? Need to fix that!
+ @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]]
}
/** An optimized representation for immutable sets of size 3 */
@@ -156,8 +174,9 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem3)) Some(elem3)
else None
}
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ // Why is Set3 non-final? Need to fix that!
+ @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]]
}
/** An optimized representation for immutable sets of size 4 */
@@ -193,8 +212,9 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem4)) Some(elem4)
else None
}
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ // Why is Set4 non-final? Need to fix that!
+ @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]]
}
}
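With this change applied, the small `SetN` classes keep the cast-only fast path while the general `toSet` rebuilds through a builder; a minimal sketch of the observable difference, using `immutable.BitSet` as one of the "will be rebuilt" cases named in the new scaladoc:

{{{
import scala.collection.immutable.{ BitSet, Set }

object ToSetDemo {
  def main(args: Array[String]): Unit = {
    val small = Set(1, 2, 3)                 // a Set3 under the hood
    val widened: Set[Any] = small.toSet[Any]
    println(widened eq small)                // true: Set3 casts itself to Set3[Any]

    val bits = BitSet(1, 2, 3)               // element type is fixed to Int
    val anyBits: Set[Any] = bits.toSet[Any]
    println(anyBits eq bits)                 // false: BitSet cannot hold arbitrary elements, so it is rebuilt
    println(anyBits)                         // Set(1, 2, 3)
  }
}
}}}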
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index f1493551ab..682788e18e 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -53,6 +53,12 @@ self =>
val map = self.rangeImpl(from, until)
new map.DefaultKeySortedSet
}
+ override def toSet[C >: A]: Set[C] = {
+ // This way of building sets typically has the best benchmarks, surprisingly!
+ val sb = Set.newBuilder[C]
+ foreach(sb += _)
+ sb.result()
+ }
}
/** Add a key/value pair to this map.
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
deleted file mode 100644
index b63d0aae33..0000000000
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package collection
-package mutable
-
-/**
- * An immutable AVL Tree implementation formerly used by mutable.TreeSet
- *
- * @author Lucien Pereira
- */
-@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2")
-private[mutable] sealed trait AVLTree[+A] extends Serializable {
- def balance: Int
-
- def depth: Int
-
- def iterator[B >: A]: Iterator[B] = Iterator.empty
-
- def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
- throw new NoSuchElementException(String.valueOf(value))
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
- def rebalance[B >: A]: AVLTree[B] = this
-
- def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-
- def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case object Leaf extends AVLTree[Nothing] {
- override val balance: Int = 0
-
- override val depth: Int = -1
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
- override val balance: Int = right.depth - left.depth
-
- override val depth: Int = math.max(left.depth, right.depth) + 1
-
- override def iterator[B >: A]: Iterator[B] = new AVLIterator(this)
-
- override def contains[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- true
- else if (ord < 0)
- left.contains(value, ordering)
- else
- right.contains(value, ordering)
- }
-
- /**
- * Returns a new tree containing the given element.
- * Throws an IllegalArgumentException if element is already present.
- *
- */
- override def insert[B >: A](value: B, ordering: Ordering[B]) = {
- val ord = ordering.compare(value, data)
- if (0 == ord)
- throw new IllegalArgumentException()
- else if (ord < 0)
- Node(data, left.insert(value, ordering), right).rebalance
- else
- Node(data, left, right.insert(value, ordering)).rebalance
- }
-
- /**
- * Return a new tree which not contains given element.
- *
- */
- override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
- val ord = ordering.compare(value, data)
- if(ord == 0) {
- if (Leaf == left) {
- if (Leaf == right) {
- Leaf
- } else {
- val (min, newRight) = right.removeMin
- Node(min, left, newRight).rebalance
- }
- } else {
- val (max, newLeft) = left.removeMax
- Node(max, newLeft, right).rebalance
- }
- } else if (ord < 0) {
- Node(data, left.remove(value, ordering), right).rebalance
- } else {
- Node(data, left, right.remove(value, ordering)).rebalance
- }
- }
-
- /**
- * Return a tuple containing the smallest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMin[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == left)
- (data, right)
- else {
- val (min, newLeft) = left.removeMin
- (min, Node(data, newLeft, right).rebalance)
- }
- }
-
- /**
- * Return a tuple containing the biggest element of the provided tree
- * and a new tree from which this element has been extracted.
- *
- */
- override def removeMax[B >: A]: (B, AVLTree[B]) = {
- if (Leaf == right)
- (data, left)
- else {
- val (max, newRight) = right.removeMax
- (max, Node(data, left, newRight).rebalance)
- }
- }
-
- override def rebalance[B >: A] = {
- if (-2 == balance) {
- if (1 == left.balance)
- doubleRightRotation
- else
- rightRotation
- } else if (2 == balance) {
- if (-1 == right.balance)
- doubleLeftRotation
- else
- leftRotation
- } else {
- this
- }
- }
-
- override def leftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- Node(r.data, Node(data, left, r.left), r.right)
- } else sys.error("Should not happen.")
- }
-
- override def rightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- Node(l.data, l.left, Node(data, l.right, right))
- } else sys.error("Should not happen.")
- }
-
- override def doubleLeftRotation[B >: A] = {
- if (Leaf != right) {
- val r: Node[A] = right.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the left rotation
- val rightRotated = r.rightRotation
- Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right)
- } else sys.error("Should not happen.")
- }
-
- override def doubleRightRotation[B >: A] = {
- if (Leaf != left) {
- val l: Node[A] = left.asInstanceOf[Node[A]]
- // Let's save an instanceOf by 'inlining' the right rotation
- val leftRotated = l.leftRotation
- Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right))
- } else sys.error("Should not happen.")
- }
-}
-
-/**
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
- */
-private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
- val stack = mutable.ArrayStack[Node[A]](root)
- diveLeft()
-
- private def diveLeft(): Unit = {
- if (Leaf != stack.head.left) {
- val left: Node[A] = stack.head.left.asInstanceOf[Node[A]]
- stack.push(left)
- diveLeft()
- }
- }
-
- private def engageRight(): Unit = {
- if (Leaf != stack.head.right) {
- val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
- stack.pop()
- stack.push(right)
- diveLeft()
- } else
- stack.pop()
- }
-
- override def hasNext: Boolean = !stack.isEmpty
-
- override def next(): A = {
- if (stack.isEmpty)
- throw new NoSuchElementException()
- else {
- val result = stack.head.data
- // Let's maintain stack for the next invocation
- engageRight()
- result
- }
- }
-}
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 619beeb1d6..e6889da3b5 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -18,8 +18,19 @@ import generic._
*
* Only the `dequeue` and `dequeueAll` methods will return elements in priority
* order (while removing elements from the heap). Standard collection methods
- * including `drop` and `iterator` will remove or traverse the heap in whichever
- * order seems most convenient.
+ * including `drop`, `iterator`, and `toString` will remove or traverse the heap
+ * in whichever order seems most convenient.
+ *
+ * Therefore, printing a `PriorityQueue` will not reveal the priority order of
+ * the elements, though the highest-priority element will be printed first. To
+ * print the elements in order, one must duplicate the `PriorityQueue` (by using
+ * `clone`, for instance) and then dequeue them:
+ *
+ * @example {{{
+ * val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7)
+ * println(pq) // elements probably not in order
+ * println(pq.clone.dequeueAll) // prints Vector(7, 5, 3, 2, 1)
+ * }}}
*
* @tparam A type of the elements in this priority queue.
* @param ord implicit ordering used to compare the elements of type `A`.
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 9634f6d900..1ee27b0f36 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -40,7 +40,7 @@ class SyncVar[A] {
wait(timeout)
val elapsed = System.nanoTime() - start
// nanoTime should be monotonic, but it's not possible to rely on that.
- // See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6458294.
+ // See http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6458294.
if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed)
}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 078ad45be9..3538ac6b94 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -178,7 +178,9 @@ private[concurrent] object Promise {
* DefaultPromises, and `linkedRootOf` is currently only designed to be called
* by Future.flatMap.
*/
- final class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] {
+ // Left non-final to enable addition of extra fields by Java/Scala converters
+ // in scala-java8-compat.
+ class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] {
/** Get the root promise for this promise, compressing the link chain to that
* promise if necessary.
@@ -248,12 +250,12 @@ private[concurrent] object Promise {
@throws(classOf[TimeoutException])
@throws(classOf[InterruptedException])
- def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
+ final def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost + "]")
@throws(classOf[Exception])
- def result(atMost: Duration)(implicit permit: CanAwait): T =
+ final def result(atMost: Duration)(implicit permit: CanAwait): T =
ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
def value: Option[Try[T]] = value0
@@ -265,7 +267,7 @@ private[concurrent] object Promise {
case _ => None
}
- override def isCompleted: Boolean = isCompleted0
+ override final def isCompleted: Boolean = isCompleted0
@tailrec
private def isCompleted0: Boolean = get() match {
@@ -274,7 +276,7 @@ private[concurrent] object Promise {
case _ => false
}
- def tryComplete(value: Try[T]): Boolean = {
+ final def tryComplete(value: Try[T]): Boolean = {
val resolved = resolveTry(value)
tryCompleteAndGetListeners(resolved) match {
case null => false
@@ -297,7 +299,7 @@ private[concurrent] object Promise {
}
}
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
+ final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit =
dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func))
/** Tries to add the callback, if already completed, it dispatches the callback to be executed.
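Opening `DefaultPromise` while marking its methods `final` follows a common pattern: extension points for extra fields, a sealed completion protocol. A generic sketch of that pattern with hypothetical names (not the actual `Promise` internals):

{{{
// Open for extension (extra fields), closed for behavioural override.
class CompletionCell[T] {
  private[this] var value: Option[T] = None

  // The completion protocol stays final so subclasses cannot weaken it.
  final def tryComplete(v: T): Boolean = synchronized {
    if (value.isEmpty) { value = Some(v); true } else false
  }
  final def isCompleted: Boolean = synchronized { value.isDefined }
}

// An interop layer can still add bookkeeping fields, which is why the class is open.
class InstrumentedCell[T] extends CompletionCell[T] {
  @volatile var lastObservedAt: Long = 0L
}
}}}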
diff --git a/src/library/scala/concurrent/util/Unsafe.java b/src/library/scala/concurrent/util/Unsafe.java
deleted file mode 100644
index 73739e377d..0000000000
--- a/src/library/scala/concurrent/util/Unsafe.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent.util;
-import java.lang.reflect.Field;
-
-// TODO: remove once akka no longer needs it, hopefully by 2.12.0-M3!
-@Deprecated
-public final class Unsafe {
- @Deprecated
- public final static sun.misc.Unsafe instance;
- static {
- try {
- sun.misc.Unsafe found = null;
- for(Field field : sun.misc.Unsafe.class.getDeclaredFields()) {
- if (field.getType() == sun.misc.Unsafe.class) {
- field.setAccessible(true);
- found = (sun.misc.Unsafe) field.get(null);
- break;
- }
- }
- if (found == null) throw new IllegalStateException("Can't find instance of sun.misc.Unsafe");
- else instance = found;
- } catch(Throwable t) {
- throw new ExceptionInInitializerError(t);
- }
- }
-}
-
-// Scala version:
-// classOf[sun.misc.Unsafe].getDeclaredFields.filter(_.getType == classOf[sun.misc.Unsafe]).headOption.map { field =>
-// field.setAccessible(true); field.get(null).asInstanceOf[sun.misc.Unsafe]
-// } getOrElse (throw new IllegalStateException("Can't find instance of sun.misc.Unsafe"))
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 60f99199cb..2a41e25b01 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -22,7 +22,7 @@ import scala.language.implicitConversions
// <?xml version="1.0" encoding="ISO8859-1" ?>
//
// MacRoman vs. UTF-8: see http://jira.codehaus.org/browse/JRUBY-3576
-// -Dfile.encoding: see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4375816
+// -Dfile.encoding: see http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4375816
/** A class for character encoding/decoding preferences.
*
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
index ca7a3cddb8..82ec872806 100644
--- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -218,7 +218,7 @@ object ClassManifestFactory {
/** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection.
- * todo: remove after next boostrap
+ * todo: remove after next bootstrap
*/
def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassManifest[T] {
@@ -239,4 +239,4 @@ private class ClassTypeManifest[T](
(if (prefix.isEmpty) "" else prefix.get.toString+"#") +
(if (runtimeClass.isArray) "Array" else runtimeClass.getName) +
argString
-} \ No newline at end of file
+}
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index a0d89fc0e1..026d5edd29 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -9,11 +9,10 @@
package scala
package runtime
-import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
+import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator, GenIterable }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted, IsTraversableLike }
-import scala.collection.parallel.ParIterable
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -156,7 +155,7 @@ object ScalaRunTime {
arr
}
- // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
+ // Java bug: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4071957
// More background at ticket #2318.
def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m)
@@ -326,8 +325,7 @@ object ScalaRunTime {
case x if useOwnToString(x) => x.toString
case x: AnyRef if isArray(x) => arrayToString(x)
case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
- case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
- case x: ParIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index 512c4fbc27..1c432b0403 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -110,6 +110,8 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
return
}
}
+
+ override def toString = "(%s, %s).zipped".format(colls._1.toString, colls._2.toString)
}
object Tuple2Zipped {
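The added `toString` makes a zipped view identify itself rather than falling back to `Object.toString`; a minimal sketch of what this prints, assuming ordinary `List`s:

{{{
object ZippedToStringDemo {
  def main(args: Array[String]): Unit = {
    val zipped = (List(1, 2, 3), List("a", "b", "c")).zipped
    println(zipped)                       // (List(1, 2, 3), List(a, b, c)).zipped
    println(zipped.map(_.toString + _))   // List(1a, 2b, 3c)
  }
}
}}}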
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index ffd44acf81..3bab86921b 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -118,6 +118,8 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
return
}
}
+
+ override def toString: String = "(%s, %s, %s).zipped".format(colls._1.toString, colls._2.toString, colls._3.toString)
}
object Tuple3Zipped {
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 5ec2e73cb9..0e9a1bfc56 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -157,7 +157,8 @@ package scala.sys {
* while(input.read() != -1) count += 1
* input.close()
* }
- * cat ! new ProcessIO(_.close(), byteCounter, _.close())
+ * val p = cat run new ProcessIO(_.close(), byteCounter, _.close())
+ * p.exitValue()
* count
* }
*
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index ee2bdbc4a7..b4f965f69b 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -88,7 +88,7 @@ object Sorting {
a(pL - 1) = current
pL -= 1
case x if x < 0 =>
- // Already in place. Just update indicies.
+ // Already in place. Just update indices.
iA += 1
case _ if iB > pR =>
// Wrong side. There's room on the other side, so swap
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index aa30887ba0..24c297a2fc 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -105,7 +105,7 @@ object Exception {
case x if rethrow(x) => throw x
case x if pf isDefinedAt x => pf(x)
}
- finally fin map (_.invoke())
+ finally fin foreach (_.invoke())
/* Create an empty Try container with this Catch and the supplied `Finally`. */
def andFinally(body: => Unit): Catch[T] = fin match {
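Switching `fin map (_.invoke())` to `fin foreach (_.invoke())` is about intent: the finalizer runs only for its side effect, and `map` on an `Option` would also build a wrapper value that is immediately discarded. A standalone sketch with a hypothetical `Fin` stand-in (not the actual `Catch`/`Finally` types):

{{{
object MapVsForeach {
  final case class Fin(body: () => Unit) { def invoke(): Unit = body() }

  def main(args: Array[String]): Unit = {
    val fin: Option[Fin] = Some(Fin(() => println("cleaning up")))

    fin map (_.invoke())       // runs the side effect, but also builds a throwaway Some(())
    fin foreach (_.invoke())   // same side effect, no wrapper value to discard
  }
}
}}}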
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index cd7648a44a..f9b49f1730 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -588,7 +588,7 @@ trait Types {
/** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)`
* Here, `pre` is the prefix of the type reference, `sym` is the symbol
* referred to by the type reference, and `args` is a possible empty list of
- * type argumenrts.
+ * type arguments.
* @group Extractors
*/
abstract class TypeRefExtractor {
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index e5d97e8959..a4223c1cb5 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -14,7 +14,7 @@ object ClassfileConstants {
final val JAVA_MAJOR_VERSION = 45
final val JAVA_MINOR_VERSION = 3
- /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
+ /** (see http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.1)
*
* If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also
* be set (ch. 2.13.1).
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 14487a941b..5ce5c39145 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -233,6 +233,8 @@ trait Definitions extends api.StandardDefinitions {
|| tp =:= AnyRefTpe
)
+ def isUnitType(tp: Type) = tp.typeSymbol == UnitClass && tp.annotations.isEmpty
+
def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe)
@@ -1105,6 +1107,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BridgeClass = requiredClass[scala.annotation.bridge]
lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
+ lazy val ImplicitAmbiguousClass = getClassIfDefined("scala.annotation.implicitAmbiguous")
lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
lazy val SwitchClass = requiredClass[scala.annotation.switch]
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 8ae201f045..902ba9fa80 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -237,7 +237,7 @@ trait Kinds {
*
* Proper types are represented using ProperTypeKind.
*
- * Type constructors are reprented using TypeConKind.
+ * Type constructors are represented using TypeConKind.
*/
abstract class Kind {
import Kind.StringState
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 8a52f0b9d8..05fdfb51ed 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -884,10 +884,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// string. So this needs attention. For now the fact that migration is
// private[scala] ought to provide enough protection.
def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass)
- def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
- def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
- def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
- def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
+ def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
+ def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
+ def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
+ def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
+ def implicitAmbiguousMsg = getAnnotation(ImplicitAmbiguousClass) flatMap { _.stringArg(0) }
def isCompileTimeOnly = hasAnnotation(CompileTimeOnlyAttr)
def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0)
@@ -2123,7 +2124,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The package class containing this symbol, or NoSymbol if there
* is not one.
* TODO: formulate as enclosingSuchThat, after making sure
- * we can start with current symbol rather than onwner.
+ * we can start with current symbol rather than owner.
* TODO: Also harmonize with enclClass, enclMethod etc.
*/
def enclosingPackageClass: Symbol = {
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index e3f95f9fd8..bbd9df05d2 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1418,7 +1418,7 @@ trait Trees extends api.Trees {
transformTypeDefs(tparams), transform(rhs))
}
case LabelDef(name, params, rhs) =>
- treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LamdaLifter.proxy'
+ treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LambdaLifter.proxy'
case PackageDef(pid, stats) =>
treeCopy.PackageDef(
tree, transform(pid).asInstanceOf[RefTree],
@@ -1601,7 +1601,7 @@ trait Trees extends api.Trees {
case _ =>
// no special handling is required for Function or Import nodes here.
// as they don't have interesting infos attached to their symbols.
- // Subsitution of the referenced symbol of Return nodes is handled
+ // Substitution of the referenced symbol of Return nodes is handled
// in .ChangeOwnerTraverser
}
tree match {
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index abea8bed9f..85e3ac60e8 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -40,19 +40,27 @@ trait UnCurry {
apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
case NullaryMethodType(restpe) =>
apply(MethodType(List(), restpe))
- case TypeRef(pre, ByNameParamClass, arg :: Nil) =>
- apply(functionType(List(), arg))
- case TypeRef(pre, RepeatedParamClass, arg :: Nil) =>
- apply(seqType(arg))
- case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
- apply(arrayType(
- if (isUnboundedGeneric(arg)) ObjectTpe else arg))
+ case DesugaredParameterType(desugaredTpe) =>
+ apply(desugaredTpe)
case _ =>
expandAlias(mapOver(tp))
}
}
}
+ object DesugaredParameterType {
+ def unapply(tpe: Type): Option[Type] = tpe match {
+ case TypeRef(pre, ByNameParamClass, arg :: Nil) =>
+ Some(functionType(List(), arg))
+ case TypeRef(pre, RepeatedParamClass, arg :: Nil) =>
+ Some(seqType(arg))
+ case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
+ Some(arrayType(if (isUnboundedGeneric(arg)) ObjectTpe else arg))
+ case _ =>
+ None
+ }
+ }
+
private val uncurryType = new TypeMap {
def apply(tp0: Type): Type = {
val tp = expandAlias(tp0)
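The new `DesugaredParameterType` extractor factors the three parameter-type desugarings (by-name to a nullary function, Scala varargs to `Seq`, Java varargs to `Array`) into one `unapply` that both type maps can share. A toy extractor in the same shape, over a hypothetical `PType` ADT rather than the compiler's `Type`:

{{{
object DesugarDemo {
  sealed trait PType
  case class ByName(arg: String)       extends PType
  case class Repeated(arg: String)     extends PType
  case class JavaRepeated(arg: String) extends PType
  case class Plain(name: String)       extends PType

  // Mirrors the shape of UnCurry.DesugaredParameterType: one unapply that maps
  // each sugared parameter type to its uncurried encoding.
  object Desugared {
    def unapply(tpe: PType): Option[String] = tpe match {
      case ByName(arg)       => Some(s"() => $arg")
      case Repeated(arg)     => Some(s"Seq[$arg]")
      case JavaRepeated(arg) => Some(s"Array[$arg]")
      case _                 => None
    }
  }

  def main(args: Array[String]): Unit = {
    List(ByName("Int"), Repeated("String"), JavaRepeated("Object"), Plain("Int")) foreach {
      case Desugared(t) => println(t)
      case other        => println(s"unchanged: $other")
    }
  }
}
}}}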
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index ea213cadd9..278d081249 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -370,6 +370,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.BridgeClass
definitions.ElidableMethodClass
definitions.ImplicitNotFoundClass
+ definitions.ImplicitAmbiguousClass
definitions.MigrationAnnotationClass
definitions.ScalaStrictFPAttr
definitions.SwitchClass
@@ -444,7 +445,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.ScalaValueClassesNoUnit
definitions.ScalaValueClasses
-
+ uncurry.DesugaredParameterType
erasure.GenericArray
erasure.scalaErasure
erasure.specialScalaErasure
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 841b4abfa5..3b54f5274e 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -133,7 +133,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
catch AbstractOrMissingHandler()
}
- private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
private val logScope = scala.sys.props contains "scala.repl.scope"
private def scopelog(msg: String) = if (logScope) Console.err.println(msg)
@@ -888,7 +887,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** Code to import bound names from previous lines - accessPath is code to
* append to objectName to access anything bound by request.
*/
- lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) =
+ lazy val ComputedImports(headerPreamble, importsPreamble, importsTrailer, accessPath) =
exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass))
/** the line of code to compute */
@@ -905,9 +904,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def path = originalPath("$intp")
def envLines = {
if (!isReplPower) Nil // power mode only for now
- else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees"))
+ else {
+ val escapedLine = Constant(originalLine).escapedStringValue
+ List(s"""def $$line = $escapedLine """, """def $trees = _root_.scala.Nil""")
+ }
}
def preamble = s"""
+ |$headerPreamble
|${preambleHeader format lineRep.readName}
|${envLines mkString (" ", ";\n ", ";\n")}
|$importsPreamble
diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 3ec77e46f1..5b231d94b6 100644
--- a/src/repl/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -70,7 +70,10 @@ trait Imports {
/** Compute imports that allow definitions from previous
* requests to be visible in a new request. Returns
- * three pieces of related code:
+ * three or four pieces of related code:
+ *
+ * 0. Header code fragment that should go at the beginning
+ * of the compilation unit, specifically, import Predef.
*
* 1. An initial code fragment that should go before
* the code of the new request.
@@ -91,30 +94,34 @@ trait Imports {
* (3) It imports multiple same-named implicits, but only the
* last one imported is actually usable.
*/
- case class ComputedImports(prepend: String, append: String, access: String)
+ case class ComputedImports(header: String, prepend: String, append: String, access: String)
protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = {
+ val header, code, trailingBraces, accessPath = new StringBuilder
+ val currentImps = mutable.HashSet[Name]()
+ var predefEscapes = false // only emit predef import header if name not resolved in history, loosely
+
/** Narrow down the list of requests from which imports
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
*/
- case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+ case class ReqAndHandler(req: Request, handler: MemberHandler)
def reqsToUse: List[ReqAndHandler] = {
/** Loop through a list of MemberHandlers and select which ones to keep.
- * 'wanted' is the set of names that need to be imported.
+ * 'wanted' is the set of names that need to be imported.
*/
def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
// Single symbol imports might be implicits! See bug #1752. Rather than
// try to finesse this, we will mimic all imports for now.
def keepHandler(handler: MemberHandler) = handler match {
- /* While defining classes in class based mode - implicits are not needed. */
+ // While defining classes in class based mode - implicits are not needed.
case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x))
case _: ImportHandler => true
case x => x.definesImplicit || (x.definedNames exists wanted)
}
reqs match {
- case Nil => Nil
+ case Nil => predefEscapes = wanted contains PredefModule.name ; Nil
case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
case rh :: rest =>
import rh.handler._
@@ -127,9 +134,6 @@ trait Imports {
select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
}
- val code, trailingBraces, accessPath = new StringBuilder
- val currentImps = mutable.HashSet[Name]()
-
// add code for a new object to hold some imports
def addWrapper() {
import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
@@ -146,6 +150,9 @@ trait Imports {
try op finally addWrapper()
}
+ // imports from Predef are relocated to the template header to allow hiding.
+ def checkHeader(h: ImportHandler) = h.referencedNames contains PredefModule.name
+
// loop through previous requests, adding imports for each one
wrapBeforeAndAfter {
// Reusing a single temporary value when import from a line with multiple definitions.
@@ -153,6 +160,9 @@ trait Imports {
for (ReqAndHandler(req, handler) <- reqsToUse) {
val objName = req.lineRep.readPathInstance
handler match {
+ case h: ImportHandler if checkHeader(h) =>
+ header.clear()
+ header append f"${h.member}%n"
// If the user entered an import, then just use it; add an import wrapping
// level if the import might conflict with some other import
case x: ImportHandler if x.importsWildcard =>
@@ -194,7 +204,8 @@ trait Imports {
}
}
- ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
+ val computedHeader = if (predefEscapes) header.toString else ""
+ ComputedImports(computedHeader, code.toString, trailingBraces.toString, accessPath.toString)
}
private def allReqAndHandlers =
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 743c2a401c..0353e58e67 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -787,7 +787,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (isReduced) NodeSeq.Empty else {
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
- // notice the }{ in the next lines, they are necessary to avoid an undesired withspace in output
+ // notice the }{ in the next lines, they are necessary to avoid an undesired whitespace in output
<span name={ vl.name }>{
Text(vl.name)
}{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
index 4bed106f43..9381cf3a35 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -145,7 +145,7 @@ class DotProcess(settings: doc.Settings) {
// we shouldn't just sit there for 50s not reporting anything, no?
settings.printMsg("Graphviz dot encountered an error when generating the diagram for:")
settings.printMsg(templateName)
- settings.printMsg("These are usually spurious errors, but if you notice a persistant error on")
+ settings.printMsg("These are usually spurious errors, but if you notice a persistent error on")
settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.")
}
}
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 08372685d6..ce3c8062d7 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -30,7 +30,7 @@ scala> val four: anotherint = 4
four: anotherint = 4
scala> val bogus: anotherint = "hello"
-<console>:11: error: type mismatch;
+<console>:12: error: type mismatch;
found : String("hello")
required: anotherint
(which expands to) Int
@@ -353,7 +353,7 @@ defined class Term
scala> def f(e: Exp) = e match { // non-exhaustive warning here
case _:Fact => 3
}
-<console>:21: warning: match may not be exhaustive.
+<console>:22: warning: match may not be exhaustive.
It would fail on the following inputs: Exp(), Term()
def f(e: Exp) = e match { // non-exhaustive warning here
^
@@ -363,6 +363,6 @@ scala> :quit
plusOne: (x: Int)Int
res0: Int = 6
res0: String = after reset
-<console>:11: error: not found: value plusOne
+<console>:12: error: not found: value plusOne
plusOne(5) // should be undefined now
^
diff --git a/test/files/neg/implicit-ambiguous-2.check b/test/files/neg/implicit-ambiguous-2.check
new file mode 100644
index 0000000000..4a10b0dd65
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-2.check
@@ -0,0 +1,4 @@
+implicit-ambiguous-2.scala:10: error: Could not prove Int =!= Int
+ implicitly[Int =!= Int]
+ ^
+one error found
diff --git a/test/files/neg/implicit-ambiguous-2.scala b/test/files/neg/implicit-ambiguous-2.scala
new file mode 100644
index 0000000000..563c8c583f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-2.scala
@@ -0,0 +1,11 @@
+object Test {
+ trait =!=[C, D]
+
+ implicit def neq[E, F] : E =!= F = null
+
+ implicit def neqAmbig1[G, H, J] : J =!= J = null
+ @annotation.implicitAmbiguous("Could not prove ${I} =!= ${I}")
+ implicit def neqAmbig2[I] : I =!= I = null
+
+ implicitly[Int =!= Int]
+}
diff --git a/test/files/neg/implicit-ambiguous-invalid.check b/test/files/neg/implicit-ambiguous-invalid.check
new file mode 100644
index 0000000000..68b607c4c2
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-invalid.check
@@ -0,0 +1,7 @@
+implicit-ambiguous-invalid.scala:5: warning: Invalid implicitAmbiguous message for method neqAmbig1 in object Test:
+The type parameter B referenced in the message of the @implicitAmbiguous annotation is not defined by method neqAmbig1.
+ implicit def neqAmbig1[A] : A =!= A = null
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/implicit-ambiguous-invalid.flags b/test/files/neg/implicit-ambiguous-invalid.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-invalid.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/implicit-ambiguous-invalid.scala b/test/files/neg/implicit-ambiguous-invalid.scala
new file mode 100644
index 0000000000..f8f9da655f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous-invalid.scala
@@ -0,0 +1,6 @@
+object Test {
+ trait =!=[C, D]
+
+ @annotation.implicitAmbiguous("Could not prove ${A} =!= ${B}")
+ implicit def neqAmbig1[A] : A =!= A = null
+}
diff --git a/test/files/neg/implicit-ambiguous.check b/test/files/neg/implicit-ambiguous.check
new file mode 100644
index 0000000000..0b3cebcb6f
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous.check
@@ -0,0 +1,4 @@
+implicit-ambiguous.scala:10: error: Could not prove Int =!= Int
+ implicitly[Int =!= Int]
+ ^
+one error found
diff --git a/test/files/neg/implicit-ambiguous.scala b/test/files/neg/implicit-ambiguous.scala
new file mode 100644
index 0000000000..79b1297915
--- /dev/null
+++ b/test/files/neg/implicit-ambiguous.scala
@@ -0,0 +1,11 @@
+object Test {
+ trait =!=[C, D]
+
+ implicit def neq[E, F] : E =!= F = null
+
+ @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}")
+ implicit def neqAmbig1[G, H, J] : J =!= J = null
+ implicit def neqAmbig2[I] : I =!= I = null
+
+ implicitly[Int =!= Int]
+}
diff --git a/test/files/neg/name-lookup-stable.check b/test/files/neg/name-lookup-stable.check
index 751df9505e..68d98c4162 100644
--- a/test/files/neg/name-lookup-stable.check
+++ b/test/files/neg/name-lookup-stable.check
@@ -6,6 +6,6 @@ import ColumnOption._
name-lookup-stable.scala:17: error: reference to PrimaryKey is ambiguous;
it is both defined in class A and imported subsequently by
import ColumnOption._
- PrimaryKey // was already ambigious in 2.10.3
+ PrimaryKey // was already ambiguous in 2.10.3
^
two errors found
diff --git a/test/files/neg/name-lookup-stable.scala b/test/files/neg/name-lookup-stable.scala
index 0d862f06e1..2941e05875 100644
--- a/test/files/neg/name-lookup-stable.scala
+++ b/test/files/neg/name-lookup-stable.scala
@@ -14,7 +14,7 @@ class A {
(null: Any) match { case PrimaryKey => }
- PrimaryKey // was already ambigious in 2.10.3
+ PrimaryKey // was already ambiguous in 2.10.3
}
}
diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala
index 8da3868566..b1ba41bd54 100644
--- a/test/files/neg/t5376.scala
+++ b/test/files/neg/t5376.scala
@@ -12,7 +12,7 @@ object Test {
"a": Int
}
- // Import one implict and one non-implicit method with the
+ // Import one implicit and one non-implicit method with the
// same name in the same scope.
def m2 = {
import O1._
diff --git a/test/files/neg/t6666d.check b/test/files/neg/t6666d.check
deleted file mode 100644
index b4785f0129..0000000000
--- a/test/files/neg/t6666d.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t6666d.scala:7: error: Implementation restriction: access of object TreeOrd$1 from object TreeOrd$2, would require illegal premature access to the unconstructed `this` of class Test
- implicit object TreeOrd extends Ordering[K](){
- ^
-one error found
diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check
new file mode 100644
index 0000000000..497ef35070
--- /dev/null
+++ b/test/files/neg/t6810.check
@@ -0,0 +1,28 @@
+t6810.scala:4: error: unclosed character literal
+ val y = '
+ ^
+t6810.scala:5: error: unclosed character literal
+' // but not embedded EOL sequences not represented as escapes
+^
+t6810.scala:9: error: unclosed string literal
+ val Y = "
+ ^
+t6810.scala:10: error: unclosed string literal
+" // obviously not
+^
+t6810.scala:20: error: unclosed quoted identifier
+ val `
+ ^
+t6810.scala:21: error: unclosed quoted identifier
+` = EOL // not raw string literals aka triple-quoted, multiline strings
+^
+t6810.scala:24: error: unclosed character literal
+ val b = '
+ ^
+t6810.scala:25: error: unclosed character literal
+' // CR seen as EOL by scanner
+^
+t6810.scala:24: error: '=' expected but ';' found.
+ val b = '
+^
+9 errors found
diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala
new file mode 100644
index 0000000000..50c305d70c
--- /dev/null
+++ b/test/files/neg/t6810.scala
@@ -0,0 +1,26 @@
+
+trait t6810 {
+ val x = '\u000A' // char literals accept arbitrary unicode escapes
+ val y = '
+' // but not embedded EOL sequences not represented as escapes
+ val z = '\n' // normally, expect this escape
+
+ val X = "\u000A" // it's the same as ordinary string literals
+ val Y = "
+" // obviously not
+ val Z = "\n" // normally, expect this escape
+
+ val A = """
+""" // which is what these are for
+ val B = s"""
+""" // or the same for interpolated strings
+
+ import scala.compat.Platform.EOL
+ val `\u000A` = EOL // backquoted identifiers are arbitrary string literals
+ val `
+` = EOL // not raw string literals aka triple-quoted, multiline strings
+
+ val a = '\u000D' // similar treatment of CR
+ val b = ' ' // CR seen as EOL by scanner
+ val c = '\r' // traditionally
+}
diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala
index b29d591cb1..cbf0bf1c5a 100644
--- a/test/files/neg/t8597b.scala
+++ b/test/files/neg/t8597b.scala
@@ -4,7 +4,7 @@ object Unchecked {
// t is a fresh pattern type variable, despite our attempts to
// backtick our way to the enclosing `t`. Under this interpretation,
- // the absense of an unchecked warning is expected.
+ // the absence of an unchecked warning is expected.
(null: Any) match {
case _: Some[t] => // no warn
}
diff --git a/test/files/neg/t8675b.scala b/test/files/neg/t8675b.scala
index bffed2141c..b2212fa234 100644
--- a/test/files/neg/t8675b.scala
+++ b/test/files/neg/t8675b.scala
@@ -9,7 +9,7 @@ object Test {
}
trait Reportable1[Params, R]
- // "missing paramater type" error was swallowed in 2.11.0 leading to a crash
+ // "missing parameter type" error was swallowed in 2.11.0 leading to a crash
// in the backend.
//
// This error is itself a regression (or at least a change) in 2.11.0-M7,
diff --git a/test/files/neg/virtpatmat_exhaust_compound.scala b/test/files/neg/virtpatmat_exhaust_compound.scala
index 386c7af98d..4ff04dd06a 100644
--- a/test/files/neg/virtpatmat_exhaust_compound.scala
+++ b/test/files/neg/virtpatmat_exhaust_compound.scala
@@ -10,7 +10,7 @@ case object O3 extends Base2
case object O4 extends Base with Base2
object Test {
- val a /*: Product with Serialiable with Base */ = if (true) O1 else O2
+ val a /*: Product with Serializable with Base */ = if (true) O1 else O2
a match {
case null =>
}
diff --git a/test/files/pos/t2405.scala b/test/files/pos/t2405.scala
index 224b2ce83b..0bc7a771b2 100644
--- a/test/files/pos/t2405.scala
+++ b/test/files/pos/t2405.scala
@@ -6,14 +6,14 @@ object Test1 {
implicitly[Int]
}
-// Testing for the absense of shadowing #1.
+// Testing for the absence of shadowing #1.
object Test2 {
import A.{x => y}
val x = 2
implicitly[Int]
}
-// Testing for the absense of shadowing #2.
+// Testing for the absence of shadowing #2.
object Test3 {
{
import A.{x => y}
diff --git a/test/files/neg/t6666d.scala b/test/files/pos/t6666d.scala
index 49a688f91b..49a688f91b 100644
--- a/test/files/neg/t6666d.scala
+++ b/test/files/pos/t6666d.scala
diff --git a/test/files/pos/t8002-nested-scope.scala b/test/files/pos/t8002-nested-scope.scala
index a2088bce7a..8ce809e556 100644
--- a/test/files/pos/t8002-nested-scope.scala
+++ b/test/files/pos/t8002-nested-scope.scala
@@ -1,5 +1,5 @@
// This test serves to capture the status quo, but should really
-// emit an accessibiltiy error.
+// emit an accessibility error.
// `Namers#companionSymbolOf` seems too lenient, and currently doesn't
// implement the same-scope checks mentioned:
diff --git a/test/files/pos/t9442.scala b/test/files/pos/t9442.scala
new file mode 100644
index 0000000000..2ea81e79cb
--- /dev/null
+++ b/test/files/pos/t9442.scala
@@ -0,0 +1,14 @@
+trait Ctx {
+ trait Tree
+}
+trait Lst[+A] {
+ def zip[A1 >: A, B](that: Lst[B]): Nothing
+}
+class C[@specialized(Int) T] {
+ def moo(t: T) = {
+ def foo1(c: Ctx)(l: Lst[c.Tree]) = l zip l
+ def foo2(c: Ctx)(l: Lst[c.Tree]*) = l(0) zip l(1)
+ def foo3(c: Ctx)(l: => Lst[c.Tree]) = l zip l
+ ???
+ }
+}
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 670d6f49aa..5444cf2088 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -133,16 +133,16 @@ y: String = hello
scala>
scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
-<console>:11: error: not found: value e
+<console>:12: error: not found: value e
val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
^
-<console>:11: error: not found: value f
+<console>:12: error: not found: value f
val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
^
-<console>:11: error: not found: value g
+<console>:12: error: not found: value g
val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
^
-<console>:11: error: not found: value h
+<console>:12: error: not found: value h
val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
^
diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala
index fd3f2a996a..2291b22f7a 100644
--- a/test/files/run/dead-code-elimination.scala
+++ b/test/files/run/dead-code-elimination.scala
@@ -10,7 +10,7 @@
// the stack after code elimination.
//
// Originally, this did not compile, but I included it in the run
-// tests because this was ASM-dependand and did not happen for GenJVM.
+// tests because this was ASM-dependent and did not happen for GenJVM.
//
// Thus, we run the code and force the loading of class B -- if the
// bytecode is incorrect, it will fail the test.
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
index 5a8d0ad9d3..ea698cec59 100644
--- a/test/files/run/idempotency-case-classes.check
+++ b/test/files/run/idempotency-case-classes.check
@@ -47,8 +47,7 @@ C(2,3)
case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
scala.this.None
else
- Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y));
- <synthetic> private def readResolve(): Object = C
+ Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y))
};
Predef.println(C.apply(2, 3))
}
diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check
index e050fb4bc1..560529ba03 100644
--- a/test/files/run/kind-repl-command.check
+++ b/test/files/run/kind-repl-command.check
@@ -19,7 +19,7 @@ scala> :k new { def empty = false }
AnyRef{def empty: Boolean}'s kind is A
scala> :k Nonexisting
-<console>:11: error: not found: value Nonexisting
+<console>:12: error: not found: value Nonexisting
Nonexisting
^
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 7fb4a04546..4159dbdf91 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -192,7 +192,7 @@ object Test extends App {
println(argName) // should be 4
test5 { argName = 5 }
println(argName) // should be 5
- val a: Unit = test1(a = 10, b = "2") // local values a and b exist, but not ambiuous since they're val's
+ val a: Unit = test1(a = 10, b = "2") // local values a and b exist, but it's not ambiguous since they're vals
// dependent types and copy method
diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala
index 5c5a20fa3b..454539a4b1 100644
--- a/test/files/run/nothingTypeNoOpt.scala
+++ b/test/files/run/nothingTypeNoOpt.scala
@@ -26,7 +26,7 @@ class C {
}
def f5(x: Boolean) = {
- // stack heights need to be the smae. ??? looks to the jvm like returning a value of
+ // stack heights need to be the same. ??? looks to the jvm like returning a value of
// type Nothing$, need to drop or throw it.
println(
if (x) { ???; 10 }
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
index 025d377a43..32ed876356 100644
--- a/test/files/run/reify-repl-fail-gracefully.check
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -8,7 +8,7 @@ import scala.reflect.runtime.universe._
scala>
scala> reify
-<console>:15: error: too few argument lists for macro invocation
+<console>:16: error: too few argument lists for macro invocation
reify
^
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
index e69dc60199..985f646579 100644
--- a/test/files/run/reify_newimpl_22.check
+++ b/test/files/run/reify_newimpl_22.check
@@ -15,7 +15,7 @@ scala> {
}
println(code.eval)
}
-<console>:18: free term: Ident(TermName("x")) defined by res0 in <console>:17:14
+<console>:19: free term: Ident(TermName("x")) defined by res0 in <console>:18:14
val code = reify {
^
2
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
index 1356d509d3..f60113c69f 100644
--- a/test/files/run/reify_newimpl_23.check
+++ b/test/files/run/reify_newimpl_23.check
@@ -14,7 +14,7 @@ scala> def foo[T]{
}
println(code.eval)
}
-<console>:16: free type: Ident(TypeName("T")) defined by foo in <console>:15:16
+<console>:17: free type: Ident(TypeName("T")) defined by foo in <console>:16:16
val code = reify {
^
foo: [T]=> Unit
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
index e512cfc52e..9104d8df0b 100644
--- a/test/files/run/reify_newimpl_25.check
+++ b/test/files/run/reify_newimpl_25.check
@@ -5,7 +5,7 @@ scala> {
val tt = implicitly[TypeTag[x.type]]
println(tt)
}
-<console>:14: free term: Ident(TermName("x")) defined by res0 in <console>:13:14
+<console>:15: free term: Ident(TermName("x")) defined by res0 in <console>:14:14
val tt = implicitly[TypeTag[x.type]]
^
TypeTag[x.type]
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index b203389db1..cbb21854ba 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -4,7 +4,7 @@ scala> def foo[T]{
val tt = implicitly[WeakTypeTag[List[T]]]
println(tt)
}
-<console>:12: free type: Ident(TypeName("T")) defined by foo in <console>:10:16
+<console>:13: free type: Ident(TypeName("T")) defined by foo in <console>:11:16
val tt = implicitly[WeakTypeTag[List[T]]]
^
foo: [T]=> Unit
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index f0c488455f..e0a1f4ecd6 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -1,12 +1,12 @@
scala> 2 ; 3
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
2 ;;
^
res0: Int = 3
scala> { 2 ; 3 }
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
{ 2 ; 3 }
^
res1: Int = 3
@@ -15,16 +15,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
1 +
2 +
3 } ; bippy+88+11
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
defined object Cow
diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check
index 35853f10da..6516f4ea90 100644
--- a/test/files/run/repl-parens.check
+++ b/test/files/run/repl-parens.check
@@ -18,10 +18,10 @@ scala> ( (2 + 2 ) )
res5: Int = 4
scala> 5 ; ( (2 + 2 ) ) ; ((5))
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; ( (2 + 2 ) ) ;;
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; ( (2 + 2 ) ) ;;
^
res6: Int = 5
@@ -38,16 +38,16 @@ res9: String = 4423
scala>
scala> 55 ; ((2 + 2)) ; (1, 2, 3)
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; ((2 + 2)) ;;
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; ((2 + 2)) ;;
^
res10: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; (x: Int) => x + 1 ;;
^
res11: () => Int = <function0>
@@ -58,7 +58,7 @@ scala> () => 5
res12: () => Int = <function0>
scala> 55 ; () => 5
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ;;
^
res13: () => Int = <function0>
diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check
index 5b6a84144d..dbf5363c0f 100644
--- a/test/files/run/repl-paste-2.check
+++ b/test/files/run/repl-paste-2.check
@@ -42,7 +42,7 @@ scala> res5 + res6
res1: Int = 690
scala> val x = dingus
-<console>:10: error: not found: value dingus
+<console>:11: error: not found: value dingus
val x = dingus
^
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 2a7b7783d9..4e030bd9fa 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -25,4 +25,7 @@ m: $r.treedsl.global.Literal = 10
scala> typed(m).tpe // typed is in scope
res2: $r.treedsl.global.Type = Int(10)
+scala> """escaping is hard, m'kah"""
+res3: String = escaping is hard, m'kah
+
scala> :quit
diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala
index 4dfeb37885..5ecaad8723 100644
--- a/test/files/run/repl-power.scala
+++ b/test/files/run/repl-power.scala
@@ -1,7 +1,9 @@
import scala.tools.partest.ReplTest
object Test extends ReplTest {
- def code = """
+ def tripleQuote(s: String) = "\"\"\"" + s + "\"\"\""
+
+ def code = s"""
:power
// guarding against "error: reference to global is ambiguous"
global.emptyValDef // "it is imported twice in the same scope by ..."
@@ -9,5 +11,6 @@ val tp = ArrayClass[scala.util.Random] // magic with tags
tp.memberType(Array_apply) // evidence
val m = LIT(10) // treedsl
typed(m).tpe // typed is in scope
+${tripleQuote("escaping is hard, m'kah")}
""".trim
}
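
Note on the repl-power change above: the session script becomes an s-interpolated string, and the tripleQuote helper wraps a value in literal triple quotes before it is spliced in. Below is a minimal sketch of that composition, assuming nothing beyond the helper shown in the diff; the demo object name is made up for illustration.

object TripleQuoteDemo extends App {
  // Same shape as the helper added to repl-power.scala: wrap a string in literal triple quotes.
  def tripleQuote(s: String) = "\"\"\"" + s + "\"\"\""

  // Splicing the helper's result into an s-interpolated script yields a line that the
  // REPL session under test parses as a triple-quoted String literal.
  val sessionLine = s"${tripleQuote("escaping is hard, m'kah")}"
  println(sessionLine) // """escaping is hard, m'kah"""
}
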
diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check
index b0683fff79..cf4d9a149e 100644
--- a/test/files/run/repl-reset.check
+++ b/test/files/run/repl-reset.check
@@ -28,13 +28,13 @@ Forgetting all expression results and named terms: $intp, BippyBungus, x1, x2, x
Forgetting defined types: BippyBungus
scala> x1 + x2 + x3
-<console>:11: error: not found: value x1
+<console>:12: error: not found: value x1
x1 + x2 + x3
^
-<console>:11: error: not found: value x2
+<console>:12: error: not found: value x2
x1 + x2 + x3
^
-<console>:11: error: not found: value x3
+<console>:12: error: not found: value x3
x1 + x2 + x3
^
@@ -42,7 +42,7 @@ scala> val x1 = 4
x1: Int = 4
scala> new BippyBungus
-<console>:11: error: not found: type BippyBungus
+<console>:12: error: not found: type BippyBungus
new BippyBungus
^
diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check
index eb62729f5c..bbbf0dcdf1 100644
--- a/test/files/run/repl-serialization.check
+++ b/test/files/run/repl-serialization.check
@@ -20,6 +20,5 @@ u: U = U
evaluating O
constructing A
== reconstituting into a fresh classloader
- evaluating O
== evaluating reconstituted lambda
constructing A
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
index ea91e32239..c0814905f9 100644
--- a/test/files/run/repl-trim-stack-trace.scala
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -12,7 +12,7 @@ f: Nothing
scala> f
java.lang.Exception: Uh-oh
- at .f(<console>:10)
+ at .f(<console>:11)
... 69 elided
scala> def f = throw new Exception("")
@@ -20,7 +20,7 @@ f: Nothing
scala> f
java.lang.Exception:
- at .f(<console>:10)
+ at .f(<console>:11)
... 69 elided
scala> def f = throw new Exception
@@ -28,7 +28,7 @@ f: Nothing
scala> f
java.lang.Exception
- at .f(<console>:10)
+ at .f(<console>:11)
... 69 elided
scala> :quit"""
diff --git a/test/files/run/t1931.scala b/test/files/run/t1931.scala
new file mode 100644
index 0000000000..eedfa9b03d
--- /dev/null
+++ b/test/files/run/t1931.scala
@@ -0,0 +1,43 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+
+ def session =
+"""
+scala> val x: Any = 42
+x: Any = 42
+
+scala> x + " works"
+res0: String = 42 works
+
+scala> import Predef.{ any2stringadd => _, _ }
+import Predef.{any2stringadd=>_, _}
+
+scala> x + " works"
+<console>:14: error: value + is not a member of Any
+ x + " works"
+ ^
+
+scala> import Predef._
+import Predef._
+
+scala> x + " works"
+res2: String = 42 works
+
+scala> object Predef { def f = 42 }
+defined object Predef
+
+scala> import Predef._
+import Predef._
+
+scala> f
+<console>:14: error: not found: value f
+ f
+ ^
+
+scala> Predef.f
+res4: Int = 42
+
+scala> :quit"""
+}
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index 3ab3eaffd8..6e099222b0 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -5,7 +5,7 @@ scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() {
defined class Foo
scala> val f = new Foo
-<console>:11: warning: class Foo is deprecated: foooo
+<console>:12: warning: class Foo is deprecated: foooo
val f = new Foo
^
f: Foo = Bippy
diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala
index cbd87b5949..f2d1a8b3f8 100644
--- a/test/files/run/t4594-repl-settings.scala
+++ b/test/files/run/t4594-repl-settings.scala
@@ -15,7 +15,7 @@ object Test extends SessionTest {
|scala> :settings -deprecation
|
|scala> def b = depp
- |<console>:11: warning: method depp is deprecated: Please don't do that.
+ |<console>:12: warning: method depp is deprecated: Please don't do that.
| def b = depp
| ^
|b: String
diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check
index 857a5ab556..9191997624 100644
--- a/test/files/run/t5655.check
+++ b/test/files/run/t5655.check
@@ -6,7 +6,7 @@ scala> import x._
import x._
scala> x
-<console>:15: error: reference to x is ambiguous;
+<console>:16: error: reference to x is ambiguous;
it is imported twice in the same scope by
import x._
and import x
@@ -14,7 +14,7 @@ and import x
^
scala> x
-<console>:15: error: reference to x is ambiguous;
+<console>:16: error: reference to x is ambiguous;
it is imported twice in the same scope by
import x._
and import x
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index 31923e7119..4d8429e8f2 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -15,21 +15,21 @@ warning: there was one feature warning; re-run with -feature for details
convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
scala> convert(Some[Int](0))
-<console>:15: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+<console>:16: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
--- because ---
argument expression's type is not compatible with formal parameter type;
found : Some[Int]
required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } }
convert(Some[Int](0))
^
-<console>:15: error: type mismatch;
+<console>:16: error: type mismatch;
found : Some[Int]
required: F[_ <: F[_]]
convert(Some[Int](0))
^
scala> Range(1,2).toArray: Seq[_]
-<console>:14: error: polymorphic expression cannot be instantiated to expected type;
+<console>:15: error: polymorphic expression cannot be instantiated to expected type;
found : [B >: Int]Array[B]
required: Seq[_]
Range(1,2).toArray: Seq[_]
diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check
index f19c39e7e1..487daf4878 100644
--- a/test/files/run/t7747-repl.check
+++ b/test/files/run/t7747-repl.check
@@ -15,13 +15,13 @@ scala> val z = x * y
z: Int = 156
scala> 2 ; 3
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
2 ;;
^
res0: Int = 3
scala> { 2 ; 3 }
-<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
{ 2 ; 3 }
^
res1: Int = 3
@@ -30,16 +30,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
1 +
2 +
3 } ; bippy+88+11
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
defined object Cow
@@ -81,10 +81,10 @@ scala> ( (2 + 2 ) )
res10: Int = 4
scala> 5 ; ( (2 + 2 ) ) ; ((5))
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; ( (2 + 2 ) ) ;;
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; ( (2 + 2 ) ) ;;
^
res11: Int = 5
@@ -101,16 +101,16 @@ res14: String = 4423
scala>
scala> 55 ; ((2 + 2)) ; (1, 2, 3)
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; ((2 + 2)) ;;
^
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; ((2 + 2)) ;;
^
res15: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
-<console>:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ; (x: Int) => x + 1 ;;
^
res16: () => Int = <function0>
@@ -121,7 +121,7 @@ scala> () => 5
res17: () => Int = <function0>
scala> 55 ; () => 5
-<console>:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+<console>:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
55 ;;
^
res18: () => Int = <function0>
@@ -209,13 +209,13 @@ Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, C
Forgetting defined types: BippyBungus, Moo, Ruminant
scala> x1 + x2 + x3
-<console>:11: error: not found: value x1
+<console>:12: error: not found: value x1
x1 + x2 + x3
^
-<console>:11: error: not found: value x2
+<console>:12: error: not found: value x2
x1 + x2 + x3
^
-<console>:11: error: not found: value x3
+<console>:12: error: not found: value x3
x1 + x2 + x3
^
@@ -223,7 +223,7 @@ scala> val x1 = 4
x1: Int = 4
scala> new BippyBungus
-<console>:11: error: not found: type BippyBungus
+<console>:12: error: not found: type BippyBungus
new BippyBungus
^
diff --git a/test/files/run/t8047.scala b/test/files/run/t8047.scala
index f5660541e8..9ec8c1dc56 100644
--- a/test/files/run/t8047.scala
+++ b/test/files/run/t8047.scala
@@ -1,7 +1,7 @@
object Test extends App {
import scala.reflect.runtime.universe._
//
- // x's owner is outer Test scope. Previosly the quasiquote expansion
+ // x's owner is outer Test scope. Previously the quasiquote expansion
// looked like:
//
// object Test {
diff --git a/test/files/run/t8346.check b/test/files/run/t8346.check
deleted file mode 100644
index 1ba5c31abe..0000000000
--- a/test/files/run/t8346.check
+++ /dev/null
@@ -1,6 +0,0 @@
-BitSet: List(invariant, invariant, invariant, invariant)
-HashSet: List(covariant (true), covariant (true), covariant (true), covariant (true))
-ListSet: List(covariant (true), covariant (true), covariant (true), covariant (true))
-SortedSet: List(invariant, invariant, invariant, invariant)
-TreeSet: List(invariant, invariant, invariant, invariant)
-ValueSet: invariant
diff --git a/test/files/run/t8346.scala b/test/files/run/t8346.scala
deleted file mode 100644
index 5f3df84174..0000000000
--- a/test/files/run/t8346.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-object Test extends App {
- import reflect.ClassTag
-
- object SomeEnum extends Enumeration {
- val one, two, three, four = Value
- }
-
- def sctor[A <: Set[Int]](f: Int => A)(implicit A: ClassTag[A])
- : (String, Int => Set[Int]) =
- (A.runtimeClass.getSimpleName, f)
-
- val inits: Seq[(String, Int => Set[Int])] = {
- import collection.immutable.{Seq => _, _}
- Seq(sctor(BitSet(_)),
- sctor(HashSet(_)),
- sctor(ListSet(_)),
- sctor(SortedSet(_)),
- sctor(TreeSet(_)))
- }
-
- def sVarInfo[A](sa: Set[A]): String = {
- val saa = sa.toSet[Any]
- if (sa eq saa) s"""covariant (${(saa + "hi") contains "hi"})"""
- else "invariant"
- }
-
- inits foreach {case (name, singleton) =>
- print(s"${name}: ")
- val one = singleton(1)
- println(Seq(2,3,4).scanLeft(one)(_ + _) map sVarInfo toList)
- }
-
- println(s"ValueSet: ${sVarInfo(SomeEnum.values)}")
-}
diff --git a/test/files/run/t9170.scala b/test/files/run/t9170.scala
index d6cf516615..f39467bc25 100644
--- a/test/files/run/t9170.scala
+++ b/test/files/run/t9170.scala
@@ -8,17 +8,17 @@ object Test extends SessionTest {
def session =
"""
scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
-<console>:10: error: double definition:
-def f[A](a: => A): Int at line 10 and
-def f[A](a: => Either[Exception,A]): Int at line 10
+<console>:11: error: double definition:
+def f[A](a: => A): Int at line 11 and
+def f[A](a: => Either[Exception,A]): Int at line 11
have same type after erasure: (a: Function0)Int
object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
^
scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
-<console>:10: error: double definition:
-def f[A](a: => A): Int at line 10 and
-def f[A](a: => Either[Exception,A]): Int at line 10
+<console>:11: error: double definition:
+def f[A](a: => A): Int at line 11 and
+def f[A](a: => Either[Exception,A]): Int at line 11
have same type after erasure: (a: Function0)Int
object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
^
@@ -27,9 +27,9 @@ scala> object Y {
| def f[A](a: => A) = 1
| def f[A](a: => Either[Exception, A]) = 2
| }
-<console>:12: error: double definition:
-def f[A](a: => A): Int at line 11 and
-def f[A](a: => Either[Exception,A]): Int at line 12
+<console>:13: error: double definition:
+def f[A](a: => A): Int at line 12 and
+def f[A](a: => Either[Exception,A]): Int at line 13
have same type after erasure: (a: Function0)Int
def f[A](a: => Either[Exception, A]) = 2
^
@@ -44,9 +44,9 @@ object Y {
// Exiting paste mode, now interpreting.
-<console>:12: error: double definition:
-def f[A](a: => A): Int at line 11 and
-def f[A](a: => Either[Exception,A]): Int at line 12
+<console>:13: error: double definition:
+def f[A](a: => A): Int at line 12 and
+def f[A](a: => Either[Exception,A]): Int at line 13
have same type after erasure: (a: Function0)Int
def f[A](a: => Either[Exception, A]) = 2
^
diff --git a/test/files/run/t9206.scala b/test/files/run/t9206.scala
index 872c980fe4..406798104e 100644
--- a/test/files/run/t9206.scala
+++ b/test/files/run/t9206.scala
@@ -7,14 +7,14 @@ object Test extends SessionTest {
def session =
s"""|
|scala> val i: Int = "foo"
- |<console>:10: error: type mismatch;
+ |<console>:11: error: type mismatch;
| found : String("foo")
| required: Int
| val i: Int = "foo"
| ^
|
|scala> { val j = 42 ; val i: Int = "foo" + j }
- |<console>:11: error: type mismatch;
+ |<console>:12: error: type mismatch;
| found : String
| required: Int
| { val j = 42 ; val i: Int = "foo" + j }
diff --git a/test/files/run/t9375.check b/test/files/run/t9375.check
new file mode 100644
index 0000000000..8f43fab025
--- /dev/null
+++ b/test/files/run/t9375.check
@@ -0,0 +1,60 @@
+ konstruktor: class A
+ konstruktor: class A$O$12$
+ konstruktor: class A$$anon$1
+ konstruktor: class A$A
+ konstruktor: class A$C
+ konstruktor: class C
+ konstruktor: class T$O$15$
+ konstruktor: class T$$anon$2
+ konstruktor: class T$A
+ konstruktor: class T$C
+ konstruktor: class A$N$
+ konstruktor: class T$N$
+serializing outer objects should not initialize any nested objects
+now initializing nested objects
+ konstruktor: class A$O$
+ konstruktor: class A$Op$
+ konstruktor: class A$N$O$
+ konstruktor: class A$N$Op$
+ konstruktor: class A$A$O$
+ konstruktor: class A$A$Op$
+ konstruktor: class A$T$O$
+ konstruktor: class A$T$Op$
+ konstruktor: class A$O$11$
+ konstruktor: class A$O$13$
+ konstruktor: class A$$anon$1$O$
+ konstruktor: class A$$anon$1$Op$
+ konstruktor: class T$O$
+ konstruktor: class T$Op$
+ konstruktor: class T$N$O$
+ konstruktor: class T$N$Op$
+ konstruktor: class T$A$O$
+ konstruktor: class T$A$Op$
+ konstruktor: class T$T$O$
+ konstruktor: class T$T$Op$
+ konstruktor: class T$O$14$
+ konstruktor: class T$O$16$
+ konstruktor: class T$$anon$2$O$
+ konstruktor: class T$$anon$2$Op$
+no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself)
+deserializing outer objects with non-initialized inners again
+accessing modules triggers initialization
+ konstruktor: class A$O$
+ konstruktor: class A$Op$
+ konstruktor: class A$N$O$
+ konstruktor: class A$N$Op$
+deserializing creates a new object graph, including new scala 'object' instances, no matter where serialization starts
+init static module M and field v
+ konstruktor: class M$
+ konstruktor: class M$O$18$
+serDeser does not initialize nested static modules
+init M.O
+ konstruktor: class M$O$
+serDeser nested static module
+objects declared in field decls are not static modules, so they deserialize to new instances
+init lazy val M.w
+objects declared in lazy val are not static modules either
+ konstruktor: class M$O$19$
+object declared in a function: new instance created on each invocation
+ konstruktor: class M$O$20$
+ konstruktor: class M$O$20$
diff --git a/test/files/run/t9375.scala b/test/files/run/t9375.scala
new file mode 100644
index 0000000000..3995b38666
--- /dev/null
+++ b/test/files/run/t9375.scala
@@ -0,0 +1,276 @@
+import java.io._
+
+object SerDes {
+ def serialize(obj: AnyRef): Array[Byte] = {
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ buffer.toByteArray
+ }
+
+ def deserialize(a: Array[Byte]): AnyRef = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(a))
+ in.readObject
+ }
+
+ def serializeDeserialize[T <: AnyRef](obj: T) = deserialize(serialize(obj)).asInstanceOf[T]
+}
+
+import SerDes._
+
+// tests to make sure that de-serializing an object does not run its constructor
+
+trait S extends Serializable {
+ println(" konstruktor: " + this.getClass)
+}
+
+trait SE extends S {
+ def outer: Object
+}
+
+class A extends S {
+ object O extends SE { def outer = A.this }
+ private[this] object Op extends SE { def outer = A.this }
+ def P: SE = Op
+
+ object N extends S {
+ object O extends SE { def outer = N }
+ private[this] object Op extends SE { def outer = N }
+ def P: SE = Op
+ }
+
+ class A extends S {
+ object O extends SE { def outer = A.this }
+ private[this] object Op extends SE { def outer = A.this }
+ def P: SE = Op
+ }
+
+ trait T extends S {
+ object O extends SE { def outer = T.this }
+ private[this] object Op extends SE { def outer = T.this }
+ def P: SE = Op
+ }
+ class C extends T
+
+ def u: SE = {
+ object O extends SE { def outer = A.this }
+ O
+ }
+
+ val v: SE = {
+ object O extends SE { def outer = A.this }
+ O
+ }
+
+ val f: () => SE = () => {
+ object O extends SE { def outer = A.this }
+ O
+ }
+
+ trait GetObj { def O: SE; def P: SE }
+ val a: GetObj = new GetObj with S {
+ def anonThis = this
+ object O extends SE { def outer = anonThis }
+ private[this] object Op extends SE { def outer = anonThis }
+ def P: SE = Op
+ }
+}
+
+trait T extends S {
+ object O extends SE { def outer = T.this }
+ private[this] object Op extends SE { def outer = T.this }
+ def P: SE = Op
+
+ object N extends S {
+ object O extends SE { def outer = N }
+ private[this] object Op extends SE { def outer = N }
+ def P: SE = Op
+ }
+
+ class A extends S {
+ object O extends SE { def outer = A.this }
+ private[this] object Op extends SE { def outer = A.this }
+ def P: SE = Op
+ }
+
+ trait T extends S {
+ object O extends SE { def outer = T.this }
+ private[this] object Op extends SE { def outer = T.this }
+ def P: SE = Op
+ }
+ class C extends T
+
+ def u: SE = {
+ object O extends SE { def outer = T.this }
+ O
+ }
+
+ val v: SE = {
+ object O extends SE { def outer = T.this }
+ O
+ }
+
+ val f: () => SE = () => {
+ object O extends SE { def outer = T.this }
+ O
+ }
+
+ trait GetObj { def O: SE; def P: SE }
+ val a: GetObj = new GetObj with S {
+ def anonThis = this
+ object O extends SE { def outer = anonThis }
+ private[this] object Op extends SE { def outer = anonThis }
+ def P: SE = Op
+ }
+}
+
+class C extends T
+
+object DeserializeModuleNoConstructor {
+ def t(): Unit = {
+ val a = new A
+ val aa = new a.A
+ val ac = new a.C
+
+ val c = new C
+ val ca = new c.A
+ val cc = new c.C
+
+ val outers: List[Object] = List(
+ a, a.N, aa, ac, a.a,
+ c, c.N, ca, cc, c.a
+ )
+
+ println("serializing outer objects should not initialize any nested objects")
+
+ val serANotInit = serialize(a)
+ outers foreach serializeDeserialize
+
+ println("now initializing nested objects")
+
+ val os: List[(SE, Object)] = List(
+ a.O -> a,
+ a.P -> a,
+ a.N.O -> a.N,
+ a.N.P -> a.N,
+ aa.O -> aa,
+ aa.P -> aa,
+ ac.O -> ac,
+ ac.P -> ac,
+ a.u -> a,
+ a.v -> a,
+ a.f() -> a,
+ a.a.O -> a.a,
+ a.a.P -> a.a,
+
+ c.O -> c,
+ c.P -> c,
+ c.N.O -> c.N,
+ c.N.P -> c.N,
+ ca.O -> ca,
+ ca.P -> ca,
+ cc.O -> cc,
+ cc.P -> cc,
+ c.u -> c,
+ c.v -> c,
+ c.f() -> c,
+ c.a.O -> c.a,
+ c.a.P -> c.a
+ )
+
+ println("no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself)")
+
+ for ((obj, outer) <- os) {
+ assert(obj.outer eq outer, s"${obj.outer} of $obj -- $outer")
+ serializeDeserialize(obj)
+ serializeDeserialize(outer)
+ }
+
+ println("deserializing outer objects with non-initialized inners again")
+ val aNotInit = deserialize(serANotInit).asInstanceOf[A]
+
+ println("accessing modules triggers initialization")
+ aNotInit.O
+ aNotInit.P
+ aNotInit.N.O
+ aNotInit.N.P
+
+ println("deserializing creates a new object graph, including new scala 'object' instances, no matter where serialization starts")
+ val deserializedAs: List[A] = List(
+ serializeDeserialize(a),
+ serializeDeserialize(a.O).outer.asInstanceOf[A],
+ serializeDeserialize(a.P).outer.asInstanceOf[A],
+ serializeDeserialize(a.v).outer.asInstanceOf[A]
+ )
+ for (aSD <- deserializedAs) {
+ assert(aSD ne a)
+ assert(aSD.O ne a.O)
+ assert(aSD.P ne a.P)
+ assert(aSD.N ne a.N)
+ assert(aSD.N.O ne a.N.O)
+ assert(aSD.N.P ne a.N.P)
+ assert(aSD.v ne a.v)
+ assert(aSD.a.O ne a.a.O)
+ assert(aSD.a.P ne a.a.P)
+ }
+ }
+}
+
+// tests for serializing / deserializing static modules
+
+object M extends S {
+ object O extends S
+
+ def u: S = {
+ object O extends S
+ O
+ }
+
+ val v: S = {
+ object O extends S
+ O
+ }
+
+ lazy val w: S = {
+ object O extends S
+ O
+ }
+
+ val f: () => S = () => {
+ object O extends S
+ O
+ }
+}
+
+object SerializingStaticModules {
+ def t(): Unit = {
+ println("init static module M and field v")
+ M
+
+ println("serDeser does not initialize nested static modules")
+ assert(serializeDeserialize(M) eq M)
+
+ println("init M.O")
+ M.O
+
+ println("serDeser nested static module")
+ assert(serializeDeserialize(M.O) eq M.O)
+
+ println("objects declared in field decls are not static modules, so they deserialize to new instances")
+ assert(serializeDeserialize(M.v) ne M.v)
+
+ println("init lazy val M.w")
+
+ println("objects declared in lazy val are not static modules either")
+ assert(serializeDeserialize(M.w) ne M.w)
+
+ println("object declared in a function: new instance created on each invocation")
+ assert(M.f() ne M.f())
+ }
+}
+
+
+object Test extends App {
+ DeserializeModuleNoConstructor.t()
+ SerializingStaticModules.t()
+}
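
The new t9375 test relies on a property of Java serialization: for a Serializable class whose nearest non-serializable superclass is Object, ObjectInputStream.readObject allocates a fresh instance without re-running the class's own constructor body. The following is a minimal sketch of that mechanism, mirroring the SerDes round-trip above; the class and object names here are illustrative only, not part of the commit.

import java.io._

// The constructor body prints, so we can observe whether it runs.
class Counted extends Serializable { println("konstruktor: " + getClass) }

object ConstructorSkipDemo extends App {
  // Round-trip through Java serialization, in the same way SerDes does above.
  def roundTrip[T <: AnyRef](obj: T): T = {
    val buffer = new ByteArrayOutputStream
    new ObjectOutputStream(buffer).writeObject(obj)
    new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)).readObject().asInstanceOf[T]
  }

  val c = new Counted      // prints once: the constructor runs on `new`
  val copy = roundTrip(c)  // prints nothing: readObject skips Counted's constructor body
  assert(copy ne c)        // deserialization still produces a fresh instance
}
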
diff --git a/test/files/run/t9425.scala b/test/files/run/t9425.scala
new file mode 100644
index 0000000000..f251cc8579
--- /dev/null
+++ b/test/files/run/t9425.scala
@@ -0,0 +1,8 @@
+class C { case class Foo private (x: Int); Foo.apply(0) }
+
+object Test {
+ def test(c: C) = {import c.Foo; Foo.apply(0)}
+ def main(args: Array[String]): Unit = {
+ test(new C)
+ }
+}
diff --git a/test/files/run/toolbox_expand_macro.check b/test/files/run/toolbox_expand_macro.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/toolbox_expand_macro.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/toolbox_expand_macro.scala b/test/files/run/toolbox_expand_macro.scala
new file mode 100644
index 0000000000..a52e449168
--- /dev/null
+++ b/test/files/run/toolbox_expand_macro.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox}
+
+object Test extends App {
+ val toolBox = cm.mkToolBox()
+ val x = 21
+ val runtimeMacro =
+ q"""object RuntimeMacro {
+ import scala.reflect.macros.whitebox.Context
+ import scala.language.experimental.macros
+
+ def add(y: Int): Int = macro addImpl
+ def addImpl(c: Context)(y: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val x = $x
+ c.Expr[Int](q"$$x + $$y")
+ }
+ }"""
+ val s = toolBox.define(runtimeMacro)
+ println(toolBox.eval(q"$s.add(21)"))
+}
diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check
index 79ce544493..cd860bf394 100644
--- a/test/files/run/xMigration.check
+++ b/test/files/run/xMigration.check
@@ -10,7 +10,7 @@ res1: Iterable[String] = MapLike(eis)
scala> :setting -Xmigration:any
scala> Map(1 -> "eis").values // warn
-<console>:11: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
`values` returns `Iterable[B]` rather than `Iterator[B]`.
Map(1 -> "eis").values // warn
^
@@ -24,7 +24,7 @@ res3: Iterable[String] = MapLike(eis)
scala> :setting -Xmigration:2.7
scala> Map(1 -> "eis").values // warn
-<console>:11: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
`values` returns `Iterable[B]` rather than `Iterator[B]`.
Map(1 -> "eis").values // warn
^
@@ -38,7 +38,7 @@ res5: Iterable[String] = MapLike(eis)
scala> :setting -Xmigration // same as :any
scala> Map(1 -> "eis").values // warn
-<console>:11: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+<console>:12: warning: method values in trait MapLike has changed semantics in version 2.8.0:
`values` returns `Iterable[B]` rather than `Iterator[B]`.
Map(1 -> "eis").values // warn
^
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
deleted file mode 100644
index 4cfacaf407..0000000000
--- a/test/files/scalacheck/avl.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-import org.scalacheck.Gen
-import org.scalacheck.Prop.forAll
-import org.scalacheck.Properties
-
-package scala.collection.mutable {
-
- /**
- * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1]
- */
- abstract class AVLTreeTest(name: String) extends Properties(name) {
-
- def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2)
-
- def capacityMax(depth: Int): Int = `2^`(depth+1) - 1
-
- def minDepthForCapacity(x: Int): Int = {
- var depth = 0
- while(capacityMax(depth) < x)
- depth += 1
- depth
- }
-
- def numberOfElementsInLeftSubTree(n: Int): collection.immutable.IndexedSeq[Int] = {
- val mid = n/2 + n%2
- ((1 until mid)
- .filter { i => math.abs(minDepthForCapacity(i) - minDepthForCapacity(n-i)) < 2 }
- .flatMap { i => Seq(i, n-(i+1)) }).toIndexedSeq.distinct
- }
-
- def makeAllBalancedTree[A](elements: List[A]): List[AVLTree[A]] = elements match {
- case Nil => Leaf::Nil
- case first::Nil => Node(first, Leaf, Leaf)::Nil
- case first::second::Nil => Node(second, Node(first, Leaf, Leaf), Leaf)::Node(first, Leaf, Node(second, Leaf, Leaf))::Nil
- case first::second::third::Nil => Node(second, Node(first, Leaf, Leaf), Node(third, Leaf, Leaf))::Nil
- case _ => {
- val combinations = for {
- left <- numberOfElementsInLeftSubTree(elements.size)
- root = elements(left)
- right = elements.size - (left + 1)
- } yield (root, left, right)
- (combinations.flatMap(triple => for {
- l <- makeAllBalancedTree(elements.take(triple._2))
- r <- makeAllBalancedTree(elements.takeRight(triple._3))
- } yield Node(triple._1, l, r))).toList
- }
- }
-
- def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
- size <- org.scalacheck.Gen.choose(20, 25)
- elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
- selected <- org.scalacheck.Gen.choose(0, 1000)
- } yield {
- // selected mustn't be in elements already
- val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2))
- (selected*2+1, list)
- }
-
- def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
- size <- org.scalacheck.Gen.choose(20, 25)
- elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
- e = elements.sorted.distinct
- selected <- org.scalacheck.Gen.choose(0, e.size-1)
- } yield {
- // selected must be in elements already
- val list = makeAllBalancedTree(e)
- (e(selected), list)
- }
- }
-
- trait AVLInvariants {
- self: AVLTreeTest =>
-
- def isBalanced[A](t: AVLTree[A]): Boolean = t match {
- case node: Node[A] => math.abs(node.balance) < 2 && (List(node.left, node.right) forall isBalanced)
- case Leaf => true
- }
-
- def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b)
- }
-
- property("Every tree is initially balanced.") = setup(isBalanced)
- }
-
- object TestInsert extends AVLTreeTest("Insert") with AVLInvariants {
- import math.Ordering.Int
- property("`insert` creates a new tree containing the given element. The tree remains balanced.") = forAll(genInput) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => {
- val modifiedTree = tree.insert(selected, Int)
- modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
- }).fold(true)((a, b) => a && b)
- }
- }
-
- object TestRemove extends AVLTreeTest("Remove") with AVLInvariants {
- import math.Ordering.Int
- property("`remove` creates a new tree without the given element. The tree remains balanced.") = forAll(genInputDelete) {
- case (selected: Int, trees: List[AVLTree[Int]]) =>
- trees.map(tree => {
- val modifiedTree = tree.remove(selected, Int)
- tree.contains(selected, Int) && !modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
- }).fold(true)((a, b) => a && b)
- }
- }
-}
-
-object Test extends Properties("AVL") {
- include(scala.collection.mutable.TestInsert)
- include(scala.collection.mutable.TestRemove)
-}
diff --git a/test/junit/scala/collection/immutable/SetTests.scala b/test/junit/scala/collection/immutable/SetTests.scala
new file mode 100644
index 0000000000..28c7864359
--- /dev/null
+++ b/test/junit/scala/collection/immutable/SetTests.scala
@@ -0,0 +1,81 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class SetTests {
+ @Test
+ def test_SI8346_toSet_soundness(): Unit = {
+ val any2stringadd = "Disabled string conversions so as not to get confused!"
+
+ def any[A](set: Set[A]): Set[Any] = {
+ val anyset = set.toSet[Any]
+ assert((anyset + "fish") contains "fish")
+ anyset
+ }
+
+ // Make sure default immutable Set does not rebuild itself on widening with toSet
+ // Need to cover 0, 1, 2, 3, 4 elements as special cases
+ var si = Set.empty[Int]
+ assert(si eq si.toSet[Any])
+ for (i <- 1 to 5) {
+ val s1 = Set(Array.range(1, i+1): _*)
+ val s2 = si + i
+ val s1a = any(s1)
+ val s2a = any(s2)
+ assert(s1 eq s1a)
+ assert(s2 eq s2a)
+ si = s2
+ }
+
+ // Make sure BitSet correctly rebuilds itself on widening with toSet
+ // Need to cover empty, values 0-63, values 0-127 as special cases
+ val bitsets = Seq(BitSet.empty, BitSet(23), BitSet(23, 99), BitSet(23, 99, 141))
+ bitsets.foreach{ b =>
+ val ba = any(b)
+ assert(b ne ba)
+ assertEquals(b, ba)
+ }
+
+ // Make sure HashSet (and by extension, its implementing class HashTrieSet)
+ // does not rebuild itself on widening by toSet
+ val hashset = HashSet(1, 3, 5, 7)
+ val hashseta = any(hashset)
+ assert(hashset eq hashseta)
+
+ // Make sure ListSet does not rebuild itself on widening by toSet
+ // (Covers Node also, since it subclasses ListSet)
+ val listset = ListSet(1, 3, 5, 7)
+ val listseta = any(listset)
+ assert(listset eq listseta)
+
+ // Make sure SortedSets correctly rebuild themselves on widening with toSet
+ // Covers TreeSet and keySet of SortedMap also
+ val sortedsets = Seq(
+ SortedSet.empty[Int], SortedSet(5), SortedSet(1,2,3,5,4),
+ SortedMap(1 -> "cod", 2 -> "herring").keySet
+ )
+ sortedsets.foreach{ set =>
+ val seta = any(set)
+ assert(set ne seta)
+ assertEquals(set, seta)
+ }
+
+ // Make sure ValueSets correctly rebuild themselves on widening with toSet
+ object WeekDay extends Enumeration {
+ type WeekDay = Value
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+ val valuesa = any(WeekDay.values)
+ assert(WeekDay.values ne valuesa)
+ assertEquals(WeekDay.values, valuesa)
+
+ // Make sure regular Map keySets do not rebuild themselves on widening with toSet
+ val mapset = Map(1 -> "cod", 2 -> "herring").keySet
+ val mapseta = any(mapset)
+ assert(mapset eq mapseta)
+ }
+}
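
The SetTests above pin down when toSet[Any] may return the receiver unchanged. A minimal sketch of the distinction, consistent with what the test asserts (object and method names are made up): a representation-specialized set such as BitSet must rebuild on widening, while HashSet can be reused as-is.

import scala.collection.immutable.{BitSet, HashSet}

object ToSetWideningDemo extends App {
  def widen[A](set: Set[A]): Set[Any] = {
    val anyset = set.toSet[Any]
    assert((anyset + "fish") contains "fish") // a widened set must accept any element
    anyset
  }

  val bits = BitSet(1, 2, 3)
  val hash = HashSet(1, 2, 3)

  println(widen(bits) eq bits) // false: a bit-packed Int set cannot hold a String, so toSet rebuilds
  println(widen(hash) eq hash) // true: HashSet's representation does not depend on the element type
}
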
diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala
index fad4e502eb..1b257aabc4 100644
--- a/test/junit/scala/collection/immutable/StreamTest.scala
+++ b/test/junit/scala/collection/immutable/StreamTest.scala
@@ -107,4 +107,20 @@ class StreamTest {
def withFilter_map_properly_lazy_in_tail: Unit = {
assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2))
}
+
+ @Test
+ def test_si9379() {
+ class Boom {
+ private var i = -1
+ def inc = {
+ i += 1
+ if (i > 1000) throw new NoSuchElementException("Boom! Too many elements!")
+ i
+ }
+ }
+ val b = new Boom
+ val s = Stream.continually(b.inc)
+ // zipped.toString must allow s to short-circuit evaluation
+ assertTrue((s, s).zipped.toString contains s.toString)
+ }
}
diff --git a/test/junit/scala/runtime/ScalaRunTimeTest.scala b/test/junit/scala/runtime/ScalaRunTimeTest.scala
index 9da197c71a..728d8c0ce9 100644
--- a/test/junit/scala/runtime/ScalaRunTimeTest.scala
+++ b/test/junit/scala/runtime/ScalaRunTimeTest.scala
@@ -67,4 +67,61 @@ class ScalaRunTimeTest {
val c = new C()
assertFalse(c.toString, isTuple(c))
}
+
+ @Test
+  def testStringOf() {
+ import ScalaRunTime.stringOf
+ import scala.collection._
+ import parallel.ParIterable
+
+ assertEquals("null", stringOf(null))
+ assertEquals( "\"\"", stringOf(""))
+
+ assertEquals("abc", stringOf("abc"))
+ assertEquals("\" abc\"", stringOf(" abc"))
+ assertEquals("\"abc \"", stringOf("abc "))
+
+ assertEquals("""Array()""", stringOf(Array.empty[AnyRef]))
+ assertEquals("""Array()""", stringOf(Array.empty[Int]))
+ assertEquals("""Array(1, 2, 3)""", stringOf(Array(1, 2, 3)))
+ assertEquals("""Array(a, "", " c", null)""", stringOf(Array("a", "", " c", null)))
+ assertEquals("""Array(Array("", 1, Array(5)), Array(1))""",
+ stringOf(Array(Array("", 1, Array(5)), Array(1))))
+
+ val map = Map(1->"", 2->"a", 3->" a", 4->null)
+ assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a, 3 -> " a", 4 -> null)""", stringOf(map))
+ assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a)""", stringOf(map, 2))
+
+ val iterable = Iterable("a", "", " c", null)
+ assertEquals(s"""${iterable.stringPrefix}(a, "", " c", null)""", stringOf(iterable))
+ assertEquals(s"""${iterable.stringPrefix}(a, "")""", stringOf(iterable, 2))
+
+ val parIterable = ParIterable("a", "", " c", null)
+ assertEquals(s"""${parIterable.stringPrefix}(a, "", " c", null)""", stringOf(parIterable))
+ assertEquals(s"""${parIterable.stringPrefix}(a, "")""", stringOf(parIterable, 2))
+
+ val traversable = new Traversable[Int] {
+ def foreach[U](f: Int => U): Unit = (0 to 3).foreach(f)
+ }
+ assertEquals(s"${traversable.stringPrefix}(0, 1, 2, 3)", stringOf(traversable))
+ assertEquals(s"${traversable.stringPrefix}(0, 1)", stringOf(traversable, 2))
+
+ val tuple1 = Tuple1(0)
+ assertEquals("(0,)", stringOf(tuple1))
+ assertEquals("(0,)", stringOf(tuple1, 0))
+
+ val tuple2 = Tuple2(0, 1)
+ assertEquals("(0,1)", stringOf(tuple2))
+ assertEquals("(0,1)", stringOf(tuple2, 0))
+
+ val tuple3 = Tuple3(0, 1, 2)
+ assertEquals("(0,1,2)", stringOf(tuple3))
+ assertEquals("(0,1,2)", stringOf(tuple3, 0))
+
+ val x = new Object {
+ override def toString(): String = "this is the stringOf string"
+ }
+ assertEquals(stringOf(x), "this is the stringOf string")
+ assertEquals(stringOf(x, 2), "this is the stringOf string")
+ }
}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
index 72f26d231d..155e0a6017 100644
--- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
+++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala
@@ -205,7 +205,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass {
def iincProdCons(): Unit = {
import Opcodes._
val m = genMethod(descriptor = "(I)I")(
- Incr(IINC, 1, 1), // producer and cosumer of local variable 1
+ Incr(IINC, 1, 1), // producer and consumer of local variable 1
VarOp(ILOAD, 1),
Op(IRETURN)
)