-rw-r--r--  CONTRIBUTING.md | 74
-rw-r--r--  README.md | 161
-rw-r--r--  build.number | 6
-rwxr-xr-x  build.xml | 7
-rw-r--r--  scripts/common | 153
-rwxr-xr-x  scripts/jobs/integrate/bootstrap (renamed from scripts/jobs/scala-release-2.11.x-build) | 79
-rwxr-xr-x  scripts/jobs/integrate/ide | 32
-rwxr-xr-x  scripts/jobs/validate/publish-core | 44
-rwxr-xr-x  scripts/jobs/validate/test | 17
-rw-r--r--  spec/03-types.md | 4
-rw-r--r--  spec/06-expressions.md | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/settings/Warnings.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 18
-rw-r--r--  src/eclipse/README.md | 23
-rw-r--r--  src/intellij-14/scala.ipr.SAMPLE | 16
-rwxr-xr-x  src/intellij-14/setup.sh | 3
-rw-r--r--  src/intellij/scala-lang.ipr.SAMPLE | 1
-rw-r--r--  src/intellij/test-osgi.iml.SAMPLE | 23
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 31
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/BitSet.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashSet.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 17
-rw-r--r--  src/library/scala/concurrent/package.scala | 74
-rw-r--r--  src/library/scala/language.scala | 10
-rw-r--r--  src/library/scala/languageFeature.scala | 10
-rw-r--r--  src/library/scala/math/Ordering.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 20
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 24
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala | 1
-rw-r--r--  test/files/jvm/serialization-new.check | 2
-rw-r--r--  test/files/jvm/serialization.check | 2
-rw-r--r--  test/files/neg/t7623.check | 21
-rw-r--r--  test/files/neg/t7623.flags | 1
-rw-r--r--  test/files/neg/t7623.scala | 38
-rw-r--r--  test/files/neg/t9041.check | 4
-rw-r--r--  test/files/neg/t9041.scala | 17
-rw-r--r--  test/files/neg/t9093.check | 6
-rw-r--r--  test/files/neg/t9093.scala | 5
-rw-r--r--  test/files/pos/t5154.scala | 9
-rw-r--r--  test/files/pos/t9050.scala | 13
-rw-r--r--  test/files/pos/t9086.scala | 8
-rw-r--r--  test/files/pos/t9123.flags | 1
-rw-r--r--  test/files/pos/t9123.scala | 10
-rw-r--r--  test/files/pos/t9135.scala | 16
-rw-r--r--  test/files/run/bitsets.check | 1
-rw-r--r--  test/files/run/t6502.check | 8
-rw-r--r--  test/files/run/t6502.scala | 70
-rw-r--r--  test/files/scalacheck/nan-ordering.scala | 16
-rw-r--r--  test/junit/scala/StringContextTest.scala | 15
-rw-r--r--  test/junit/scala/collection/IterableViewLikeTest.scala | 1
-rw-r--r--  test/junit/scala/collection/immutable/StringLikeTest.scala | 37
-rw-r--r--  test/junit/scala/collection/immutable/VectorTest.scala | 20
-rw-r--r--  test/junit/scala/collection/mutable/LinkedHashMapTest.scala | 25
-rw-r--r--  test/junit/scala/collection/mutable/LinkedHashSetTest.scala | 25
-rw-r--r--  test/junit/scala/collection/mutable/MutableListTest.scala | 37
-rw-r--r--  test/junit/scala/math/OrderingTest.scala | 61
-rw-r--r--  test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala | 5
-rw-r--r--  test/junit/scala/tools/testing/AssertThrowsTest.scala | 2
-rw-r--r--  test/junit/scala/tools/testing/AssertUtil.scala | 59
-rw-r--r--  test/junit/scala/tools/testing/AssertUtilTest.scala | 21
-rw-r--r--  versions.properties | 6
68 files changed, 1220 insertions, 242 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 1c05b4fd6b..e9505c26df 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,66 +1,52 @@
-# Scala Project & Developer Guidelines
+# Welcome! Thank you for contributing to Scala!
+We follow the standard GitHub [fork & pull](https://help.github.com/articles/using-pull-requests/#fork--pull) approach to pull requests. Just fork the official repo, develop in a branch, and submit a PR!
-These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way.
+You're always welcome to submit your PR straight away and start the discussion (without reading the rest of this wonderful doc, or the `READMEnot^H^H^H.md`). The goal of these notes is to make your experience contributing to Scala as smooth and pleasant as possible. We're happy to guide you through the process once you've submitted your PR.
-## General Workflow
+## The Scala Community
+Last year, you -- the Scala community -- matched the core team at EPFL in number of commits contributed to Scala 2.11, doubling the percentage of commits from outside EPFL/Typesafe since 2.10. Excellent work! (The split is roughly 25/25/50 for you/epfl/typesafe. By the way, the team at Typesafe is: @adriaanm, @gkossakowski, @lrytz and @retronym.)
-This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc.
+We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)!
-1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it.
-2. Before starting to work on a feature or a fix, it's good practice to ensure that:
- 1. There is a ticket for your work in the project's issue tracker. If not, create it first (perhaps given a thumbs up from the scala-internals mailing list first).
- 2. The ticket has been discussed and prioritized by the team.
-3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.)
-4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub.
-5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that a reviewer can also be an outside contributor-- members of Typesafe and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest -- the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assigning a review to a "sure win" reviewer is not a good long-term solution.
-6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me").
-7. Once the code has passed review the Pull Request can be merged into the distribution.
+This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala-internals, or tweet about it to @adriaanm.)
-## Pull Request Requirements
+## What kind of PR are you submitting?
-First, please have a look at and follow the [Pull Request Policy](https://github.com/scala/scala/wiki/Pull-Request-Policy) for guidelines on submitting a pull request to the Scala project.
+Regardless of the nature of your Pull Request, we have to ask you to sign the [Scala CLA](http://typesafe.com/contribute/cla/scala), to protect the OSS nature of the code base.
-In order for a Pull Request to be considered, it has to meet these requirements:
+### Documentation
+Whether you finally decided you couldn't stand that annoying typo anymore, you fixed the outdated code sample in some comment, or you wrote a nice, comprehensive, overview for an under-documented package, some docs for a class or the specifics about a method, your documentation improvement is very much appreciated, and we will do our best to fasttrack it.
-1. Live up to the current code standard:
- - Not violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself).
- - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) should be applied.
-2. Tests are of paramount importance.
-3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below).
+You can make these changes directly in your browser in GitHub, or follow the same process as for code. Up to you!
-If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed.
+For bigger documentation changes, you may want to poll the (scala-internals) mailing list first, to quickly gauge whether others support the direction you're taking, so there won't be any surprises when it comes to reviewing your PR.
-## Documentation
+### Code
+For bigger changes, we do recommend announcing your intentions on scala-internals first, to avoid duplicated effort, or spending a lot of time reworking something we are not able to change at this time in the release cycle, for example.
-All contributed code should come accompanied with documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. important design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible.
+The kind of code we can accept depends on the life cycle for the release you're targeting. The current maintenance release (2.11.x) cannot break source/binary compatibility, which means public APIs cannot change. It also means we are reluctant to change, e.g., type inference or implicit search, as this can have unforeseen consequences for source compatibility.
+#### Bug Fix
-## Work In Progress
+Prefix your commit title with "SI-NNNN", where https://issues.scala-lang.org/browse/SI-NNNN tracks the bug you're fixing. We also recommend naming your branch after the Jira ticket number.
-It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc..
+Please make sure the Jira ticket's fix version corresponds to the upcoming milestone for the branch your PR targets (the CI automation will automatically assign the milestone after you open the PR).
-Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labelled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature.
+#### Enhancement or New Feature
-## Creating Commits And Writing Commit Messages
+For longer-running development, likely required for this category of code contributions, we suggest you include "topic" or "wip" in your branch name, to indicate that this is work in progress, and that others should be prepared to rebase if they branch off your branch.
-Follow these guidelines when creating public commits and writing commit messages.
+Any language change (including bug fixes) must be accompanied by the relevant updates to the spec, which lives in the same repository for this reason.
-1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, every commit should be able to be used in isolation-- that is, each commit must build and pass all tests.
-2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "SI-", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commit in a single line. If the commit is a small fix, then you are done. If not, go to 3.
-3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit.
-4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time):
- * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. (Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.)
- * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this).
- * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc)
+A new language feature requires a SIP (Scala Improvement Process) proposal. For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
-Example:
+#### Summary
- SI-4032 Implicit conversion visibility affected by presence of "this"
+1. We require regression tests for bug fixes. New features and enhancements must be supported by a respectable test suite.
+2. Documentation. Yep! Also required :-)
+3. Please follow these standard code standards, though in moderation (scouts quickly learn to let sleeping dogs lie):
+ - Not violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself).
+ - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) should be applied.
- - Details 1
- - Details 2
- - Details 3
+Please also have a look at our [Pull Request Policy](https://github.com/scala/scala/wiki/Pull-Request-Policy), as well as the [Scala Hacker Guide](http://www.scala-lang.org/contribute/hacker-guide.html) by @xeno-by.
-## The Scala Improvement Process
-A new language feature requires a SIP (Scala Improvement Process) proposal. Note that significant additions to the standard library are also considered candidates for a SIP proposal.
-For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
diff --git a/README.md b/README.md
index fdc989228c..cb9701b3f2 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,157 @@
-This is the repository for the [Scala Programming Language](http://www.scala-lang.org).
+# Welcome!
+This is the official repository for the [Scala Programming Language](http://www.scala-lang.org).
- - [Report an issue](https://issues.scala-lang.org);
- - [Read about the development of the compiler and the standard library](http://docs.scala-lang.org/scala/);
- - [Check our Jenkins status](https://scala-webapps.epfl.ch/jenkins/);
+To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://typesafe.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature.
+
+For more information on building and developing the core of Scala, read on! Please also check out our [guidelines for contributing](CONTRIBUTING.md).
+
+We're still using Jira for issue reporting, so please [report any issues](https://issues.scala-lang.org) over there.
+(We would love to start using GitHub Issues, but we're too resource-constrained to take on this migration right now.)
+
+
+# Handy Links
+ - [A wealth of documentation](http://docs.scala-lang.org)
+ - [Scala CI](https://scala-ci.typesafe.com/)
+ - [Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins)
- [Download the latest nightly](https://scala-webapps.epfl.ch/jenkins/job/scala-nightly-main-master/ws/dists/latest/*zip*/latest.zip);
- - ... and contribute right here! Please, first read our [policy](http://docs.scala-lang.org/scala/pull-request-policy.html), our [development guidelines](CONTRIBUTING.md),
-and [sign the contributor's license agreement](http://typesafe.com/contribute/cla/scala).
+ - Scala mailing lists:
+ - [Compiler and standard library development](https://groups.google.com/group/scala-internals)
+ - [Users of Scala](https://groups.google.com/group/scala-user)
+ - [Scala language discussion](https://groups.google.com/group/scala-language)
+ - [Scala Improvement Process](https://groups.google.com/group/scala-sips)
+ - [Debate](https://groups.google.com/group/scala-debate)
+ - [Announcements](https://groups.google.com/group/scala-announce)
+
+# Repository structure
+
+```
+scala/
++--build.xml The main Ant build script, see also under src/build.
++--pull-binary-libs.sh Pulls binary artifacts from remote repository.
++--lib/ Pre-compiled libraries for the build.
++--src/ All sources.
+ +---/library Scala Standard Library.
+ +---/reflect Scala Reflection.
+ +---/compiler Scala Compiler.
+ +---/eclipse Eclipse project files.
+ +---/intellij-14 IntelliJ project templates.
++--scripts/ Scripts for the CI jobs (including building releases)
++--test/ The Scala test suite.
++--build/ [Generated] Build products output directory for ant.
++--dist/ [Generated] The destination folder for Scala distributions.
+```
+
+# How we roll
+
+## Requirements
+
+You'll need a Java SDK (6 or newer), Apache Ant (version 1.8.0 or above), and curl (for `./pull-binary-libs.sh`).
+
+## Git Hygiene
+
+As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 50-60 characters for the first line, wrapping subsequent ones at 80 (at most).
+
+When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base.
+
+Writing the commit message is a great sanity check that the commit is of the right size. If it does too many things, the description will be unwieldy and tedious to write. Chop it up (`git add -u --patch` and `git rebase` are your friends) and simplify!
+
+To pinpoint bugs, we often use git bisect, which is only effective when we can count on each commit building (and passing the test suite). Thus, the CI bot enforces this. Please rebase your development history into a sensible list of self-contained commits that tell the story of your bug fix or improvement. Carve them up so that the riskier bits can be reverted independently. Keep changes focussed by splitting out cleanups from refactorings from actual changes to the logic.
+
+This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one).
+
+Please do not @mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)).
+
+
+## Reviewing
+
+Please consider nominating a reviewer for your PR in the PR's description or a comment. If unsure, not to worry -- the core team will assign one for you.
+
+Your reviewer is also your mentor, who will help you rework your PR so that it meets our requirements. We strive to give timely feedback, and apologize for those times when we are overwhelmed by the volume of contributions. Please feel free to ping us. You are entitled to regular progress updates and at least a quick assessment of feasibility of a bigger PR.
+
+To help you plan your contributions, we communicate our plans on a regular basis on scala-internals, and deadlines are tracked as due dates for [GitHub milestones](https://github.com/scala/scala/milestones).
+
+Once you've established some history submitting PRs, we will invite you to become a reviewer for others's code. The main goal of this whole process, in the end, is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part!
+
+## IDE Setup
+### Eclipse
+Download the [Scala IDE bundle](http://scala-ide.org/download/sdk.html). It comes preconfigured for optimal performance.
+
+ - Run `ant init` to download some necessary jars.
+ - Import the project (in `src/eclipse`) via `File` → `Import Existing Projects into Workspace`. Check all projects and click ok.
+
+For important details on building, debugging and file encodings, please see [the excellent tutorial on scala-ide.org](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html) and the included README.md in src/eclipse.
+
+### IntelliJ 14
+Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
+
+The following steps are required to use IntelliJ IDEA on Scala trunk
+ - Run `ant init`. This will download some JARs to `./build/deps`, which are included in IntelliJ's classpath.
+ - Run src/intellij-14/setup.sh
+ - Open ./src/intellij-14/scala.ipr in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the Java 1.6 SDK.
+ (You may use a later SDK for local development, but the CI will verify against Java 6.)
+
+Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built
+directly using the STARR compiler (which is downloaded from maven, according to `starr.version` in `versions.properties`).
+
+
+## Building with Ant
+
+NOTE: we are working on migrating the build to sbt.
+
+Run `ant build-opt` to build an optimized version of the compiler.
+Verify your build using `ant test-opt`.
+
+The Scala build system is based on Apache Ant. Most required pre-compiled
+libraries are part of the repository (in 'lib/'). The following however is
+assumed to be installed on the build machine:
+
+
+### Tips and tricks
+
+Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant).
+
+ - `./pull-binary-libs.sh` [downloads](http://typesafe.artifactoryonline.com/typesafe) all binary artifacts associated with this commit.
+ - `ant -p` prints out information about the commonly used ant targets.
+ - `ant` or `ant build`: A quick compilation (to build/quick) of your changes using the locker compiler.
+
+A typical debug cycle incrementally builds quick, then uses it to compile and run the file
+`sandbox/test.scala` as follows:
+
+ - `ant && build/quick/bin/scalac -d sandbox sandbox/test.scala && build/quick/bin/scala -cp sandbox Test`
+
+We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick/bin/scala -cp sandbox` to `qs` in our shell.
+
+`ant test-opt` tests that your code is working and fit to be committed:
+
+ - Runs the test suite and bootstrapping test on quick.
+ - You can run the suite only (skipping strap) with 'ant test.suite'.
+
+`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick.
+Note: on most machines this requires more heap than is allocate by default. You can adjust the parameters with ANT_OPTS. Example command line:
+
+```
+ANT_OPTS = "-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
+```
+
+ - `ant dist` builds a distribution in 'dists/latest'.
+ - `ant all.clean` Removes all build files and all distributions.
+
+### Bootstrapping concepts
+NOTE: This is somewhat outdated, but the ideas still hold.
+
+In order to guarantee the bootstrapping of the Scala compiler, the ant build
+compiles Scala in layers. Each layer is a complete compiled Scala compiler and library.
+A superior layer is always compiled by the layer just below it. Here is a short
+description of the four layers that the build uses, from bottom to top:
+
+ - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from maven central.
+ - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`).
+ - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code.
+ - `strap`: a test layer used to check stability of the build.
+
+For each layer, the Scala library is compiled first and the compiler next.
+That means that any changes in the library can immediately be used in the
+compiler without an intermediate build. On the other hand, if building the
+library requires changes in the compiler, a new locker must be built if
+bootstrapping is still possible, or a new starr if it is not.
diff --git a/build.number b/build.number
index dc85ecb777..5f8ed6d6b6 100644
--- a/build.number
+++ b/build.number
@@ -1,9 +1,9 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
version.minor=11
-version.patch=5
+version.patch=6
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
-# Note: To build a release run ant with -Dbuild.release=true
-# To build an RC, run ant with -Dmaven.version.suffix=-RCN
+# To build a release, see scripts/jobs/scala-release-2.11.x-build
+# (normally run by the eponymous job on scala-ci.typesafe.com).
\ No newline at end of file
diff --git a/build.xml b/build.xml
index 02b98e66d8..f8e44c6f5c 100755
--- a/build.xml
+++ b/build.xml
@@ -280,7 +280,7 @@ TODO:
<!-- Pax runner -->
<property name="pax.exam.version" value="3.5.0"/><!-- Last version which supports Java 6 -->
- <property name="osgi.felix.version" value="4.0.3"/>
+ <property name="osgi.felix.version" value="4.4.0"/>
<property name="osgi.equinox.version" value="3.7.1"/>
<artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
<dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}">
@@ -294,6 +294,7 @@ TODO:
<dependency groupId="ch.qos.logback" artifactId="logback-classic" version="1.1.2"/>
<dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
</artifact:dependencies>
+ <copy-deps project="pax.exam"/>
<artifact:dependencies pathId="osgi.framework.felix">
<dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="${osgi.felix.version}"/>
@@ -1375,8 +1376,8 @@ TODO:
</target>
<target name="test.osgi" depends="test.osgi.comp">
- <if><isset property="has.java8"/><then>
- <echo message="Skipping OSGi JUnit tests on Java 8. See SI-8642"/>
+ <if><isset property="test.osgi.skip"/><then>
+ <echo message="Skipping OSGi JUnit tests"/>
</then><else>
<echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
<stopwatch name="test.osgi.timer"/>
diff --git a/scripts/common b/scripts/common
new file mode 100644
index 0000000000..b075469379
--- /dev/null
+++ b/scripts/common
@@ -0,0 +1,153 @@
+# This is for forcibly stopping the job from a subshell (see test
+# below).
+trap "exit 1" TERM
+export TOP_PID=$$
+set -e
+
+# Known problems : does not fare well with interrupted, partial
+# compilations. We should perhaps have a multi-dependency version
+# of do_i_have below
+
+LOGGINGDIR="$WORKSPACE/logs"
+mkdir -p $LOGGINGDIR
+
+unset SBT_HOME
+SBT_HOME="$WORKSPACE/.sbt"
+mkdir -p $SBT_HOME
+IVY_CACHE="$WORKSPACE/.ivy2"
+mkdir -p $IVY_CACHE
+rm -rf $IVY_CACHE/cache/org.scala-lang
+
+# temp dir where all 'non-build' operation are performed
+TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX)
+TMP_DIR="${TMP_ROOT_DIR}/tmp"
+mkdir "${TMP_DIR}"
+
+
+# detect sed version and how to enable extended regexes
+SEDARGS="-n$(if (echo "a" | sed -nE "s/a/b/" &> /dev/null); then echo E; else echo r; fi)"
+
+
+
+# :docstring test:
+# Usage: test <argument ..>
+# Executes <argument ..>, logging the launch of the command to the
+# main log file, and kills global script execution with the TERM
+# signal if the commands ends up failing.
+# DO NOT USE ON FUNCTIONS THAT DECLARE VARIABLES,
+# AS YOU'LL BE RUNNING IN A SUBSHELL AND VARIABLE DECLARATIONS WILL BE LOST
+# :end docstring:
+
+function test() {
+ echo "### $@"
+ "$@"
+ status=$?
+ if [ $status -ne 0 ]; then
+ say "### ERROR with $1"
+ kill -s TERM $TOP_PID
+ fi
+}
+
+# :docstring say:
+# Usage: say <argument ..>
+# Prints <argument ..> to both console and the main log file.
+# :end docstring:
+
+function say(){
+ (echo "$@") | tee -a $LOGGINGDIR/compilation-$SCALADATE-$SCALAHASH.log
+}
+
+# General debug logging
+# $* - message
+function debug () {
+ echo "----- $*"
+}
+
+function parseScalaProperties(){
+ propFile="$baseDir/$1"
+ if [ ! -f $propFile ]; then
+ echo "Property file $propFile not found."
+ exit 1
+ else
+ awk -f "$scriptsDir/readproperties.awk" "$propFile" > "$propFile.sh"
+ . "$propFile.sh" # yeah yeah, not that secure, improvements welcome (I tried, but bash made me cry again)
+ fi
+}
+
+
+## TAKEN FROM UBER-BUILD, except that it "returns" (via $RES) true/false
+# Check if an artifact is available
+# $1 - groupId
+# $2 - artifacId
+# $3 - version
+# $4 - extra repository to look in (optional)
+# return value in $RES
+function checkAvailability () {
+ pushd "${TMP_DIR}"
+ rm -rf *
+
+# pom file for the test project
+ cat > pom.xml << EOF
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.typesafe</groupId>
+ <artifactId>typesafeDummy</artifactId>
+ <packaging>war</packaging>
+ <version>1.0-SNAPSHOT</version>
+ <name>Dummy</name>
+ <url>http://127.0.0.1</url>
+ <dependencies>
+ <dependency>
+ <groupId>$1</groupId>
+ <artifactId>$2</artifactId>
+ <version>$3</version>
+ </dependency>
+ </dependencies>
+ <repositories>
+ <repository>
+ <id>sonatype.snapshot</id>
+ <name>Sonatype maven snapshot repository</name>
+ <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+ <snapshots>
+ <updatePolicy>daily</updatePolicy>
+ </snapshots>
+ </repository>
+EOF
+
+ if [ -n "$4" ]
+ then
+# adds the extra repository
+ cat >> pom.xml << EOF
+ <repository>
+ <id>extrarepo</id>
+ <name>extra repository</name>
+ <url>$4</url>
+ </repository>
+EOF
+ fi
+
+ cat >> pom.xml << EOF
+ </repositories>
+</project>
+EOF
+
+ set +e
+ mvn "${MAVEN_ARGS[@]}" compile &> "${TMP_DIR}/mvn.log"
+ RES=$?
+ # Quiet the maven, but allow diagnosing problems.
+ grep -i downloading "${TMP_DIR}/mvn.log"
+ grep -i exception "${TMP_DIR}/mvn.log"
+ grep -i error "${TMP_DIR}/mvn.log"
+ set -e
+
+# log the result
+ if [ ${RES} == 0 ]
+ then
+ debug "$1:$2:jar:$3 found !"
+ RES=true
+ else
+ debug "$1:$2:jar:$3 not found !"
+ RES=false
+ fi
+ popd
+}
diff --git a/scripts/jobs/scala-release-2.11.x-build b/scripts/jobs/integrate/bootstrap
index 21fbb8fa76..46d610018c 100755
--- a/scripts/jobs/scala-release-2.11.x-build
+++ b/scripts/jobs/integrate/bootstrap
@@ -1,4 +1,9 @@
#!/bin/bash -e
+# TODO: different scripts for the different phases -- usually we don't need to bootstrap the modules,
+# since we can use the previous version of scala for STARR as well as for compiling the modules (assuming it's binary compatible)
+# We should move away from the complicated bootstrap and set up our release schedule so we always have a previous build that satisfies these criteria.
+# (Potentially trivially, by splitting up this script, and publishing locker as if it were a real release.)
+
# requirements:
# sbtCmd must point to sbt from sbt-extras (this is the standard on the Scala jenkins, so we only support that one)
# - ~/.sonatype-curl that consists of user = USER:PASS
@@ -66,7 +71,6 @@
# set to something besides the default to build nightly snapshots of the modules instead of some tagged version
moduleVersioning=${moduleVersioning-"versions.properties"}
-baseDir=${WORKSPACE-`pwd`}
publishPrivateTask=${publishPrivateTask-"publish"}
publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"}
publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"}
@@ -88,67 +92,11 @@ forceRebuild=${forceRebuild-no}
antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak)
clean="clean" # TESTING leave empty to speed up testing
-scriptsDir="$WORKSPACE/scripts"
-
-# This is for forcibly stopping the job from a subshell (see test
-# below).
-trap "exit 1" TERM
-export TOP_PID=$$
-set -e
-
-# Known problems : does not fare well with interrupted, partial
-# compilations. We should perhaps have a multi-dependency version
-# of do_i_have below
-
-LOGGINGDIR="$WORKSPACE/logs"
-mkdir -p $LOGGINGDIR
-
-unset SBT_HOME
-SBT_HOME="$WORKSPACE/.sbt"
-mkdir -p $SBT_HOME
-IVY_CACHE="$WORKSPACE/.ivy2"
-mkdir -p $IVY_CACHE
-rm -rf $IVY_CACHE/cache/org.scala-lang
-
-# temp dir where all 'non-build' operation are performed
-TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX)
-TMP_DIR="${TMP_ROOT_DIR}/tmp"
-mkdir "${TMP_DIR}"
-
-
-# detect sed version and how to enable extended regexes
-SEDARGS="-n$(if (echo "a" | sed -nE "s/a/b/" &> /dev/null); then echo E; else echo r; fi)"
-
-
-# :docstring test:
-# Usage: test <argument ..>
-# Executes <argument ..>, logging the launch of the command to the
-# main log file, and kills global script execution with the TERM
-# signal if the commands ends up failing.
-# DO NOT USE ON FUNCTIONS THAT DECLARE VARIABLES,
-# AS YOU'LL BE RUNNING IN A SUBSHELL AND VARIABLE DECLARATIONS WILL BE LOST
-# :end docstring:
-
-function test() {
- echo "### $@"
- "$@"
- status=$?
- if [ $status -ne 0 ]; then
- say "### ERROR with $1"
- kill -s TERM $TOP_PID
- fi
-}
-
-# :docstring say:
-# Usage: say <argument ..>
-# Prints <argument ..> to both console and the main log file.
-# :end docstring:
-
-function say(){
- (echo "$@") | tee -a $LOGGINGDIR/compilation-$SCALADATE-$SCALAHASH.log
-}
+baseDir=${WORKSPACE-`pwd`}
+scriptsDir="$baseDir/scripts"
+. $scriptsDir/common
# we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala
# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)...
@@ -163,17 +111,6 @@ mkdir -p $baseDir/resolutionScratch_
privateCred="private-repo"
privateRepo="http://private-repo.typesafe.com/typesafe/scala-release-temp/"
-function parseScalaProperties(){
- propFile="$baseDir/$1"
- if [ ! -f $propFile ]; then
- echo "Property file $propFile not found."
- exit 1
- else
- awk -f "$scriptsDir/readproperties.awk" "$propFile" > "$propFile.sh"
- . "$propFile.sh" # yeah yeah, not that secure, improvements welcome (I tried, but bash made me cry again)
- fi
-}
-
##### git
gfxd() {
git clean -fxd # TESTING
diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide
new file mode 100755
index 0000000000..5c1e6199e3
--- /dev/null
+++ b/scripts/jobs/integrate/ide
@@ -0,0 +1,32 @@
+#!/bin/bash -e
+# requires checkout: root is a scala checkout with which to integrate (actually, only required file is versions.properties, as documented below)
+# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout),
+# requires files: $baseDir/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...)
+
+# TODO: remove when integration is up and running
+if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi
+
+baseDir=${WORKSPACE-`pwd`}
+uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"}
+uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf"
+
+uberBuildDir="$baseDir/uber-build/"
+
+cd $baseDir
+if [[ -d $uberBuildDir ]]; then
+ ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd )
+else
+ git clone $uberBuildUrl
+fi
+
+echo "maven.version.number=$scalaVersion" >> versions.properties
+
+# pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide)
+# the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build)
+BASEDIR="$baseDir" prRepoUrl="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\
+ $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion
+
+# uber-build puts its local repo under target/m2repo
+# wipe the org/scala-lang part, which otherwise just keeps
+# growing and growing due to the -$sha-SNAPSHOT approach
+[[ -d $baseDir/target/m2repo/org/scala-lang ]] && rm -rf $baseDir/target/m2repo/org/scala-lang
diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core
new file mode 100755
index 0000000000..9dff5a34b0
--- /dev/null
+++ b/scripts/jobs/validate/publish-core
@@ -0,0 +1,44 @@
+#!/bin/bash -e
+# This script publishes the core of Scala to maven for use as locker downstream,
+# and saves the relevant properties used in its build artifacts, versions.properties.
+# (This means we'll use locker instead of quick downstream in dbuild.
+# The only downside is that backend improvements don't improve compiler performance itself until they are in STARR).
+# The version is suffixed with "-${sha:0:7}-SNAPSHOT"
+
+baseDir=${WORKSPACE-`pwd`}
+scriptsDir="$baseDir/scripts"
+. $scriptsDir/common
+
+case $prDryRun in
+ yep)
+ echo "DRY RUN"
+ mkdir -p build/pack ; mkdir -p dists/maven/latest
+ ;;
+ *)
+ sha=$(git rev-parse HEAD) # TODO: warn if $repo_ref != $sha (we shouldn't do PR validation using symbolic gitrefs)
+ echo "sha/repo_ref == $sha/$repo_ref ?"
+
+ parseScalaProperties build.number
+
+ ./pull-binary-libs.sh
+ # "noyoudont" is there juuuust in case
+ antDeployArgs="-Dmaven.version.suffix=\"-${sha:0:7}-SNAPSHOT\" -Dremote.snapshot.repository=$prRepoUrl -Drepository.credentials.id=pr-scala -Dremote.release.repository=noyoudont"
+
+ echo ">>> Getting Scala version number."
+ ant -q $antDeployArgs init
+ parseScalaProperties buildcharacter.properties # produce maven_version_number
+
+ echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl."
+ checkAvailability "org.scala-lang" "scala-library" "${maven_version_number}" $prRepoUrl; libraryAvailable=$RES
+ checkAvailability "org.scala-lang" "scala-reflect" "${maven_version_number}" $prRepoUrl; reflectAvailable=$RES
+ checkAvailability "org.scala-lang" "scala-compiler" "${maven_version_number}" $prRepoUrl; compilerAvailable=$RES
+
+ if $libraryAvailable && $reflectAvailable && $compilerAvailable; then
+ echo "Scala core already built!"
+ else
+ ant $antDeployArgs $antBuildArgs publish-opt-nodocs
+ fi
+
+ mv buildcharacter.properties jenkins.properties # parsed by the jenkins job
+ ;;
+esac
diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test
new file mode 100755
index 0000000000..c1c02c80cb
--- /dev/null
+++ b/scripts/jobs/validate/test
@@ -0,0 +1,17 @@
+#!/bin/bash -e
+
+case $prDryRun in
+ yep)
+ echo "DRY RUN"
+ ;;
+ *)
+ ./pull-binary-libs.sh
+
+ # build quick using STARR built upstream, as specified by scalaVersion
+ # (in that sense it's locker, since it was built with starr by that upstream job)
+ ant -Dstarr.version=$scalaVersion \
+ -Dscalac.args.optimise=-optimise \
+ -Dlocker.skip=1 -Dstarr.use.released=1 -Dextra.repo.url=$prRepoUrl \
+ $testExtraArgs ${testTarget-test.core docs.done}
+ ;;
+esac
\ No newline at end of file
diff --git a/spec/03-types.md b/spec/03-types.md
index d067d45ab2..5658e15f44 100644
--- a/spec/03-types.md
+++ b/spec/03-types.md
@@ -167,8 +167,8 @@ SimpleType ::= SimpleType TypeArgs
TypeArgs ::= ‘[’ Types ‘]’
```
-A parameterized type $T[ U_1 , \ldots , U_n ]$ consists of a type
-designator $T$ and type parameters $U_1 , \ldots , U_n$ where
+A parameterized type $T[ T_1 , \ldots , T_n ]$ consists of a type
+designator $T$ and type parameters $T_1 , \ldots , T_n$ where
$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type
parameters $a_1 , \ldots , a_n$.
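For readers less used to the spec's notation, here is a small Scala sketch (illustrative only, not part of the patch) of what a parameterized type T[T_1, ..., T_n] looks like in source code:

```scala
// List is a type constructor taking one type parameter,
// so List[Int] is the parameterized type T[T1] with T = List and T1 = Int.
val xs: List[Int] = List(1, 2, 3)

// Map takes two type parameters: Map[String, Int] is T[T1, T2].
val m: Map[String, Int] = Map("one" -> 1)
```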
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index bb6cc2a89a..133ec3c8e5 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -1122,7 +1122,7 @@ is `scala.Nothing`.
## Try Expressions
```ebnf
-Expr1 ::= `try' `{' Block `}' [`catch' `{' CaseClauses `}']
+Expr1 ::= `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}']
[`finally' Expr]
```
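The grammar change above admits a bare expression as the body of `try`, not just a braced block. A minimal Scala sketch (illustrative, not from the patch):

```scala
// With the revised production, the braces around the try body are optional.
def parseInt(s: String): Int =
  try s.toInt
  catch { case _: NumberFormatException => 0 }
```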
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index f1517e56a0..96939e616c 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -425,11 +425,10 @@ trait MarkupParsers {
if (ch != '/') ts append xPattern // child
else return false // terminate
- case '{' => // embedded Scala patterns
- while (ch == '{') {
- nextch()
+ case '{' if xCheckEmbeddedBlock => // embedded Scala patterns, if not double brace
+ do {
ts ++= xScalaPatterns
- }
+ } while (xCheckEmbeddedBlock)
assert(!xEmbeddedBlock, "problem with embedded block")
case SU =>
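For context, this parser change concerns embedded Scala patterns inside XML patterns. A rough sketch of the construct (example is mine and assumes the scala-xml module is on the classpath):

```scala
// An XML pattern with an embedded Scala pattern in braces:
// the `{ children @ _* }` block is the kind of thing xScalaPatterns consumes.
val node = <a><b/><c/></a>
node match {
  case <a>{ children @ _* }</a> => println(s"${children.length} children")
  case _                        => println("no match")
}
```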
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index d174dc86c7..41ce0837cb 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -28,10 +28,11 @@ trait Warnings {
val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.")
// Experimental lint warnings that are turned off, but which could be turned on programmatically.
- // These warnings are said to blind those who dare enable them.
- // They are not activated by -Xlint and can't be enabled on the command line.
- val warnValueOverrides = { // Currently turned off as experimental. Created using constructor (new BS), so not available on the command line.
- val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation")
+ // They are not activated by -Xlint and can't be enabled on the command line because they are not
+ // created using the standard factory methods.
+
+ val warnValueOverrides = {
+ val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation.")
flag.value = false
flag
}
@@ -53,10 +54,11 @@ trait Warnings {
val TypeParameterShadow = LintWarning("type-parameter-shadow", "A local type parameter shadows a type already in scope.")
val PolyImplicitOverload = LintWarning("poly-implicit-overload", "Parameterized overloaded implicit methods are not visible as view bounds.")
val OptionImplicit = LintWarning("option-implicit", "Option.apply used implicit view.")
- val DelayedInitSelect = LintWarning("delayedinit-select", "Selecting member of DelayedInit")
+ val DelayedInitSelect = LintWarning("delayedinit-select", "Selecting member of DelayedInit.")
val ByNameRightAssociative = LintWarning("by-name-right-associative", "By-name parameter of right associative operator.")
val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.")
val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.")
+ val StarsAlign = LintWarning("stars-align", "Pattern sequence wildcard must align with sequence component.")
def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]]
}
@@ -77,6 +79,7 @@ trait Warnings {
def warnByNameRightAssociative = lint contains ByNameRightAssociative
def warnPackageObjectClasses = lint contains PackageObjectClasses
def warnUnsoundMatch = lint contains UnsoundMatch
+ def warnStarsAlign = lint contains StarsAlign
// Lint warnings that are currently -Y, but deprecated in that usage
@deprecated("Use warnAdaptedArgs", since="2.11.2")
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index d35aad964d..b2f2516b5b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -239,6 +239,11 @@ trait Interface extends ast.TreeDSL {
case Ident(_) => subst(from, to)
case _ => super.transform(tree)
}
+ tree1 match {
+ case _: DefTree =>
+ tree1.symbol.modifyInfo(_.substituteTypes(from, toTypes))
+ case _ =>
+ }
tree1.modifyType(_.substituteTypes(from, toTypes))
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index 8924394b72..2753baa51d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -110,8 +110,10 @@ trait ScalacPatternExpanders {
err("Star pattern must correspond with varargs or unapplySeq")
else if (elementArity < 0)
arityError("not enough")
- else if (elementArity > 0 && !extractor.hasSeq)
+ else if (elementArity > 0 && !isSeq)
arityError("too many")
+ else if (settings.warnStarsAlign && isSeq && productArity > 0 && (elementArity > 0 || !isStar))
+ warn("A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).")
aligned
}
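A small sketch of the kind of pattern the new `stars-align` lint flags (assuming `-Xlint:stars-align` is enabled; the example code is illustrative, not from the patch):

```scala
// A case class with a repeated parameter is extracted via unapplySeq.
case class Cmd(name: String, args: String*)

def show(c: Cmd): String = c match {
  case Cmd(n, "-v", rest @ _*) => s"$n (verbose)"          // warns: matches elements of the sequence
  case Cmd(n, as @ _*)         => s"$n, ${as.size} args"   // fine: sequence matched only by _*
}
```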
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index e6fa9a0142..e4255e5333 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -825,6 +825,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
orElse { _ =>
val resetTree = resetAttrs(original)
+ resetTree match {
+ case treeInfo.Applied(fun, targs, args) =>
+ if (fun.symbol != null && fun.symbol.isError)
+ // SI-9041 Without this, we leak error symbols past the typer!
+ // because the fallback typechecking notices the error-symbol,
+ // refuses to re-attempt typechecking, and presumes that someone
+ // else was responsible for issuing the related type error!
+ fun.setSymbol(NoSymbol)
+ case _ =>
+ }
debuglog(s"fallback on implicits: ${tree}/$resetTree")
val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
@@ -1177,7 +1187,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
- if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
+ if (mode.typingExprNotFun && pt.typeSymbol == UnitClass && !tree.tpe.isInstanceOf[MethodType])
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
@@ -1536,7 +1546,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val dummy = context.outer.owner.newLocalDummy(templ.pos)
+ // SI-9086 The position of this symbol is material: implicit search will avoid triggering
+ // cyclic errors in an implicit search in argument to the super constructor call on
+ // account of the "ignore symbols without complete info that succeed the implicit search"
+ // in this source file. See `ImplicitSearch#isValid` and `ImplicitInfo#isCyclicOrErroneous`.
+ val dummy = context.outer.owner.newLocalDummy(context.owner.pos)
val cscope = context.outer.makeNewScope(ctor, dummy)
if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file
val cbody2 = { // called both during completion AND typing.
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 5311651db5..03c7403b04 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -1,28 +1,9 @@
Eclipse project files
=====================
-The following points describe how to get Scala to run in Eclipse:
+The following points describe how to get Scala to run in Eclipse. Please also take a look at the [excellent tutorial on scala-ide.org](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html).
-0. To get Scala to work inside of Eclipse Kepler it is necessary to build the Scala IDE by your own
-because for the moment there is no update site provided for the newest development version
-of Scala. To do so enter the following commands one after the other:
-
- git clone https://github.com/scala-ide/scala-ide.git
- cd scala-ide
- ./build-all.sh clean install -Pscala-2.11.x -Peclipse-kepler -DskipTests
-
- After that you have an update site in `scala-ide/org.scala-ide.sdt.update-site/target/site`, which needs to be
-installed in Eclipse.
-
-0. The second thing that needs to be done is building Scala in order to get all necessary
-dependencies. To do that simply enter
-
- ant
-
- and wait until it is completed. To verify that everything has been built successfully, execute the REPL that can be found
-at `scala/build/pack/bin/scala`.
-
-0. Import all projects inside of Eclipse by choosing `File/Import Existing Projects`
+0. Import all projects into a [very recent version of Scala IDE for Eclipse](http://scala-ide.org/download/nightly.html) by choosing `File/Import Existing Projects`
and navigate to `scala/src/eclipse`. Check all projects and click ok.
0. You need to define a `path variable` inside Eclipse. Define `SCALA_BASEDIR` in
diff --git a/src/intellij-14/scala.ipr.SAMPLE b/src/intellij-14/scala.ipr.SAMPLE
index 7c2022f3a9..1e3d07466d 100644
--- a/src/intellij-14/scala.ipr.SAMPLE
+++ b/src/intellij-14/scala.ipr.SAMPLE
@@ -233,14 +233,14 @@
<library name="starr" type="Scala">
<properties>
<compiler-classpath>
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-2.11.2.jar" />
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar" />
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-#scala-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-#scala-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-#scala-version#.jar" />
</compiler-classpath>
</properties>
<CLASSES>
- <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-library-#scala-version#.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-#scala-version#.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
@@ -248,9 +248,9 @@
<library name="starr-no-deps" type="Scala">
<properties>
<compiler-classpath>
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-2.11.2.jar" />
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar" />
- <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-#scala-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-#scala-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-#scala-version#.jar" />
</compiler-classpath>
</properties>
<CLASSES />
diff --git a/src/intellij-14/setup.sh b/src/intellij-14/setup.sh
index ec303778ed..cf08898f24 100755
--- a/src/intellij-14/setup.sh
+++ b/src/intellij-14/setup.sh
@@ -12,3 +12,6 @@ for f in "$SCRIPT_DIR"/*.SAMPLE; do
g=${f%.SAMPLE}
cp $f $g
done
+
+SCALA_VERSION="`cat $SCRIPT_DIR/../../versions.properties | grep 'starr.version' | awk '{split($0,a,"="); print a[2]}'`"
+sed "s/#scala-version#/$SCALA_VERSION/g" $SCRIPT_DIR/scala.ipr.SAMPLE > $SCRIPT_DIR/scala.ipr \ No newline at end of file
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index c0614c946c..0cd3fdae6a 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -218,6 +218,7 @@
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
<module fileurl="file://$PROJECT_DIR$/test-junit.iml" filepath="$PROJECT_DIR$/test-junit.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test-osgi.iml" filepath="$PROJECT_DIR$/test-osgi.iml" />
</modules>
</component>
<component name="ProjectResources">
diff --git a/src/intellij/test-osgi.iml.SAMPLE b/src/intellij/test-osgi.iml.SAMPLE
new file mode 100644
index 0000000000..a589aaa0a9
--- /dev/null
+++ b/src/intellij/test-osgi.iml.SAMPLE
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test/osgi">
+ <sourceFolder url="file://$MODULE_DIR$/../../test/osgi/src" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="junit" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="scala-sdk" level="project" />
+ <orderEntry type="library" scope="PROVIDED" name="pax.exam-deps" level="project" />
+ </component>
+</module>
\ No newline at end of file
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 91ab1f6ac2..7643b84a8b 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -179,6 +179,7 @@ self =>
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
+ * The "sliding window" step is 1 by default.
* @see [[scala.collection.Iterator]], method `sliding`
*
* @param size the number of elements per group
@@ -194,7 +195,7 @@ self =>
*
* @param size the number of elements per group
* @param step the distance between the first elements of successive
- * groups (defaults to 1)
+ * groups
* @return An iterator producing ${coll}s of size `size`, except the
* last and the only element will be truncated if there are
* fewer elements than size.
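For reference, a small illustration of the documented behaviour (a sketch of the standard `sliding` method described above, not part of the patch):

    List(1, 2, 3, 4, 5).sliding(3).toList
    // List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5))   -- step defaults to 1
    List(1, 2, 3, 4, 5).sliding(3, 2).toList
    // List(List(1, 2, 3), List(3, 4, 5))                  -- windows start 2 apart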
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index f0daaf25a5..1ead894faf 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -10,7 +10,7 @@ package scala
package collection
package immutable
-import mutable.Builder
+import mutable.{ ArrayBuilder, Builder }
import scala.util.matching.Regex
import scala.math.ScalaNumber
import scala.reflect.ClassTag
@@ -203,8 +203,33 @@ self =>
private def escape(ch: Char): String = "\\Q" + ch + "\\E"
- @throws(classOf[java.util.regex.PatternSyntaxException])
- def split(separator: Char): Array[String] = toString.split(escape(separator))
+ def split(separator: Char): Array[String] = {
+ val thisString = toString
+ var pos = thisString.indexOf(separator)
+
+ if (pos != -1) {
+ val res = new ArrayBuilder.ofRef[String]
+
+ var prev = 0
+ do {
+ res += thisString.substring(prev, pos)
+ prev = pos + 1
+ pos = thisString.indexOf(separator, prev)
+ } while (pos != -1)
+
+ if (prev != thisString.size)
+ res += thisString.substring(prev, thisString.size)
+
+ val initialResult = res.result()
+ pos = initialResult.length
+ while (pos > 0 && initialResult(pos - 1).isEmpty) pos = pos - 1
+ if (pos != initialResult.length) {
+ val trimmed = new Array[String](pos)
+ Array.copy(initialResult, 0, trimmed, 0, pos)
+ trimmed
+ } else initialResult
+ } else Array[String](thisString)
+ }
@throws(classOf[java.util.regex.PatternSyntaxException])
def split(separators: Array[Char]): Array[String] = {
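A quick sketch of the behaviour the new single-character `split` is meant to have, mirroring `java.lang.String#split` and matching the StringLikeTest cases added further down: trailing empty strings are dropped, while leading and interior ones are kept.

    "abcd".split('d')           // Array("abc"), not Array("abc", "")
    "abccc".split('c')          // Array("ab")
    "xxx".split('x')            // Array() (empty), not Array("", "", "", "")
    "".split('x')               // Array("")
    "--ch--omp--".split('-')    // Array("", "", "ch", "", "omp")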
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index c7da447f72..47a623a616 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -215,7 +215,7 @@ override def companion: GenericCompanion[Vector] = Vector
import Vector.{Log2ConcatFaster, TinyAppendFaster}
if (that.isEmpty) this.asInstanceOf[That]
else {
- val again = if (!that.isTraversableAgain) that.toVector else that
+ val again = if (!that.isTraversableAgain) that.toVector else that.seq
again.size match {
// Often it's better to append small numbers of elements (or prepend if RHS is a vector)
case n if n <= TinyAppendFaster || n < (this.size >> Log2ConcatFaster) =>
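The added `.seq` matters when the right-hand side is a parallel collection (SI-9072, covered by the new VectorTest below); a sketch of the previously fragile pattern:

    val par = (0 to 1).par
    val v   = Vector.empty ++ par   // with the fix, always a Vector of size 2;
    assert(v.size == 2)             // previously this could intermittently mis-build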
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index faa4155317..78150b5e88 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -160,6 +160,9 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
*
* @return an immutable set containing all the elements of this set.
*/
+ @deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " +
+ "BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " +
+ "immutability of the result.", "2.11.6")
def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems)
override def clone(): BitSet = {
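A minimal sketch of the shared-state hazard the deprecation message warns about, assuming (as the name `fromBitMaskNoCopy` suggests) that the underlying Array[Long] is shared once the set spans more than two words, i.e. contains a value of 128 or greater:

    val m = scala.collection.mutable.BitSet(200)
    val i = m.toImmutable       // backed by the same Array[Long] as m
    m += 255                    // mutates a word that i also reads
    i(255)                      // now true, so the "immutable" set has changed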
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index b64504be3d..275f490675 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -160,6 +160,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
override def clear() {
clearTable()
firstEntry = null
+ lastEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 1768c946ed..756a2f73c1 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -112,6 +112,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
override def clear() {
clearTable()
firstEntry = null
+ lastEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
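A sketch of what the one-line fix to clear() guards against in both LinkedHashMap and LinkedHashSet (SI-9095, per the tests added below): without resetting lastEntry, a cleared collection still holds a reference into its old entry chain. The Probe subclass below mirrors the new tests and only exists to expose the protected field:

    class Probe extends scala.collection.mutable.LinkedHashSet[String] {
      def lastItemRef = lastEntry       // expose the protected field, as the tests do
    }
    val s = new Probe
    s += "a"; s += "b"
    s.clear()
    assert(s.lastItemRef == null)       // holds with the fix; previously still pointed at "b"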
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index b852a4747b..646023f469 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -13,7 +13,6 @@ package mutable
import generic._
import immutable.{List, Nil}
-// !!! todo: convert to LinkedListBuffer?
/**
* This class is used internally to represent mutable lists. It is the
* basis for the implementation of the class `Queue`.
@@ -113,9 +112,21 @@ extends AbstractSeq[A]
}
}
- /** Returns an iterator over all elements of this list.
+ /** Returns an iterator over up to `length` elements of this list.
*/
- override def iterator: Iterator[A] = first0.iterator
+ override def iterator: Iterator[A] = if (isEmpty) Iterator.empty else
+ new AbstractIterator[A] {
+ var elems = first0
+ var count = len
+ def hasNext = count > 0 && elems.nonEmpty
+ def next() = {
+ if (!hasNext) throw new NoSuchElementException
+ count = count - 1
+ val e = elems.elem
+ elems = if (count == 0) null else elems.next
+ e
+ }
+ }
override def last = {
if (isEmpty) throw new NoSuchElementException("MutableList.empty.last")
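A short sketch of why the iterator is now bounded by `len` (SI-8976, exercised by MutableListTest further down): a `tail` view shares its nodes with the original list, so appending to the original must not leak extra elements into the view's iterator.

    import scala.collection.mutable.MutableList
    val xs = MutableList(1, 2, 3)
    val tl = xs.tail                          // a view of length 2 sharing xs's nodes
    xs += 5
    assert(tl.iterator.length == tl.length)   // 2, not 3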
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 4843d28679..d159dda414 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -12,6 +12,75 @@ import scala.concurrent.duration.Duration
import scala.annotation.implicitNotFound
/** This package object contains primitives for concurrent and parallel programming.
+ *
+ * == Guide ==
+ *
+ * A more detailed guide to Futures and Promises, including discussion and examples
+ * can be found at
+ * [[http://docs.scala-lang.org/overviews/core/futures.html]].
+ *
+ * == Common Imports ==
+ *
+ * When working with Futures, you will often find that importing the whole concurrent
+ * package is convenient; furthermore, you are likely to need an implicit ExecutionContext
+ * in scope for many operations involving Futures and Promises:
+ *
+ * {{{
+ * import scala.concurrent._
+ * import ExecutionContext.Implicits.global
+ * }}}
+ *
+ * == Specifying Durations ==
+ *
+ * Operations often require a duration to be specified. A duration DSL is available
+ * to make defining these easier:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * val d: Duration = 10.seconds
+ * }}}
+ *
+ * == Using Futures For Non-blocking Computation ==
+ *
+ * Basic use of futures is easy with the factory method on Future, which executes a
+ * provided function asynchronously, handing you back a future result of that function
+ * without blocking the current thread. In order to create the Future you will need
+ * either an implicit or explicit ExecutionContext to be provided:
+ *
+ * {{{
+ * import scala.concurrent._
+ * import ExecutionContext.Implicits.global // implicit execution context
+ *
+ * val firstZebra: Future[Int] = Future {
+ * val source = scala.io.Source.fromFile("/etc/dictionaries-common/words")
+ * source.toSeq.indexOfSlice("zebra")
+ * }
+ * }}}
+ *
+ * == Avoid Blocking ==
+ *
+ * Although blocking is possible in order to await results (with a mandatory timeout duration):
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * Await.result(firstZebra, 10.seconds)
+ * }}}
+ *
+ * and although this is sometimes necessary, in particular for testing purposes, blocking
+ * is in general discouraged when working with Futures and concurrency, in order to avoid
+ * potential deadlocks and to improve performance. Instead, use callbacks or combinators to
+ * remain in the future domain:
+ *
+ * {{{
+ * val animalRange: Future[Int] = for {
+ * aardvark <- firstAardvark
+ * zebra <- firstZebra
+ * } yield zebra - aardvark
+ *
+ * animalRange.onSuccess {
+ * case x if x > 500000 => println("It's a long way from Aardvark to Zebra")
+ * }
+ * }}}
*/
package object concurrent {
type ExecutionException = java.util.concurrent.ExecutionException
@@ -70,6 +139,11 @@ package concurrent {
/**
* `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
+ *
+ * While occasionally useful, e.g. for testing, it is recommended that you avoid Await
+ * when possible, in favor of callbacks and combinators like onComplete, and use in
+ * for-comprehensions. Await will block the thread on which it runs, and could cause
+ * performance and deadlock issues.
*/
object Await {
/**
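For completeness, a hedged sketch of the non-blocking style the new documentation recommends; `firstZebra` stands in for the Future built in the Scaladoc example above, and the computation here is a placeholder:

    import scala.concurrent._
    import scala.concurrent.duration._
    import scala.util.{ Success, Failure }
    import ExecutionContext.Implicits.global

    val firstZebra: Future[Int] = Future(42)        // placeholder computation
    firstZebra.onComplete {
      case Success(i)  => println(s"zebra found at index $i")
      case Failure(ex) => println(s"lookup failed: $ex")
    }
    // Await only as a last resort (e.g. in tests), and always with a timeout:
    // Await.result(firstZebra, 10.seconds)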
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index c638f531bb..2eb5514a18 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
/**
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
index 1f411c412a..51118b43be 100644
--- a/src/library/scala/languageFeature.scala
+++ b/src/library/scala/languageFeature.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
import scala.annotation.meta
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 0d7ea8bce2..827cccc77e 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -284,6 +284,9 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x)
override def lt(x: Float, y: Float): Boolean = outer.lt(y, x)
override def gt(x: Float, y: Float): Boolean = outer.gt(y, x)
+ override def min(x: Float, y: Float): Float = outer.max(x, y)
+ override def max(x: Float, y: Float): Float = outer.min(x, y)
+
}
}
implicit object Float extends FloatOrdering
@@ -309,6 +312,8 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x)
override def lt(x: Double, y: Double): Boolean = outer.lt(y, x)
override def gt(x: Double, y: Double): Boolean = outer.gt(y, x)
+ override def min(x: Double, y: Double): Double = outer.max(x, y)
+ override def max(x: Double, y: Double): Double = outer.min(x, y)
}
}
implicit object Double extends DoubleOrdering
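The intent of the overrides above, in sketch form (SI-9077, verified more thoroughly by the new OrderingTest and the nan-ordering scalacheck changes below): a reversed ordering must swap min and max rather than inherit them.

    val O = Ordering.Double
    val R = O.reverse
    assert(R.min(1.0, 2.0) == 2.0)   // == O.max(1.0, 2.0)
    assert(R.max(1.0, 2.0) == 1.0)   // == O.min(1.0, 2.0)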
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index e91bfadc85..d5fc52abbf 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -2054,7 +2054,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* where it is the outer class of the enclosing class.
*/
final def outerClass: Symbol =
- if (owner.isClass) owner
+ if (this == NoSymbol) {
+ // ideally we shouldn't get here, but it's better to harden against this than suffer the infinite loop in SI-9133
+ devWarningDumpStack("NoSymbol.outerClass", 15)
+ NoSymbol
+ } else if (owner.isClass) owner
else if (isClassLocalToConstructor) owner.enclClass.outerClass
else owner.outerClass
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 35de3adff6..fd918b8595 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1576,6 +1576,7 @@ trait Trees extends api.Trees {
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
+ private var mutatedSymbols: List[Symbol] = Nil
override def transform(tree: Tree): Tree = {
def subst(from: List[Symbol], to: List[Symbol]) {
if (!from.isEmpty)
@@ -1594,6 +1595,7 @@ trait Trees extends api.Trees {
|TreeSymSubstituter: updated info of symbol ${tree.symbol}
| Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
| New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ mutatedSymbols ::= tree.symbol
tree.symbol updateInfo newInfo
}
case _ =>
@@ -1613,7 +1615,23 @@ trait Trees extends api.Trees {
} else
super.transform(tree)
}
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
+ def apply[T <: Tree](tree: T): T = {
+ val tree1 = transform(tree)
+ invalidateSingleTypeCaches(tree1)
+ tree1.asInstanceOf[T]
+ }
+ private def invalidateSingleTypeCaches(tree: Tree): Unit = {
+ if (mutatedSymbols.nonEmpty)
+ for (t <- tree if t.tpe != null)
+ for (tp <- t.tpe) {
+ tp match {
+ case s: SingleType if mutatedSymbols contains s.sym =>
+ s.underlyingPeriod = NoPeriod
+ s.underlyingCache = NoType
+ case _ =>
+ }
+ }
+ }
override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 4fd5768b79..4d71e0e09e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -12,6 +12,7 @@ import scala.annotation.tailrec
import Predef.{ println => _, _ }
import interpreter.session._
import StdReplTags._
+import scala.tools.asm.ClassReader
import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import scala.reflect.classTag
@@ -633,28 +634,29 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
* the interpreter and replays all interpreter expressions.
*/
def require(arg: String): Unit = {
- class InfoClassLoader extends java.lang.ClassLoader {
- def classOf(arr: Array[Byte]): Class[_] =
- super.defineClass(null, arr, 0, arr.length)
- }
-
val f = File(arg).normalize
- if (f.isDirectory) {
- echo("Adding directories to the classpath is not supported. Add a jar instead.")
+ val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
+ if (jarFile == null) {
+ echo(s"Cannot load '$arg'")
return
}
- val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
-
def flatten(f: AbstractFile): Iterator[AbstractFile] =
if (f.isClassContainer) f.iterator.flatMap(flatten)
else Iterator(f)
val entries = flatten(jarFile)
- val cloader = new InfoClassLoader
- def classNameOf(classFile: AbstractFile): String = cloader.classOf(classFile.toByteArray).getName
+ def classNameOf(classFile: AbstractFile): String = {
+ val input = classFile.input
+ try {
+ val reader = new ClassReader(input)
+ reader.getClassName.replace('/', '.')
+ } finally {
+ input.close()
+ }
+ }
def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
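A minimal, self-contained sketch of the technique the patch switches to: reading a class file's name with ASM's ClassReader instead of defining the class in a throwaway ClassLoader, so superclasses of the parsed class never need to be resolved (the cause of the NoClassDefFoundError covered by test6 below). The standalone helper here is illustrative only, not part of the patch:

    import java.io.FileInputStream
    import scala.tools.asm.ClassReader

    def classNameOfFile(path: String): String = {
      val in = new FileInputStream(path)
      try new ClassReader(in).getClassName.replace('/', '.')
      finally in.close()
    }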
diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 28ddf2939c..ed69d449cb 100644
--- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -23,6 +23,7 @@ trait InteractiveReader {
def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
case 'y' => true
case 'n' => false
+ case -1 => false // EOF
case _ => alt
}
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index 1555135926..cb26446f40 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -1,4 +1,4 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 1555135926..cb26446f40 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,4 +1,4 @@
-warning: there were two deprecation warnings; re-run with -deprecation for details
+warning: there were three deprecation warnings; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
diff --git a/test/files/neg/t7623.check b/test/files/neg/t7623.check
new file mode 100644
index 0000000000..db368dd369
--- /dev/null
+++ b/test/files/neg/t7623.check
@@ -0,0 +1,21 @@
+t7623.scala:19: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def f = "" match { case X(s) => }
+ ^
+t7623.scala:21: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def g = "" match { case X(s, t) => }
+ ^
+t7623.scala:23: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def h = "" match { case X(s, t, u @ _*) => }
+ ^
+t7623.scala:9: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def f = C("") match { case C(s) => }
+ ^
+t7623.scala:11: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def g = C("") match { case C(s, t) => }
+ ^
+t7623.scala:13: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def h = C("") match { case C(s, t, u @ _*) => }
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t7623.flags b/test/files/neg/t7623.flags
new file mode 100644
index 0000000000..74c9e38323
--- /dev/null
+++ b/test/files/neg/t7623.flags
@@ -0,0 +1 @@
+-Xlint:stars-align -Xfatal-warnings
diff --git a/test/files/neg/t7623.scala b/test/files/neg/t7623.scala
new file mode 100644
index 0000000000..5c40f37bc1
--- /dev/null
+++ b/test/files/neg/t7623.scala
@@ -0,0 +1,38 @@
+
+
+case class C(s: String, xs: Int*)
+
+object X { def unapplySeq(a: Any): Option[(String, Seq[Int])] = Some("", List(1,2,3)) }
+
+// for case classes with varargs, avoid misaligned patterns
+trait Ctest {
+ def f = C("") match { case C(s) => }
+
+ def g = C("") match { case C(s, t) => }
+
+ def h = C("") match { case C(s, t, u @ _*) => }
+
+ def ok = C("") match { case C(s, u @ _*) => }
+}
+// for extractors that unapplySeq: Option[(Something, Seq[_])], avoid misaligned patterns
+trait Xtest {
+ def f = "" match { case X(s) => }
+
+ def g = "" match { case X(s, t) => }
+
+ def h = "" match { case X(s, t, u @ _*) => }
+
+ def ok = "" match { case X(s, u @ _*) => }
+}
+// for extractors that unapplySeq: Option[Seq[_]], anything goes
+trait Rtest {
+ val r = "(a+)".r
+
+ def f = "" match { case r(s) => }
+
+ def g = "" match { case r(s, t) => }
+
+ def h = "" match { case r(s, t, u @ _*) => }
+
+ def whatever = "" match { case r(u @ _*) => }
+}
diff --git a/test/files/neg/t9041.check b/test/files/neg/t9041.check
new file mode 100644
index 0000000000..669e9434e0
--- /dev/null
+++ b/test/files/neg/t9041.check
@@ -0,0 +1,4 @@
+t9041.scala:11: error: could not find implicit value for parameter cellSetter: CellSetter[scala.math.BigDecimal]
+ def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) }
+ ^
+one error found
diff --git a/test/files/neg/t9041.scala b/test/files/neg/t9041.scala
new file mode 100644
index 0000000000..2bdef0d3ae
--- /dev/null
+++ b/test/files/neg/t9041.scala
@@ -0,0 +1,17 @@
+// False negative test, requires overloading in Cell.
+
+trait Cell { def setCellValue(i: Int) = () ; def setCellValue(d: Double) = () }
+
+trait Nope {
+ def f = {
+ trait CellSetter[A] {
+ def setCell(cell: Cell, data: A): Unit
+ }
+ implicit val bigDecimalCellSetter = new CellSetter[math.BigDecimal]() {
+ def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) }
+ }
+ implicit class RichCell(cell: Cell) {
+ def setCellValue[A](data: A)(implicit cellSetter: CellSetter[A]) = cellSetter.setCell(cell, data)
+ }
+ }
+}
diff --git a/test/files/neg/t9093.check b/test/files/neg/t9093.check
new file mode 100644
index 0000000000..085a433f0b
--- /dev/null
+++ b/test/files/neg/t9093.check
@@ -0,0 +1,6 @@
+t9093.scala:3: error: polymorphic expression cannot be instantiated to expected type;
+ found : [C](f: C)Null
+ required: Unit
+ val x: Unit = apply2(0)/*(0)*/
+ ^
+one error found
diff --git a/test/files/neg/t9093.scala b/test/files/neg/t9093.scala
new file mode 100644
index 0000000000..d9922ad70e
--- /dev/null
+++ b/test/files/neg/t9093.scala
@@ -0,0 +1,5 @@
+object Main {
+ def apply2[C](fa: Any)(f: C) = null
+ val x: Unit = apply2(0)/*(0)*/
+}
+
diff --git a/test/files/pos/t5154.scala b/test/files/pos/t5154.scala
new file mode 100644
index 0000000000..2629308f00
--- /dev/null
+++ b/test/files/pos/t5154.scala
@@ -0,0 +1,9 @@
+
+trait Z {
+ // extra space made the pattern OK
+ def f = <z> {{3}}</z> match { case <z> {{3}}</z> => }
+
+ // lack of space: error: illegal start of simple pattern
+ def g = <z>{{3}}</z> match { case <z>{{3}}</z> => }
+}
+
diff --git a/test/files/pos/t9050.scala b/test/files/pos/t9050.scala
new file mode 100644
index 0000000000..b1ab09f901
--- /dev/null
+++ b/test/files/pos/t9050.scala
@@ -0,0 +1,13 @@
+final class Mu[F](val value: Any) extends AnyVal {
+ def cata(f: F) {
+ // crash
+ ((y: Mu[F]) => y.cata(f))
+ // crash
+ def foo(x : Mu[F]) = x.cata(f)
+
+ // // okay
+ def x: Mu[F] = ???
+ (() => x.cata(f))
+ assert(true, cata(f))
+ }
+}
diff --git a/test/files/pos/t9086.scala b/test/files/pos/t9086.scala
new file mode 100644
index 0000000000..fba34ee226
--- /dev/null
+++ b/test/files/pos/t9086.scala
@@ -0,0 +1,8 @@
+class X[A](a: A)
+object Test {
+ implicit val ImplicitBoolean: Boolean = true
+ def local = {
+ implicit object X extends X({ implicitly[Boolean] ; "" })
+ implicitly[X[String]] // failed in 2.11.5
+ }
+}
diff --git a/test/files/pos/t9123.flags b/test/files/pos/t9123.flags
new file mode 100644
index 0000000000..c16e2f71dc
--- /dev/null
+++ b/test/files/pos/t9123.flags
@@ -0,0 +1 @@
+-optimize -Ydelambdafy:method
diff --git a/test/files/pos/t9123.scala b/test/files/pos/t9123.scala
new file mode 100644
index 0000000000..22d55b4351
--- /dev/null
+++ b/test/files/pos/t9123.scala
@@ -0,0 +1,10 @@
+trait Setting {
+ type T
+ def value: T
+}
+
+object Test {
+ def test(x: Some[Setting]) = x match {
+ case Some(dep) => Some(dep.value) map (_ => true)
+ }
+}
diff --git a/test/files/pos/t9135.scala b/test/files/pos/t9135.scala
new file mode 100644
index 0000000000..1e2c97baf9
--- /dev/null
+++ b/test/files/pos/t9135.scala
@@ -0,0 +1,16 @@
+
+class Free[A] {
+
+
+ this match {
+ case a @ Gosub() => gosub(a.a)(x => gosub(???)(???))
+ }
+ def gosub[A, B](a0: Free[A])(f0: A => Any): Free[B] = ???
+}
+
+
+
+ case class Gosub[B]() extends Free[B] {
+ type C
+ def a: Free[C] = ???
+ }
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index 41c2ccdcb8..c24fd6238f 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -1,3 +1,4 @@
+warning: there were three deprecation warnings; re-run with -deprecation for details
ms0 = BitSet(2)
ms1 = BitSet(2)
ms2 = BitSet(2)
diff --git a/test/files/run/t6502.check b/test/files/run/t6502.check
deleted file mode 100644
index 95d36ee221..0000000000
--- a/test/files/run/t6502.check
+++ /dev/null
@@ -1,8 +0,0 @@
-test1 res1: true
-test1 res2: true
-test2 res1: true
-test2 res2: true
-test3 res1: true
-test3 res2: true
-test4 res1: true
-test4 res2: true
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
index 4ce034a482..52fabef6b8 100644
--- a/test/files/run/t6502.scala
+++ b/test/files/run/t6502.scala
@@ -46,6 +46,12 @@ object Test extends StoreReporterDirectTest {
}
}"""
+ def app6 = """
+ package test6
+ class A extends Test { println("created test6.A") }
+ class Z extends Test { println("created test6.Z") }
+ trait Test"""
+
def test1(): Unit = {
val jar = "test1.jar"
compileCode(app1, jar)
@@ -53,11 +59,12 @@ object Test extends StoreReporterDirectTest {
val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar", "test.Test.test()")
val output = ILoop.run(codeToRun, settings)
val lines = output.split("\n")
- val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
- val res2 = lines(lines.length-3).contains("testing...")
-
- println(s"test1 res1: $res1")
- println(s"test1 res2: $res2")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("testing...")
+ }
}
def test2(): Unit = {
@@ -69,11 +76,12 @@ object Test extends StoreReporterDirectTest {
val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar2")
val output = ILoop.run(codeToRun, settings)
val lines = output.split("\n")
- val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
- val res2 = lines(lines.length-3).contains("test2.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
-
- println(s"test2 res1: $res1")
- println(s"test2 res2: $res2")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("test2.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+ }
}
def test3(): Unit = {
@@ -85,11 +93,12 @@ object Test extends StoreReporterDirectTest {
val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar3", "test.Test3.test()")
val output = ILoop.run(codeToRun, settings)
val lines = output.split("\n")
- val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
- val res2 = lines(lines.length-3).contains("new object in existing package")
-
- println(s"test3 res1: $res1")
- println(s"test3 res2: $res2")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("new object in existing package")
+ }
}
def test4(): Unit = {
@@ -98,11 +107,30 @@ object Test extends StoreReporterDirectTest {
val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar1")
val output = ILoop.run(codeToRun, settings)
val lines = output.split("\n")
- val res1 = lines(4).contains("Added") && lines(4).contains("test1.jar")
- val res2 = lines(lines.length-3).contains("test1.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("test1.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+ }
+ }
- println(s"test4 res1: $res1")
- println(s"test4 res2: $res2")
+ def test5(): Unit = {
+ val codeToRun = ":require /does/not/exist.jar"
+ val output = ILoop.run(codeToRun, settings)
+ assert(!output.contains("NullPointerException"), output)
+ assert(output.contains("Cannot load '/does/not/exist.jar'"), output)
+ }
+
+ def test6(): Unit = {
+ // Avoid java.lang.NoClassDefFoundError triggered by the old approach of using a Java
+ // classloader to parse .class files in order to read their names.
+ val jar = "test6.jar"
+ compileCode(app6, jar)
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar", "import test6._; new A; new Z")
+ val output = ILoop.run(codeToRun, settings)
+ assert(output.contains("created test6.A"), output)
+ assert(output.contains("created test6.Z"), output)
}
def show(): Unit = {
@@ -110,7 +138,9 @@ object Test extends StoreReporterDirectTest {
test2()
test3()
test4()
+ test5()
+ test6()
}
- def toCodeInSeparateLines(lines: String*): String = lines.map(_ + "\n").mkString
+ def toCodeInSeparateLines(lines: String*): String = lines mkString "\n"
}
diff --git a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala
index 2094a46e37..05e97a13c9 100644
--- a/test/files/scalacheck/nan-ordering.scala
+++ b/test/files/scalacheck/nan-ordering.scala
@@ -42,16 +42,16 @@ object Test extends Properties("NaN-Ordering") {
property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) }
property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => {
- val mathmin = math.min(d1, d2)
+ val mathmax = math.max(d1, d2)
val numericmin = numFloat.reverse.min(d1, d2)
- mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
}
}
property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => {
- val mathmax = math.max(d1, d2)
+ val mathmin = math.min(d1, d2)
val numericmax = numFloat.reverse.max(d1, d2)
- mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
}
}
@@ -105,16 +105,16 @@ object Test extends Properties("NaN-Ordering") {
property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) }
property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
- val mathmin = math.min(d1, d2)
+ val mathmax = math.max(d1, d2)
val numericmin = numDouble.reverse.min(d1, d2)
- mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
}
}
property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
- val mathmax = math.max(d1, d2)
+ val mathmin = math.min(d1, d2)
val numericmax = numDouble.reverse.max(d1, d2)
- mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
}
}
diff --git a/test/junit/scala/StringContextTest.scala b/test/junit/scala/StringContextTest.scala
index 608b82bd96..7e9e775d58 100644
--- a/test/junit/scala/StringContextTest.scala
+++ b/test/junit/scala/StringContextTest.scala
@@ -65,14 +65,23 @@ class StringContextTest {
@Test def fIf() = {
val res = f"${if (true) 2.5 else 2.5}%.2f"
- assertEquals("2.50", res)
+ val expected = formatUsingCurrentLocale(2.50)
+ assertEquals(expected, res)
}
+
@Test def fIfNot() = {
val res = f"${if (false) 2.5 else 3.5}%.2f"
- assertEquals("3.50", res)
+ val expected = formatUsingCurrentLocale(3.50)
+ assertEquals(expected, res)
}
+
@Test def fHeteroArgs() = {
val res = f"${3.14}%.2f rounds to ${3}%d"
- assertEquals("3.14 rounds to 3", res)
+ val expected = formatUsingCurrentLocale(3.14) + " rounds to 3"
+ assertEquals(expected, res)
}
+
+ // Use this method to avoid problems with a locale-dependent decimal mark.
+ // String interpolation is intentionally not used here, since this helper exists to test string interpolation itself.
+ private def formatUsingCurrentLocale(number: Double, decimalPlaces: Int = 2) = ("%." + decimalPlaces + "f").format(number)
}
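Why the expected values above are computed via the current locale rather than hard-coded (an illustration, not part of the patch): the f interpolator formats through Java's Formatter, which honours the default locale's decimal mark.

    "%.2f".formatLocal(java.util.Locale.GERMANY, 2.5)   // "2,50"
    "%.2f".formatLocal(java.util.Locale.US, 2.5)        // "2.50"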
diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala
index 55da02744b..ab09c4930b 100644
--- a/test/junit/scala/collection/IterableViewLikeTest.scala
+++ b/test/junit/scala/collection/IterableViewLikeTest.scala
@@ -4,6 +4,7 @@ import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
+import language.postfixOps
@RunWith(classOf[JUnit4])
class IterableViewLikeTest {
diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala
new file mode 100644
index 0000000000..3722bdfe4d
--- /dev/null
+++ b/test/junit/scala/collection/immutable/StringLikeTest.scala
@@ -0,0 +1,37 @@
+package scala.collection.immutable
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil
+import scala.util.Random
+
+/* Test for SI-8988 */
+@RunWith(classOf[JUnit4])
+class StringLikeTest {
+ @Test
+ def testStringSplitWithChar: Unit = {
+ val chars = (0 to 255).map(_.toChar)
+ def randString = Random.nextString(30)
+
+ for (c <- chars) {
+ val s = randString
+ val jString = new java.lang.String(s)
+
+ // make sure we can match a literal character done by Java's split
+ val jSplit = jString.split("\\Q" + c.toString + "\\E")
+ val sSplit = s.split(c)
+ AssertUtil.assertSameElements(jSplit, sSplit, s"Not same result as Java split for char $c in string $s")
+ }
+ }
+
+ @Test
+ def testSplitEdgeCases: Unit = {
+ AssertUtil.assertSameElements("abcd".split('d'), Array("abc")) // not Array("abc", "")
+ AssertUtil.assertSameElements("abccc".split('c'), Array("ab")) // not Array("ab", "", "", "")
+ AssertUtil.assertSameElements("xxx".split('x'), Array[String]()) // not Array("", "", "", "")
+ AssertUtil.assertSameElements("".split('x'), Array("")) // not Array()
+ AssertUtil.assertSameElements("--ch--omp--".split("-"), Array("", "", "ch", "", "omp")) // All the cases!
+ }
+}
diff --git a/test/junit/scala/collection/immutable/VectorTest.scala b/test/junit/scala/collection/immutable/VectorTest.scala
new file mode 100644
index 0000000000..e7edba3e43
--- /dev/null
+++ b/test/junit/scala/collection/immutable/VectorTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.{Assert, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class VectorTest {
+ /**
+ * Test Vector ++ with concatenation of a small parallel collection (SI-9072).
+ *
+ */
+ @Test
+ def testPlusPlus(): Unit = {
+ val smallVec = (0 to 1)
+ val smallParVec = smallVec.par
+ val testElementsSize = (0 to 1000).map( _ => Vector.empty ++ smallParVec )
+ Assert.assertTrue(testElementsSize.forall( v => v.size == 2 ))
+ }
+}
diff --git a/test/junit/scala/collection/mutable/LinkedHashMapTest.scala b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala
new file mode 100644
index 0000000000..37dcd028a5
--- /dev/null
+++ b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala
@@ -0,0 +1,25 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{ Assert, Test }
+
+import scala.collection.mutable
+
+/* Test for SI-9095 */
+@RunWith(classOf[JUnit4])
+class LinkedHashMapTest {
+ class TestClass extends mutable.LinkedHashMap[String, Int] {
+ def lastItemRef = lastEntry
+ }
+
+ @Test
+ def testClear: Unit = {
+ val lhm = new TestClass
+ Seq("a" -> 8, "b" -> 9).foreach(kv => lhm.put(kv._1, kv._2))
+
+ Assert.assertNotNull(lhm.lastItemRef)
+ lhm.clear()
+ Assert.assertNull(lhm.lastItemRef)
+ }
+}
diff --git a/test/junit/scala/collection/mutable/LinkedHashSetTest.scala b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala
new file mode 100644
index 0000000000..b419ad37ec
--- /dev/null
+++ b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala
@@ -0,0 +1,25 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{ Assert, Test }
+
+import scala.collection.mutable
+
+/* Test for SI-9095 */
+@RunWith(classOf[JUnit4])
+class LinkedHashSetTest {
+ class TestClass extends mutable.LinkedHashSet[String] {
+ def lastItemRef = lastEntry
+ }
+
+ @Test
+ def testClear: Unit = {
+ val lhs = new TestClass
+ Seq("a", "b").foreach(k => lhs.add(k))
+
+ Assert.assertNotNull(lhs.lastItemRef)
+ lhs.clear()
+ Assert.assertNull(lhs.lastItemRef)
+ }
+}
diff --git a/test/junit/scala/collection/mutable/MutableListTest.scala b/test/junit/scala/collection/mutable/MutableListTest.scala
new file mode 100644
index 0000000000..ac6d30def0
--- /dev/null
+++ b/test/junit/scala/collection/mutable/MutableListTest.scala
@@ -0,0 +1,37 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.tools.testing.AssertUtil._
+
+@RunWith(classOf[JUnit4])
+class MutableListTest {
+
+ // Tests SI-8976
+ @Test def tailIteratorMustTerminateAtLength(): Unit = {
+ val is = MutableList(1,2,3)
+ val tl = is.tail
+ assertEquals(tl.length, tl.iterator.length)
+ is += 5
+ assertEquals(tl.length, tl.iterator.length)
+ assertSameElements(tl, tl.iterator)
+ }
+ @Test def iteratorMustFailEventually(): Unit = assertThrows[NoSuchElementException] {
+ MutableList[Unit]().iterator.next()
+ }
+ // was: Root empty iterator held reference
+ @Test def iteratorMustNotHoldOntoLast(): Unit = {
+ val is = MutableList(Some(1), Some(2))
+ val it = is.iterator
+ val x = Some(3)
+ is += x
+ assertNotReachable(x, it) {
+ it.next()
+ it.next()
+ }
+ assertTrue(it.isEmpty)
+ }
+}
diff --git a/test/junit/scala/math/OrderingTest.scala b/test/junit/scala/math/OrderingTest.scala
new file mode 100644
index 0000000000..218622b8b4
--- /dev/null
+++ b/test/junit/scala/math/OrderingTest.scala
@@ -0,0 +1,61 @@
+package scala.math
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class OrderingTest {
+
+ /* Test for SI-9077 */
+ @Test
+ def testReverseOrdering {
+ def check[T: Ordering](t1: T, t2: T): Unit = {
+ val O = Ordering[T]
+ val R = O.reverse
+ assertEquals(O.min(t1, t2), R.max(t1, t2))
+ assertEquals(O.max(t1, t2), R.min(t1, t2))
+
+ assertEquals(O.lteq(t1, t2), R.lteq(t2, t1))
+ assertEquals(O.lt(t1, t2), R.lt(t2, t1))
+ assertEquals(O.gteq(t1, t2), R.gteq(t2, t1))
+ assertEquals(O.gt(t1, t2), R.gt(t2, t1))
+ assertEquals(O.compare(t1, t2), R.compare(t2, t1))
+
+ assertEquals(O.equiv(t1, t2), R.equiv(t1, t2))
+
+ assertEquals(O.on((x: T) => x).min(t1, t2), R.on((x: T) => x).max(t1, t2))
+
+ assertEquals(O.tryCompare(t1, t2), R.tryCompare(t2, t1))
+
+ assertEquals(O.mkOrderingOps(t1).<(t2), R.mkOrderingOps(t2).<(t1))
+ assertEquals(O.mkOrderingOps(t1).<=(t2), R.mkOrderingOps(t2).<=(t1))
+ assertEquals(O.mkOrderingOps(t1).>(t2), R.mkOrderingOps(t2).>(t1))
+ assertEquals(O.mkOrderingOps(t1).>=(t2), R.mkOrderingOps(t2).>=(t1))
+
+ assertEquals(O.mkOrderingOps(t1).min(t2), R.mkOrderingOps(t1).max(t2))
+ assertEquals(O.mkOrderingOps(t1).max(t2), R.mkOrderingOps(t1).min(t2))
+ }
+ def checkAll[T: Ordering](ts: T*): Unit = {
+ for (t1 <- ts; t2 <- ts) check(t1, t2)
+ }
+ checkAll[Unit](())
+ checkAll[Boolean](true, false)
+ checkAll[Byte](Byte.MinValue, -1.toByte, 0.toByte, 1.toByte, Byte.MaxValue)
+ checkAll[Char](Char.MinValue, -1.toChar, 0.toChar, 1.toChar, Char.MaxValue)
+ checkAll[Short](Short.MinValue, -1, 0, 1, Short.MaxValue)
+ checkAll[Int](Int.MinValue, -1, 0, 1, Int.MaxValue)
+ checkAll[Double](Double.MinValue, -1, -0, 0, 1, Double.MaxValue)
+ checkAll[Float](Float.MinValue, -1, -0, 0, 1, Float.MaxValue)
+
+ checkAll[BigInt](Int.MinValue, -1, 0, 1, Int.MaxValue)
+ checkAll[BigDecimal](Int.MinValue, -1, -0, 1, Int.MaxValue)
+ checkAll[String]("", "a", "b", "bb")
+ checkAll[String]("", "a", "b", "bb")
+ checkAll[Option[Int]](None, Some(1), Some(2))
+ checkAll[Iterable[Int]](Nil, List(1), List(1, 2))
+ checkAll[(Int, Int)]((1, 2), (1, 3), (4, 5))
+ }
+}
+
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
index 11e955a4bb..895ad9d683 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
@@ -44,4 +44,9 @@ class SymbolTableTest {
assertFalse("Foo should be a superclass of Foo", fooSymbol.tpe <:< barSymbol.tpe)
}
+ @Test
+ def noSymbolOuterClass_t9133: Unit = {
+ import symbolTable._
+ assert(NoSymbol.outerClass == NoSymbol)
+ }
}
diff --git a/test/junit/scala/tools/testing/AssertThrowsTest.scala b/test/junit/scala/tools/testing/AssertThrowsTest.scala
index d91e450bac..76758f51d2 100644
--- a/test/junit/scala/tools/testing/AssertThrowsTest.scala
+++ b/test/junit/scala/tools/testing/AssertThrowsTest.scala
@@ -38,6 +38,6 @@ class AssertThrowsTest {
} catch {
case e: AssertionError => return
}
- assert(false, "assertThrows should error if the tested expression does not throw anything")
+ fail("assertThrows should error if the tested expression does not throw anything")
}
}
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
index 83a637783f..d29f9a473f 100644
--- a/test/junit/scala/tools/testing/AssertUtil.scala
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -2,18 +2,42 @@ package scala.tools
package testing
import org.junit.Assert
-import Assert.fail
+import Assert._
import scala.runtime.ScalaRunTime.stringOf
import scala.collection.{ GenIterable, IterableLike }
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+import java.lang.ref._
+import java.lang.reflect._
+import java.util.IdentityHashMap
/** This module contains additional higher-level assert statements
* that are ultimately based on junit.Assert primitives.
*/
object AssertUtil {
- /**
- * Check if throwable T (or a subclass) was thrown during evaluation of f, and that its message
- * satisfies the `checkMessage` predicate.
- * If any other exception will be re-thrown.
+ private final val timeout = 60 * 1000L // wait a minute
+
+ private implicit class `ref helper`[A](val r: Reference[A]) extends AnyVal {
+ def isEmpty: Boolean = r.get == null
+ def nonEmpty: Boolean = !isEmpty
+ }
+ private implicit class `class helper`(val clazz: Class[_]) extends AnyVal {
+ def allFields: List[Field] = {
+ def loop(k: Class[_]): List[Field] =
+ if (k == null) Nil
+ else k.getDeclaredFields.toList ::: loop(k.getSuperclass)
+ loop(clazz)
+ }
+ }
+ private implicit class `field helper`(val f: Field) extends AnyVal {
+ def follow(o: AnyRef): AnyRef = {
+ f setAccessible true
+ f get o
+ }
+ }
+
+ /** Check if throwable T (or a subclass) was thrown during evaluation of f, and that its message
+ * satisfies the `checkMessage` predicate. Any other exception is re-thrown.
*/
def assertThrows[T <: Throwable](f: => Any,
checkMessage: String => Boolean = s => true)
@@ -41,4 +65,29 @@ object AssertUtil {
*/
def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: Iterator[B]): Unit =
assertSameElements(expected, actual.toList, "")
+
+ /** Value is not strongly reachable from roots after body is evaluated.
+ */
+ def assertNotReachable[A <: AnyRef](a: => A, roots: AnyRef*)(body: => Unit): Unit = {
+ val wkref = new WeakReference(a)
+ def refs(root: AnyRef): mutable.Set[AnyRef] = {
+ val seen = new IdentityHashMap[AnyRef, Unit]
+ def loop(o: AnyRef): Unit =
+ if (wkref.nonEmpty && o != null && !seen.containsKey(o)) {
+ seen.put(o, ())
+ for {
+ f <- o.getClass.allFields
+ if !Modifier.isStatic(f.getModifiers)
+ if !f.getType.isPrimitive
+ if !classOf[Reference[_]].isAssignableFrom(f.getType)
+ } loop(f follow o)
+ }
+ loop(root)
+ seen.keySet.asScala
+ }
+ body
+ for (r <- roots if wkref.nonEmpty) {
+ assertFalse(s"Root $r held reference", refs(r) contains wkref.get)
+ }
+ }
}
diff --git a/test/junit/scala/tools/testing/AssertUtilTest.scala b/test/junit/scala/tools/testing/AssertUtilTest.scala
new file mode 100644
index 0000000000..03d8815ab2
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertUtilTest.scala
@@ -0,0 +1,21 @@
+package scala.tools
+package testing
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import AssertUtil._
+
+import java.lang.ref._
+
+@RunWith(classOf[JUnit4])
+class AssertUtilTest {
+
+ @Test def reachableIgnoresReferences(): Unit = {
+ class Holder[A](val ref: SoftReference[A])
+ val o = new Object
+ val r = new SoftReference(o)
+ assertNotReachable(o, new Holder(r)) { }
+ }
+}
diff --git a/versions.properties b/versions.properties
index a474b19c5b..88271da4eb 100644
--- a/versions.properties
+++ b/versions.properties
@@ -4,7 +4,7 @@
# when adding new properties that influence a release,
# also add them to the update.versions mechanism in build.xml,
# which is used by scala-release-2.11.x in scala/jenkins-scripts
-starr.version=2.11.2
+starr.version=2.11.5
starr.use.released=1
# These are the versions of the modules that go with this release.
@@ -14,7 +14,7 @@ starr.use.released=1
scala.binary.version=2.11
# e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1
# this defines the dependency on scala-continuations-plugin in scala-dist's pom
-scala.full.version=2.11.2
+scala.full.version=2.11.5
# external modules shipped with distribution, as specified by scala-library-all's pom
scala-xml.version.number=1.0.3
@@ -27,7 +27,7 @@ actors-migration.version.number=1.1.0
jline.version=2.12
# external modules, used internally (not shipped)
-partest.version.number=1.0.1
+partest.version.number=1.0.3
scalacheck.version.number=1.11.4
# TODO: modularize the compiler