-rw-r--r--  build-ant-macros.xml | 9
-rwxr-xr-x  build.xml | 239
-rw-r--r--  doc/LICENSE.md (renamed from docs/LICENSE) | 21
-rw-r--r--  doc/License.rtf | 65
-rw-r--r--  doc/README (renamed from docs/README) | 2
-rw-r--r--  doc/licenses/apache_jansi.txt (renamed from docs/licenses/apache_jansi.txt) | 0
-rw-r--r--  doc/licenses/bsd_asm.txt (renamed from docs/licenses/bsd_asm.txt) | 0
-rw-r--r--  doc/licenses/bsd_jline.txt (renamed from docs/licenses/bsd_jline.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery-layout.txt (renamed from docs/licenses/mit_jquery-layout.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery-ui.txt (renamed from docs/licenses/mit_jquery-ui.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery.txt (renamed from docs/licenses/mit_jquery.txt) | 0
-rw-r--r--  doc/licenses/mit_sizzle.txt (renamed from docs/licenses/mit_sizzle.txt) | 0
-rw-r--r--  doc/licenses/mit_tools.tooltip.txt (renamed from docs/licenses/mit_tools.tooltip.txt) | 0
-rw-r--r--  src/build/maven/maven-deploy.xml | 171
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 2
-rw-r--r--  src/build/maven/scala-dist-pom.xml | 75
-rw-r--r--  src/build/maven/scala-library-all-pom.xml | 99
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugin.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 164
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 78
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala | 155
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala | 154
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala | 146
-rw-r--r--  src/library/rootdoc.txt | 63
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 27
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 24
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 8
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 28
-rw-r--r--  src/library/scala/collection/convert/Wrappers.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 33
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 21
-rw-r--r--  src/library/scala/runtime/AbstractPartialFunction.scala | 12
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/manual/scala/man1/scala.scala | 6
-rw-r--r--  src/manual/scala/man1/scalac.scala | 185
-rw-r--r--  src/reflect/scala/reflect/api/FlagSets.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 55
-rw-r--r--  src/reflect/scala/reflect/internal/FlagSets.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/Flags.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/HasFlags.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala | 16
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 1
-rw-r--r--  src/swing/scala/swing/Publisher.scala | 174
-rw-r--r--  test/files/neg/t2066.check | 21
-rw-r--r--  test/files/neg/t2066.scala | 70
-rw-r--r--  test/files/neg/t2066b.check | 5
-rw-r--r--  test/files/neg/t2066b.scala (renamed from test/pending/neg/t2066.scala) | 0
-rw-r--r--  test/files/neg/t4425.check | 6
-rw-r--r--  test/files/neg/t4425b.check | 42
-rw-r--r--  test/files/neg/t5903a.check | 2
-rw-r--r--  test/files/neg/t6675b.check | 37
-rw-r--r--  test/files/neg/t6675b.flags | 1
-rw-r--r--  test/files/neg/t6675b.scala | 40
-rw-r--r--  test/files/neg/t7214neg.check | 7
-rw-r--r--  test/files/neg/t7850.check | 7
-rw-r--r--  test/files/neg/t7850.scala | 16
-rw-r--r--  test/files/neg/t7897.check | 4
-rw-r--r--  test/files/neg/t7897.scala | 23
-rw-r--r--  test/files/neg/t997.check | 7
-rw-r--r--  test/files/pos/t2066.scala | 25
-rw-r--r--  test/files/pos/t8045.scala | 17
-rw-r--r--  test/files/pos/t8046.scala | 20
-rw-r--r--  test/files/pos/t8046b.scala | 16
-rw-r--r--  test/files/pos/t8046c.scala | 19
-rw-r--r--  test/files/pos/t8128.scala | 15
-rw-r--r--  test/files/run/macroPlugins-namerHooks.scala | 7
-rw-r--r--  test/files/run/name-based-patmat.check | 2
-rw-r--r--  test/files/run/name-based-patmat.scala | 42
-rw-r--r--  test/files/run/patmat-mix-case-extractor.check | 8
-rw-r--r--  test/files/run/patmat-mix-case-extractor.scala | 110
-rw-r--r--  test/files/run/t8046.check | 2
-rw-r--r--  test/files/run/t8046/Test.scala | 18
-rw-r--r--  test/files/run/t8046/t8046c.scala | 13
-rw-r--r--  test/files/scalacheck/range.scala | 41
-rw-r--r--  test/junit/scala/collection/PagedSeq.scala | 16
-rw-r--r--  test/pending/pos/t6161.scala | 22
-rw-r--r--  test/pending/pos/t8128b.scala | 18
-rw-r--r--  test/pending/presentation/context-bounds1.check (renamed from test/files/presentation/context-bounds1.check) | 0
-rw-r--r--  test/pending/presentation/context-bounds1/Test.scala (renamed from test/files/presentation/context-bounds1/Test.scala) | 0
-rw-r--r--  test/pending/presentation/context-bounds1/src/ContextBounds.scala (renamed from test/files/presentation/context-bounds1/src/ContextBounds.scala) | 0
-rw-r--r--  test/pending/run/reflection-sync-potpourri.scala (renamed from test/files/run/reflection-sync-potpourri.scala) | 0
-rw-r--r--  versions.properties | 4
97 files changed, 2023 insertions, 858 deletions
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
index 593f93b784..0b92f1dab1 100644
--- a/build-ant-macros.xml
+++ b/build-ant-macros.xml
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="build-support">
+<project name="build-support" xmlns:artifact="urn:maven-artifact-ant">
<description> Macros for Scala's ant build </description>
<macrodef name="optimized">
@@ -451,8 +451,8 @@
<attribute name="project"/>
<sequential>
<local name="artifact-base"/>
- <property name="artifact-base" value="${maven-base}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
- <mkdir dir="${maven-base}/${@{project}.dir}${@{project}.name}"/>
+ <property name="artifact-base" value="${dist.maven}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
+ <mkdir dir="${dist.maven}/${@{project}.dir}${@{project}.name}"/>
<copy tofile="${artifact-base}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.jar" overwrite="true"/>
<copy tofile="${artifact-base}-src.jar" file="${build-osgi.dir}/${@{project}.name}-src.jar" overwrite="true"/>
<copy tofile="${artifact-base}-pom.xml" file="${src.dir}/build/maven/${@{project}.dir}/${@{project}.name}-pom.xml" overwrite="true"/>
@@ -469,7 +469,8 @@
</sequential>
</macrodef>
-
+ <!-- TODO inline maven-deploy.xml's macrodefs, remove maven-deploy.xml -->
+ <include file="src/build/maven/maven-deploy.xml" as="deploy-macros"/>
<macrodef name="testSuite">
<attribute name="dir" default="${partest.dir}"/>
diff --git a/build.xml b/build.xml
index 53cd998f2d..00ad5f78e9 100755
--- a/build.xml
+++ b/build.xml
@@ -4,17 +4,14 @@
xmlns:artifact="urn:maven-artifact-ant"
xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<include file="build-ant-macros.xml" as="macros"/>
- <include file="src/build/maven/maven-deploy.xml" as="maven-deploy"/>
<description>
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
</description>
<!-- HINTS
-
- for faster builds, have a build.properties in the same directory as build.xml that says:
locker.skip=1
-
-->
<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
@@ -27,11 +24,14 @@ antArgs tend to be:
scalacArgs examples:
"-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
-targets exercised:
- deploy-core.snapshot publish-opt-nodocs distpack-maven-opt nightly locker.done build build-opt test.suite test.continuations.suite test.scaladoc
+supported/exercised targets
+ to publish: nightly publish-opt-nodocs
+ to build: build build-opt locker.done
+ to run tests: test.suite test.scaladoc
+
+DO NOT RELY ON ANY OTHER TARGETS (ok, you're probably ok assuming the ones defined in the first 100 lines of this file)
-NOTE: after distpack-maven-opt, it is expected there's a build file in dists/maven/latest that defines targets deploy and deploy.local
-TODO: get rid of this separate step
+NOTE: dists/maven/latest/build.xml will soon disappear; call `publish` in this build instead
-->
<!-- To use Zinc with the ant build:
@@ -70,7 +70,6 @@ TODO:
<!-- packaging -->
<target name="distpack" depends="pack-archives.done, pack-maven.done"/>
<target name="distpack-maven" depends="pack-maven.done"/>
-
<target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
<target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
@@ -81,7 +80,6 @@ TODO:
<param name="scalac.args.optimise" value="-optimise"/>
</antcall>
</target>
-
<target name="publish-core-opt-nodocs" description="Builds an untested, undocumented optimised core (library/reflect/compiler) and publishes to maven.">
<antcall target="publish-core">
<param name="docs.skip" value="1"/>
@@ -93,24 +91,22 @@ TODO:
<param name="docs.skip" value="1"/>
</antcall>
</target>
-
<target name="all.done" depends="test.done, distpack"/>
-
<target name="nightly-nopt" depends="all.done"/>
<target name="nightly"><optimized name="nightly-nopt"/></target>
-
<target name="nightly.checkall">
<antcall target="nightly-nopt"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
+ <!-- prefer the sbt names, but the dotted names are used in jenkins;
+ rename there first before dropping the dotted ones -->
+ <target name="publish-local" depends="publish.local"/>
+ <target name="publish-signed" depends="publish.signed"/>
+
<target name="clean" depends="quick.clean" description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
<target name="docsclean" depends="docs.clean" description="Removes generated documentation. Distributions are untouched."/>
<target name="distclean" depends="dist.clean" description="Removes all distributions. Binaries and documentation are untouched."/>
- <target name="test.continuations.suite">
- <echo message="DEPRECATED: the continuations have moved to https://github.com/scala/scala-continuations"/>
- </target>
-
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
@@ -120,7 +116,7 @@ TODO:
<property name="build.sysclasspath" value="ignore"/>
<!-- Defines the repository layout -->
- <property name="docs.dir" value="${basedir}/docs"/>
+ <property name="doc.dir" value="${basedir}/doc"/>
<property name="lib.dir" value="${basedir}/lib"/>
<property name="src.dir" value="${basedir}/src"/>
<property name="partest.dir" value="${basedir}/test"/>
@@ -151,6 +147,7 @@ TODO:
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
<property name="build-pack.dir" value="${build.dir}/pack"/>
+ <property name="build-manual.dir" value="${build.dir}/manual"/>
<property name="build-osgi.dir" value="${build.dir}/osgi"/>
<property name="build-junit.dir" value="${build.dir}/junit"/>
<property name="build-strap.dir" value="${build.dir}/strap"/>
@@ -401,6 +398,13 @@ TODO:
<property name="version.number" value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
</else></if>
+ <!-- some default in case something went wrong getting the revision -->
+ <property name="version.number" value="-unknown-"/>
+
+ <property name="dist.name" value="scala-${version.number}"/>
+ <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+ <property name="dist.maven" value="${dists.dir}/maven/${version.number}"/>
+
<condition property="has.java6">
<equals arg1="${ant.java.version}" arg2="1.6"/>
</condition>
@@ -445,10 +449,6 @@ TODO:
<format property="short" pattern="yyyyMMddHHmmss"/>
</tstamp>
- <!-- some default in case something went wrong getting the revision -->
- <property name="version.number" value="-unknown-"/>
- <property name="init.avail" value="yes"/>
-
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
@@ -1559,8 +1559,36 @@ TODO:
</staged-docs>
</target>
- <target name="docs.man" depends="docs.start">
- <staged-uptodate stage="docs" project="manual">
+ <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
+ <!-- TODO modularize compiler: docs.scaladoc, docs.interactive, -->
+ <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
+
+<!-- ===========================================================================
+ DISTRIBUTION
+============================================================================ -->
+ <!-- bin/ -->
+ <target name="dist.bin" depends="pack.bin">
+ <mkdir dir="${dist.dir}/bin"/>
+ <copy toDir="${dist.dir}/bin" overwrite="true">
+ <fileset dir="${build-pack.dir}/bin"/>
+ </copy>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scalac"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
+ </target>
+
+ <!-- doc/ and man/ -->
+ <target name="dist.doc" depends="scaladoc.task" unless="docs.skip"> <!-- depends on scaladoc.task for scalac taskdef -->
+ <mkdir dir="${dist.dir}/doc"/>
+ <copy toDir="${dist.dir}/doc" overwrite="true">
+ <fileset dir="${doc.dir}"/>
+ </copy>
+
+ <mkdir dir="${dist.dir}/doc/tools"/>
+ <mkdir dir="${dist.dir}/man/man1"/>
+ <staged-uptodate stage="manual" project="manual">
<check><srcfiles dir="${src.dir}/manual"/></check>
<do>
<mkdir dir="${build.dir}/manmaker/classes"/>
@@ -1570,44 +1598,33 @@ TODO:
srcdir="${src.dir}/manual"
includes="**/*.scala"
addparams="${scalac.args.all} -language:implicitConversions"/>
- <mkdir dir="${build-docs.dir}/manual/man/man1"/>
- <mkdir dir="${build-docs.dir}/manual/html"/>
- <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
+ <mkdir dir="${build-manual.dir}/genman/man1"/>
<taskdef name="genman"
classname="scala.tools.docutil.ManMaker"
classpathref="manual.build.path"/>
<genman command="fsc, scala, scalac, scaladoc, scalap"
- htmlout="${build-docs.dir}/manual/html"
- manout="${build-docs.dir}/manual/genman"/>
- <!-- On Windows source and target files can't be the same ! -->
- <fixcrlf
- srcdir="${build-docs.dir}/manual/genman"
- destdir="${build-docs.dir}/manual/man"
- eol="unix" includes="**/*.1"/>
- <copy todir="${build-docs.dir}/manual/html" overwrite="true">
- <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.gif"/>
- <include name="**/*.png"/>
- </fileset>
- </copy>
+ htmlout="${dist.dir}/doc/tools"
+ manout="${build-manual.dir}/genman"/>
</do>
</staged-uptodate>
- </target>
-
- <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
- <!-- TODO modularize compiler: docs.scaladoc, docs.interactive, -->
- <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
-
-<!-- ===========================================================================
- DISTRIBUTION
-============================================================================ -->
- <target name="dist.base" depends="osgi.done">
- <property name="dist.name" value="scala-${version.number}"/>
- <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+ <!-- On Windows source and target files can't be the same ! -->
+ <fixcrlf
+ srcdir="${build-manual.dir}/genman"
+ destdir="${dist.dir}/man"
+ eol="unix" includes="**/*.1"/>
+ <copy todir="${dist.dir}/doc/tools" overwrite="true">
+ <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
+ <include name="**/*.html"/>
+ <include name="**/*.css"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.png"/>
+ </fileset>
+ </copy>
+ </target>
+ <!-- lib/ and src/ (jars: classes and sources) -->
+ <target name="dist.lib" depends="osgi.done">
<mkdir dir="${dist.dir}/lib"/>
<mkdir dir="${dist.dir}/src"/>
@@ -1637,29 +1654,10 @@ TODO:
</fileset>
</copy>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
-
- <mkdir dir="${dist.dir}/bin"/>
- <copy toDir="${dist.dir}/bin" overwrite="true">
- <fileset dir="${build-pack.dir}/bin"/>
- </copy>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
</target>
-
- <target name="dist.doc" depends="dist.base, docs.done">
- <mkdir dir="${dist.dir}/doc"/>
- <mkdir dir="${dist.dir}/doc/licenses"/>
- <mkdir dir="${dist.dir}/doc/tools"/>
- <copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc" overwrite="true"/>
- <copy file="${docs.dir}/README" toDir="${dist.dir}/doc" overwrite="true"/>
- <copy toDir="${dist.dir}/doc/licenses" overwrite="true">
- <fileset dir="${docs.dir}/licenses"/>
- </copy>
-
+ <!-- api/ (scaladoc) -->
+ <target name="dist.api" depends="docs.done" unless="docs.skip">
<mkdir dir="${dist.dir}/api"/>
<copy toDir="${dist.dir}/api" overwrite="true">
<fileset dir="${build-docs.dir}/library"/>
@@ -1674,20 +1672,7 @@ TODO:
</copy>
</target>
-
- <target name="dist.man" depends="dist.base, docs.man">
- <mkdir dir="${dist.dir}/man"/>
- <copy toDir="${dist.dir}/man" overwrite="true">
- <fileset dir="${build-docs.dir}/manual/man"/>
- </copy>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/tools" overwrite="true">
- <fileset dir="${build-docs.dir}/manual/html"/>
- </copy>
- </target>
-
-
- <target name="dist.partial" depends="dist.base">
+ <target name="dist.partial" depends="dist.lib, dist.bin, dist.doc">
<if><not><os family="windows"/></not><then>
<symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="true"/>
</then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
@@ -1695,12 +1680,13 @@ TODO:
</else></if>
</target>
- <target name="dist.done" depends="dist.doc, dist.man, dist.partial"/>
+ <target name="dist.done" depends="dist.partial, dist.api"/>
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
============================================================================ -->
+ <!-- TODO: get rid of this, it's redundant between maven and github -->
<target name="pack-archives.done" depends="dist.done, docs.done">
<mkdir dir="${dists.dir}/archives"/>
<property name="archive-base" value="${dists.dir}/archives/${dist.name}"/>
@@ -1716,7 +1702,7 @@ MAIN DISTRIBUTION PACKAGING
<if><not><isset property="docs.skip"/></not><then>
<tarz name="${archive-base}-devel-docs">
- <tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
+ <tarfileset dir="${dist.dir}/api" prefix="${dist.name}-devel-docs"/>
</tarz>
</then></if>
@@ -1761,13 +1747,13 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-maven.core" depends="osgi.core, docs.core">
- <property name="maven-base" value="${dists.dir}/maven/${version.number}"/>
- <mkdir dir="${maven-base}"/>
-
<mvn-package project="library"/>
<mvn-package project="reflect"/>
<mvn-package project="compiler"/>
+ <copy tofile="${dist.maven}/scala-library-all/scala-library-all-pom.xml"
+ file="${src.dir}/build/maven/scala-library-all-pom.xml" overwrite="true"/>
+
<!-- for replacestarr -->
<if><isset property="update.starr.version"/><then>
<echo message="From now on, ${maven.version.number} will be used as STARR (`build.properties`'s `starr.version` was modified)."/>
@@ -1777,7 +1763,16 @@ MAIN DISTRIBUTION PACKAGING
</then></if>
</target>
- <target name="pack-maven.base" depends="pack-maven.core, osgi.done, docs.done">
+ <target name="pack-maven.dist" depends="dist.bin, dist.doc">
+ <copy tofile="${dist.maven}/scala-dist/scala-dist-pom.xml" file="${src.dir}/build/maven/scala-dist-pom.xml" overwrite="true"/>
+ <jar whenmanifestonly="fail" destfile="${dist.maven}/scala-dist/scala-dist.jar" basedir="${dist.dir}">
+ <include name="bin/" />
+ <include name="doc/" />
+ <include name="man/" />
+ </jar>
+ </target>
+
+ <target name="pack-maven.base" depends="pack-maven.core, osgi.done, docs.done, pack-maven.dist">
<!-- TODO modularize compiler
<mvn-package project="interactive"/>
<mvn-package project="scaladoc"/>
@@ -1786,19 +1781,21 @@ MAIN DISTRIBUTION PACKAGING
<mvn-package project="actors"/>
<!-- don't bother fitting scalap into the mould: it will move out soon -->
- <copy tofile="${maven-base}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
- <copy tofile="${maven-base}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
- <jar destfile="${maven-base}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
+ <mkdir dir="${dist.maven}"/>
+ <copy tofile="${dist.maven}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
+ <copy tofile="${dist.maven}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
+ <jar destfile="${dist.maven}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
<if><not><isset property="docs.skip"/></not><then>
- <jar destfile="${maven-base}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
+ <jar destfile="${dist.maven}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
</then></if>
</target>
+ <!-- TODO: remove this target and delete src/build/maven-deploy.xml -->
<target name="pack-maven.done" depends="pack-maven.base">
<!-- Create dists/maven/latest alias and copy maven-deploy ant build there. -->
<if><isset property="os.win"/><then>
<copy todir="${dists.dir}/maven/latest" overwrite="true">
- <fileset dir="${maven-base}"/>
+ <fileset dir="${dist.maven}"/>
</copy>
</then><else>
<symlink link="${dists.dir}/maven/latest"
@@ -1806,17 +1803,17 @@ MAIN DISTRIBUTION PACKAGING
overwrite="true"/>
</else></if>
<!-- copy build file and its dependencies -->
- <copy todir="${maven-base}"
+ <copy todir="${dist.maven}"
file="${lib-ant.dir}/ant-contrib.jar" overwrite="true"/>
- <copy todir="${maven-base}"
+ <copy todir="${dist.maven}"
file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" overwrite="true"/>
- <copy tofile="${maven-base}/build.xml"
+ <copy tofile="${dist.maven}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
<!-- export properties for use when deploying -->
- <echoproperties destfile="${maven-base}/build.properties"/>
+ <echoproperties destfile="${dist.maven}/build.properties"/>
</target>
- <!-- keep these properties out of ${maven-base}/build.properties, dumped in pack-maven.done -->
+ <!-- keep these properties out of ${dist.maven}/build.properties, dumped in pack-maven.done -->
<target name="init.maven" depends="init">
<property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
<property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
@@ -1840,20 +1837,34 @@ MAIN DISTRIBUTION PACKAGING
<!-- ===========================================================================
MAVEN PUBLISHING
============================================================================ -->
- <!-- TODO: inline maven-deploy.xml here and remove it, once jenkins jobs no longer rely on it -->
- <target name="publish" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the maven repo."> <deploy dir="${maven-base}/"/> </target>
- <target name="publish.local" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the local maven repo."> <deploy dir="${maven-base}/" local="true"/> </target>
- <target name="publish.signed" depends="pack-maven.base, init.maven" description="Publishes signed artifacts to the remote maven repo."> <deploy dir="${maven-base}/" signed="true"/> </target>
+ <target name="publish" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the maven repo.">
+ <deploy />
+ <deploy-pom name="scala-library-all"/>
+ <deploy-jar name="scala-dist"/>
+ </target>
+
+ <target name="publish.local" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the local maven repo.">
+ <deploy local="true"/>
+ <deploy-pom name="scala-library-all" local="true"/>
+ <deploy-jar name="scala-dist" local="true"/>
+ </target>
+
+ <target name="publish.signed" depends="pack-maven.base, init.maven" description="Publishes signed artifacts to the remote maven repo.">
+ <deploy signed="true"/>
+ <deploy-pom name="scala-library-all" signed="true"/>
+ <deploy-jar name="scala-dist" signed="true"/>
+ </target>
<target name="publish-core" depends="pack-maven.core, init.maven">
- <deploy-one dir="${maven-base}/" name="scala-compiler" />
- <deploy-one dir="${maven-base}/" name="scala-library" />
- <deploy-one dir="${maven-base}/" name="scala-reflect" />
+ <deploy-one name="scala-compiler" />
+ <deploy-one name="scala-library" />
+ <deploy-one name="scala-reflect" />
</target>
+
<target name="publish-core-local" depends="pack-maven.core, init.maven">
- <deploy-one dir="${maven-base}/" name="scala-compiler" local="true"/>
- <deploy-one dir="${maven-base}/" name="scala-library" local="true"/>
- <deploy-one dir="${maven-base}/" name="scala-reflect" local="true"/>
+ <deploy-one name="scala-compiler" local="true"/>
+ <deploy-one name="scala-library" local="true"/>
+ <deploy-one name="scala-reflect" local="true"/>
</target>
<target name="publish-core-opt" description="Builds an untested optimised core (library/reflect/compiler) and publishes to maven.">
diff --git a/docs/LICENSE b/doc/LICENSE.md
index 4daedef581..6b039afd68 100644
--- a/docs/LICENSE
+++ b/doc/LICENSE.md
@@ -11,14 +11,14 @@ All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
- * Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
- * Neither the name of the EPFL nor the names of its contributors
- may be used to endorse or promote products derived from this software
- without specific prior written permission.
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
@@ -39,18 +39,22 @@ which are also included in the `licenses/` directory:
### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html)
This license is used by the following third-party libraries:
+
* jansi
### [BSD License](http://www.opensource.org/licenses/bsd-license.php)
This license is used by the following third-party libraries:
+
* jline
### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause)
This license is used by the following third-party libraries:
+
* asm
### [MIT License](http://www.opensource.org/licenses/MIT)
This license is used by the following third-party libraries:
+
* jquery
* jquery-ui
* jquery-layout
@@ -59,5 +63,6 @@ This license is used by the following third-party libraries:
### Public Domain
The following libraries are freely available in the public domain:
+
* forkjoin
diff --git a/doc/License.rtf b/doc/License.rtf
new file mode 100644
index 0000000000..62ec2d023c
--- /dev/null
+++ b/doc/License.rtf
@@ -0,0 +1,65 @@
+{\rtf1\ansi\ansicpg1252\cocoartf1187\cocoasubrtf400
+{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\margl1440\margr1440\vieww25140\viewh18960\viewkind0
+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural
+
+\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}.\
+\
+
+\fs48 Scala License
+\fs40 \
+
+\fs26 Copyright (c) 2002-2013 EPFL\
+Copyright (c) 2011-2013 Typesafe, Inc.\
+All rights reserved.\
+\
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\
+ \'95 Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\
+ \'95 Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\
+ \'95 Neither the name of the EPFL nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\
+\
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'94 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\
+
+\fs52 \
+
+\fs48 Other Licenses
+\fs52 \
+
+\fs26 This software includes projects with the following licenses, which are also included in the
+\fs24 licenses/
+\fs26 directory:\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt Apache License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jansi\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt BSD License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jline\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 asm\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/MIT"}}{\fldrslt MIT License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jquery\
+ \'95 jquery-ui\
+ \'95 jquery-layout\
+ \'95 sizzle\
+ \'95 tools tooltip\
+
+\fs30 \
+Public Domain\
+
+\fs26 The following libraries are freely available in the public domain:\
+ \'95 forkjoin}
\ No newline at end of file
diff --git a/docs/README b/doc/README
index 1d5f553d2e..29f64c9fef 100644
--- a/docs/README
+++ b/doc/README
@@ -13,7 +13,7 @@ Scala Tools
- scalac Scala compiler
- fsc Scala resident compiler
- scaladoc Scala API documentation generator
-- scalap Scala classfile decoder
+- scalap Scala classfile decoder
Run the command "scalac -help" to display the list of available
compiler options.
diff --git a/docs/licenses/apache_jansi.txt b/doc/licenses/apache_jansi.txt
index 067a5a6a34..067a5a6a34 100644
--- a/docs/licenses/apache_jansi.txt
+++ b/doc/licenses/apache_jansi.txt
diff --git a/docs/licenses/bsd_asm.txt b/doc/licenses/bsd_asm.txt
index 8613cd33a2..8613cd33a2 100644
--- a/docs/licenses/bsd_asm.txt
+++ b/doc/licenses/bsd_asm.txt
diff --git a/docs/licenses/bsd_jline.txt b/doc/licenses/bsd_jline.txt
index 3e5dba75da..3e5dba75da 100644
--- a/docs/licenses/bsd_jline.txt
+++ b/doc/licenses/bsd_jline.txt
diff --git a/docs/licenses/mit_jquery-layout.txt b/doc/licenses/mit_jquery-layout.txt
index 4af6a0a4b0..4af6a0a4b0 100644
--- a/docs/licenses/mit_jquery-layout.txt
+++ b/doc/licenses/mit_jquery-layout.txt
diff --git a/docs/licenses/mit_jquery-ui.txt b/doc/licenses/mit_jquery-ui.txt
index be226805d3..be226805d3 100644
--- a/docs/licenses/mit_jquery-ui.txt
+++ b/doc/licenses/mit_jquery-ui.txt
diff --git a/docs/licenses/mit_jquery.txt b/doc/licenses/mit_jquery.txt
index ef2c570469..ef2c570469 100644
--- a/docs/licenses/mit_jquery.txt
+++ b/doc/licenses/mit_jquery.txt
diff --git a/docs/licenses/mit_sizzle.txt b/doc/licenses/mit_sizzle.txt
index d81d30aa0f..d81d30aa0f 100644
--- a/docs/licenses/mit_sizzle.txt
+++ b/doc/licenses/mit_sizzle.txt
diff --git a/docs/licenses/mit_tools.tooltip.txt b/doc/licenses/mit_tools.tooltip.txt
index 27a4dbc788..27a4dbc788 100644
--- a/docs/licenses/mit_tools.tooltip.txt
+++ b/doc/licenses/mit_tools.tooltip.txt
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 412d7caab6..a2c3eefbca 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -1,21 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
+<!--
+THIS FILE WILL SOON SELF DESTRUCT; DO NOT USE
+see publish.* targets in /build.xml
+-->
<project name="sabbus-maven-deploy" xmlns:artifact="urn:maven-artifact-ant">
<description>
SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY
</description>
+
<macrodef name="deploy-remote">
<attribute name="jar" default=""/>
<attribute name="pom"/>
<element name="artifacts" implicit="true" optional="true"/>
<sequential>
- <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
- <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:deploy>
+ <if><equals arg1="@{jar}" arg2="true"/><then>
+ <artifact:deploy settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:deploy>
+ </then><else>
+ <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:deploy>
+ </else></if>
</sequential>
</macrodef>
@@ -24,11 +37,19 @@
<attribute name="pom"/>
<element name="artifacts" implicit="true" optional="true"/>
<sequential>
- <artifact:install file="@{jar}">
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:install>
+ <if><equals arg1="@{jar}" arg2="true"/><then>
+ <artifact:install>
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:install>
+ </then><else>
+ <artifact:install file="@{jar}">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:install>
+ </else></if>
</sequential>
</macrodef>
@@ -46,26 +67,25 @@
</sequential>
</macrodef>
- <macrodef name="deploy-one">
- <attribute name="dir" default=""/>
+ <macrodef name="filter-pom">
+ <attribute name="path" />
<attribute name="name" />
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
<sequential>
- <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/>
-
- <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
-
<copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
<filterset>
<filter token="VERSION" value="${maven.version.number}" />
<filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
<filter token="XML_VERSION" value="${scala-xml.version.number}" />
<filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
+ <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" />
+ <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
+ <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" />
<filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
<filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
<filter token="JLINE_VERSION" value="${jline.version}" />
+ <filter token="AKKA_ACTOR_VERSION" value="${akka-actor.version.number}" />
+ <filter token="ACTORS_MIGRATION_VERSION" value="${actors-migration.version.number}" />
<!-- TODO modularize compiler.
<filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}" />
@@ -74,6 +94,20 @@
</filterset>
</copy>
<artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
+ </sequential>
+ </macrodef>
+
+ <macrodef name="deploy-one">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
<if><equals arg1="@{signed}" arg2="false"/><then>
<if><isset property="docs.skip"/><then>
@@ -108,26 +142,94 @@
</sequential>
</macrodef>
+ <macrodef name="deploy-jar">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}.jar with ${path}-pom.xml.</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom"/>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn failonerror="true">
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}.jar" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="deploy-pom">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-pom.xml.</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <deploy-to local="@{local}" pom="@{name}.pom"/>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn failonerror="true">
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}-pom-filtered.xml" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
+ </sequential>
+ </macrodef>
+
<macrodef name="deploy">
- <attribute name="dir" default=""/>
<attribute name="local" default="false"/>
<attribute name="signed" default="false"/>
<sequential>
- <deploy-one dir="@{dir}" name="scala-library" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-reflect" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-compiler" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-library" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-reflect" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler" local="@{local}" signed="@{signed}"/>
<!-- TODO modularize compiler.
- <deploy-one dir="@{dir}" name="scala-compiler-doc" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler-doc" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
-->
- <deploy-one dir="@{dir}" name="scala-actors" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scalap" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-actors" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scalap" local="@{local}" signed="@{signed}"/>
</sequential>
</macrodef>
+
<target name="boot.maven">
<!-- Pull in properties from build -->
<property file="build.properties" />
@@ -162,7 +264,18 @@
</echo>
</target>
- <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target>
- <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target>
- <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target>
+ <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish` FROM /build.xml"/>
+ <deploy/>
+ </target>
+
+ <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.local` FROM /build.xml"/>
+ <deploy local="true"/>
+ </target>
+
+ <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.signed` FROM /build.xml"/>
+ <deploy signed="true"/>
+ </target>
</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index a16fe22343..4a000b27a1 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -50,7 +50,7 @@
<artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
<version>@PARSER_COMBINATORS_VERSION@</version>
</dependency>
- <dependency> <!-- for scala-compiler-repl-->
+ <dependency> <!-- for scala-compiler-repl; once it moves there, make it required -->
<groupId>jline</groupId>
<artifactId>jline</artifactId>
<version>@JLINE_VERSION@</version>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
new file mode 100644
index 0000000000..413da928bb
--- /dev/null
+++ b/src/build/maven/scala-dist-pom.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-dist</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Distribution Artifacts</name>
+ <description>The Artifacts Distributed with Scala</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library-all</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <!-- duplicated from scala-compiler, where it's optional,
+ so that resolving scala-dist's transitive dependencies does not include jline,
+ even though we need to include it in the dist, but macros depending on the compiler
+ shouldn't have to require jline...
+ another reason to modularize and move the dependency to scala-compiler-repl
+ TODO: remove duplication once we have the scala-compiler-repl module -->
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>@JLINE_VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
new file mode 100644
index 0000000000..f34a28e79a
--- /dev/null
+++ b/src/build/maven/scala-library-all-pom.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library-all</artifactId>
+ <packaging>pom</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Library Powerpack</name>
+ <description>The Scala Standard Library and Official Modules</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@XML_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@PARSER_COMBINATORS_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>scala-continuations-plugin_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@CONTINUATIONS_PLUGIN_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>scala-continuations-library_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@CONTINUATIONS_LIBRARY_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-swing_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@SCALA_SWING_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-actor_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@AKKA_ACTOR_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors-migration_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@ACTORS_MIGRATION_VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
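For context, a minimal sketch of how a downstream sbt build might depend on the new aggregator introduced above (the version string is illustrative; scala-library-all is published without a Scala binary-version suffix, and depending on how the build tool resolves pom-packaged artifacts, the dependency may need to be marked pom-only):

    // build.sbt (sketch): pull in the standard library plus the official modules
    // via the scala-library-all aggregator POM. The version shown is illustrative.
    scalaVersion := "2.11.0"

    libraryDependencies += "org.scala-lang" % "scala-library-all" % "2.11.0"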
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
index 5be5abd895..dd2d63ad17 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
@@ -835,6 +835,7 @@ abstract class BCodeTypes extends BCodeIdiomatic {
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.hasEnumFlag) ACC_ENUM else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index e92f8c2541..7e1a82a155 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -244,6 +244,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.hasEnumFlag) ACC_ENUM else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0,
if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
)
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 7932dd3459..9875d27047 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -792,7 +792,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val superclazz =
AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
- ClassDef(mods, name, List(),
+ ClassDef(mods | Flags.ENUM, name, List(),
makeTemplate(superclazz :: interfaces, body))
})
}
@@ -811,10 +811,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead()
accept(RBRACE)
}
- // The STABLE flag is to signal to namer that this was read from a
- // java enum, and so should be given a Constant type (thereby making
- // it usable in annotations.)
- ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
+ ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
}
}
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 183752d4a2..7837f9a11a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -126,10 +126,11 @@ object Plugin {
}
/** Load all plugins specified by the arguments.
- * Each of `jars` must be a valid plugin archive or exploded archive.
+ * Each location of `paths` must be a valid plugin archive or exploded archive.
+ * Each of `paths` must define one plugin.
* Each of `dirs` may be a directory containing arbitrary plugin archives.
* Skips all plugins named in `ignoring`.
- * A single classloader is created and used to load all of them.
+ * A classloader is created to load each plugin.
*/
def loadAllFrom(
paths: List[List[Path]],
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 1f9987c83b..6ec364bcb6 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -94,11 +94,11 @@ trait ScalaSettings extends AbsScalaSettings
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
- val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
- val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
+ val plugin = MultiStringSetting("-Xplugin", "paths", "Load a plugin from each classpath.")
+ val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable plugins by name.")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
- val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
- val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
+ val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
+ val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
val Xprint = PhasesSetting ("-Xprint", "Print out program after")
val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
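For reference, the plugins loaded by the revised Plugin.loadAllFrom (one classloader per plugin, each -Xplugin entry naming one plugin's classpath) follow the usual scalac plugin shape; a minimal sketch with hypothetical names:

    // DemoPlugin.scala (sketch): a no-op compiler plugin of the kind -Xplugin loads.
    // It would be packaged in a jar alongside a scalac-plugin.xml descriptor.
    package demo

    import scala.tools.nsc.Global
    import scala.tools.nsc.plugins.{Plugin, PluginComponent}

    class DemoPlugin(val global: Global) extends Plugin {
      val name = "demo-plugin"
      val description = "illustrative plugin that adds no phases"
      val components: List[PluginComponent] = Nil
    }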
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 2b96961291..664645e53e 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -515,7 +515,7 @@ abstract class ClassfileParser {
val info = readType()
val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
- // Note: the info may be overrwritten later with a generic signature
+ // Note: the info may be overwritten later with a generic signature
// parsed from SignatureATTR
sym setInfo {
if (jflags.isEnum) ConstantType(Constant(sym))
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 844774e75f..ef50ae276f 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -457,12 +457,11 @@ abstract class UnCurry extends InfoTransform
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = transform(fn)
- val args1 = transformTrees(fn.symbol.name match {
- case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, localTyper.expectedPatternTypes(fn, args))
- case _ => sys.error("internal error: UnApply node has wrong symbol")
- })
+ val fn1 = transform(fn)
+ val args1 = fn.symbol.name match {
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes)
+ case _ => args
+ }
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 63f4a4bf25..699e98f963 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -31,6 +31,30 @@ trait MatchTranslation {
trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
import typer.context
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall unapply
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall unapply
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
object SymbolBound {
def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
@@ -86,10 +110,8 @@ trait MatchTranslation {
// example check: List[Int] <:< ::[Int]
private def extractorStep(): TranslationStep = {
- import extractor.{ paramType, treeMaker }
- if (!extractor.isTyped)
- ErrorUtils.issueNormalTypeError(tree, "Could not typecheck extractor call: "+ extractor)(context)
-
+ def paramType = extractor.aligner.wholeType
+ import extractor.treeMaker
// chain a type-testing extractor before the actual extractor call
// it tests the type, checks the outer pointer and casts to the expected type
// TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
@@ -355,36 +377,20 @@ trait MatchTranslation {
object ExtractorCall {
// TODO: check unargs == args
def apply(tree: Tree): ExtractorCall = tree match {
- case UnApply(unfun, args) => new ExtractorCallRegular(unfun, args) // extractor
- case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class
+ case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree), fun, args) // case class
}
}
- abstract class ExtractorCall {
+ abstract class ExtractorCall(val aligner: PatternAligned) {
+ import aligner._
def fun: Tree
def args: List[Tree]
- val nbSubPats = args.length
- val starLength = if (hasStar) 1 else 0
- val nonStarLength = args.length - starLength
-
- // everything okay, captain?
- def isTyped: Boolean
- def isSeq: Boolean
-
- private def hasStar = nbSubPats > 0 && isStar(args.last)
- private def isNonEmptySeq = nbSubPats > 0 && isSeq
-
- /** This is special cased so that a single pattern will accept any extractor
- * result, even if it's a tuple (SI-6675)
- */
- def isSingle = nbSubPats == 1 && !isSeq
-
- // to which type should the previous binder be casted?
- def paramType : Type
-
- protected def rawSubPatTypes: List[Type]
- protected def resultType: Type
+ // don't go looking for selectors if we only expect one pattern
+ def rawSubPatTypes = aligner.extractedTypes
+ def resultInMonad = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
+ def resultType = fun.tpe.finalResultType
/** Create the TreeMaker that embodies this extractor call
*
@@ -407,24 +413,14 @@ trait MatchTranslation {
lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
// do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- private def nonStarSubPatTypes = formalTypes(rawInit :+ repeatedType, nonStarLength)
+ private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe)
- def subPatTypes: List[Type] = (
- if (rawSubPatTypes.isEmpty || !isSeq) rawSubPatTypes
- else if (hasStar) nonStarSubPatTypes :+ sequenceType
- else nonStarSubPatTypes
- )
-
- private def rawGet = typeOfMemberNamedGetOrSelf(resultType)
- private def rawInit = rawSubPatTypes dropRight 1
- protected def sequenceType = typeOfLastSelectorOrSelf(rawGet)
- protected def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
- protected def repeatedType = scalaRepeatedType(elementType)
+ def subPatTypes: List[Type] = typedPatterns map (_.tpe)
- // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected def firstIndexingBinder = rawSubPatTypes.length - 1
- protected def lastIndexingBinder = nbSubPats - 1 - starLength
- protected def expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ // there are `productArity` non-seq elements in the tuple.
+ protected def firstIndexingBinder = productArity
+ protected def expectedLength = elementArity
+ protected def lastIndexingBinder = totalArity - starArity - 1
private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
@@ -438,12 +434,12 @@ trait MatchTranslation {
// referenced by `binder`
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
def lastTrees: List[Tree] = (
- if (!hasStar) Nil
+ if (!aligner.isStar) Nil
else if (expectedLength == 0) seqTree(binder) :: Nil
else genDrop(binder, expectedLength)
)
// this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
// [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
// [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
@@ -457,8 +453,10 @@ trait MatchTranslation {
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] =
- if (isNonEmptySeq) subPatRefsSeq(binder) else productElemsToN(binder, nbSubPats)
+ protected def subPatRefs(binder: Symbol): List[Tree] = (
+ if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
+ else productElemsToN(binder, totalArity)
+ )
private def compareInts(t1: Tree, t2: Tree) =
gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
@@ -478,7 +476,7 @@ trait MatchTranslation {
// when the last subpattern is a wildcard-star the expectedLength is but a lower bound
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
- if (hasStar) _ INT_>= _
+ if (aligner.isStar) _ INT_>= _
else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
@@ -487,26 +485,14 @@ trait MatchTranslation {
def checkedLength: Option[Int] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || expectedLength < starLength) None
+ if (!isSeq || expectedLength < starArity) None
else Some(expectedLength)
}
// TODO: to be called when there's a def unapplyProd(x: T): U
// U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(val fun: Tree, val args: List[Tree]) extends ExtractorCall {
- private def constructorTp = fun.tpe
-
- def isTyped = fun.isTyped
-
- // to which type should the previous binder be casted?
- def paramType = constructorTp.finalResultType
- def resultType = fun.tpe.finalResultType
-
- def isSeq = isVarArgTypes(rawSubPatTypes)
-
- protected def rawSubPatTypes = constructorTp.paramTypes
-
+ class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -535,20 +521,11 @@ trait MatchTranslation {
if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
-
- override def toString() = s"ExtractorCallProd($fun:${fun.tpe} / ${fun.symbol} / args=$args)"
}
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall {
+ class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall(aligner) {
val Unapplied(fun) = extractorCallIncludingDummy
- def tpe = fun.tpe
- def paramType = firstParamType(tpe)
- def resultType = tpe.finalResultType
- def isTyped = (tpe ne NoType) && fun.isTyped && (resultInMonad ne ErrorType)
- def isSeq = fun.symbol.name == nme.unapplySeq
- def isBool = resultType =:= BooleanTpe
-
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -571,7 +548,7 @@ trait MatchTranslation {
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
subPatBinders,
subPatRefs(binder),
- isBool,
+ aligner.isBool,
checkedLength,
patBinderOrCasted,
ignoredSubPatBinders
@@ -583,9 +560,9 @@ trait MatchTranslation {
else super.seqTree(binder)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ // require (totalArity > 0 && (!lastIsStar || isSeq))
override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (isSingle) REF(binder) :: Nil // special case for extractors
+ if (aligner.isSingle) REF(binder) :: Nil // special case for extractors
else super.subPatRefs(binder)
protected def spliceApply(binder: Symbol): Tree = {
@@ -606,40 +583,7 @@ trait MatchTranslation {
splice transform extractorCallIncludingDummy
}
- // what's the extractor's result type in the monad? It is the type of its nullary member `get`.
- protected lazy val resultInMonad: Type = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
-
- protected lazy val rawSubPatTypes = (
- if (isBool) Nil
- else if (isSingle) resultInMonad :: Nil // don't go looking for selectors if we only expect one pattern
- else typesOfSelectorsOrSelf(resultInMonad)
- )
-
- override def toString() = s"ExtractorCallRegular($fun: $tpe / ${fun.symbol})"
- }
-
- /** A conservative approximation of which patterns do not discern anything.
- * They are discarded during the translation.
- */
- object WildcardPattern {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Star(WildcardPattern()) => true
- case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall unapply
- case EmptyTree => true
- case _ => false
- }
- }
-
- object PatternBoundToUnderscore {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall unapply
- case Typed(PatternBoundToUnderscore(), _) => true
- case _ => false
- }
+ override def rawSubPatTypes = aligner.extractor.varargsTypes
}
}
}
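
The two extractor objects added at the top of MatchTranslator are easiest to read against concrete source patterns. A minimal sketch; the function and its cases are hypothetical, not part of the patch:

    def sample(x: Any): Int = x match {
      case s: String => s.length // Bind(s, Typed(...)): binds an interesting symbol, so neither extractor discards it
      case _: Int    => 0        // Typed(Ident(_), ...): PatternBoundToUnderscore, but not WildcardPattern
      case _         => -1       // Ident(nme.WILDCARD): both WildcardPattern and PatternBoundToUnderscore
    }
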
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 7df03044aa..a80f158949 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -395,8 +395,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
lazy val outerTestNeeded = (
- !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+ (expectedTp.prefix ne NoPrefix)
+ && !expectedTp.prefix.typeSymbol.isPackageClass
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner)
+ )
// the logic to generate the run-time test that follows from the fact that
// a `prevBinder` is expected to have type `expectedTp`
@@ -406,44 +408,52 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
import cs._
- def default =
- // do type test first to ensure we won't select outer on null
- if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
- else typeTest(testedBinder, expectedTp)
-
// propagate expected type
def expTp(t: Tree): t.type = t setType expectedTp
+ def testedWide = testedBinder.info.widen
+ def expectedWide = expectedTp.widen
+ def isAnyRef = testedWide <:< AnyRefTpe
+ def isAsExpected = testedWide <:< expectedTp
+ def isExpectedPrimitiveType = isAsExpected && isPrimitiveValueType(expectedTp)
+ def isExpectedReferenceType = isAsExpected && (expectedTp <:< AnyRefTpe)
+ def mkNullTest = nonNullTest(testedBinder)
+ def mkOuterTest = outerTest(testedBinder, expectedTp)
+ def mkTypeTest = typeTest(testedBinder, expectedWide)
+
+ def mkEqualsTest(lhs: Tree): cs.Result = equalsTest(lhs, testedBinder)
+ def mkEqTest(lhs: Tree): cs.Result = eqTest(lhs, testedBinder)
+ def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res
+
+ // If we conform to expected primitive type:
+ // it cannot be null and cannot have an outer pointer. No further checking.
+ // If we conform to expected reference type:
+ // have to test outer and non-null
+ // If we do not conform to expected type:
+ // have to test type and outer (non-null is implied by successful type test)
+ def mkDefault = (
+ if (isExpectedPrimitiveType) tru
+ else addOuterTest(
+ if (isExpectedReferenceType) mkNullTest
+ else mkTypeTest
+ )
+ )
+
// true when called to type-test the argument to an extractor
// don't do any fancy equality checking, just test the type
- if (extractorArgTypeTest) default
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ if (extractorArgTypeTest) mkDefault
else expectedTp match {
- // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
- // this implies sym.isStable
- case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
- // must use == to support e.g. List() == Nil
- case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefTpe
- => eqTest(expTp(CODE.NULL), testedBinder)
- case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
- case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
-
- // TODO: verify that we don't need to special-case Array
- // I think it's okay:
- // - the isInstanceOf test includes a test for the element type
- // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if testedBinder.info.widen <:< expectedTp =>
- // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
- // since the types conform, no further checking is required
- if (isPrimitiveValueType(expectedTp)) tru
- // have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefTpe) {
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
- } else default
-
- case _ => default
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types - this implies sym.isStable
+ case SingleType(_, sym) => and(mkEqualsTest(gen.mkAttributedQualifier(expectedTp)), mkTypeTest)
+ case ThisType(sym) if sym.isModule => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil
+ case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL))
+ case ConstantType(const) => mkEqualsTest(expTp(Literal(const)))
+ case ThisType(sym) => mkEqTest(expTp(This(sym)))
+ case _ => mkDefault
}
}
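
Read together, the rewritten branches choose the cheapest sound runtime check for each expected type. A hedged illustration of that dispatch for a few surface patterns (the function is hypothetical; the comments paraphrase the strategy names above, they are not generated code):

    def describe(x: Any): String = x match {
      case Nil       => "empty"  // stable identifier: mkEqualsTest plus a widened mkTypeTest
      case 1         => "one"    // ConstantType: mkEqualsTest against the literal
      case null      => "null"   // null constant: mkEqTest(null) when the scrutinee is statically AnyRef, otherwise mkEqualsTest
      case s: String => "text"   // no static conformance: mkTypeTest, with mkOuterTest added only for inner classes
      case _         => "other"
    }
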
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
new file mode 100644
index 0000000000..e84ccbf754
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala
@@ -0,0 +1,155 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*]
+ * A case matches: P1, P2, ..., Pj, opt[Seq[E]]
+ * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]]
+ *
+ * Here Pi/Fi is the last pattern to match the fixed arity section.
+ *
+ * productArity: the value of i, i.e. the number of non-sequence types in the extractor
+ * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition
+ * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements
+ * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern
+ * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition
+ *
+ * Note that productArity is a function only of the extractor, and
+ * nonStar/star/totalArity are all functions of the patterns. The key
+ * value for aligning and typing the patterns is elementArity, as it
+ * is derived from both sets of information.
+ */
+trait PatternExpander[Pattern, Type] {
+ /** You'll note we're not inside the cake. "Pattern" and "Type" are
+ * arbitrary types here, and NoPattern and NoType arbitrary values.
+ */
+ def NoPattern: Pattern
+ def NoType: Type
+
+ /** It's not optimal that we're carrying both sequence and repeated
+ * type here, but the implementation requires more unraveling before
+ * it can be avoided.
+ *
+ * sequenceType is Seq[T], elementType is T, repeatedType is T*.
+ */
+ sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) {
+ def exists = elementType != NoType
+
+ def elementList = if (exists) elementType :: Nil else Nil
+ def sequenceList = if (exists) sequenceType :: Nil else Nil
+ def repeatedList = if (exists) repeatedType :: Nil else Nil
+
+ override def toString = s"${elementType}*"
+ }
+ object NoRepeated extends Repeated(NoType, NoType, NoType) {
+ override def toString = "<none>"
+ }
+
+ final case class Patterns(fixed: List[Pattern], star: Pattern) {
+ def hasStar = star != NoPattern
+ def starArity = if (hasStar) 1 else 0
+ def nonStarArity = fixed.length
+ def totalArity = nonStarArity + starArity
+ def starPatterns = if (hasStar) star :: Nil else Nil
+ def all = fixed ::: starPatterns
+
+ override def toString = all mkString ", "
+ }
+
+ /** An 'extractor' can be a case class or an unapply or unapplySeq method.
+ * Decoding what it is that they extract takes place before we arrive here,
+ * so that this class can concentrate only on the relationship between
+ * patterns and types.
+ *
+ * In a case class, the class is the unextracted type and the fixed and
+ * repeated types are derived from its constructor parameters.
+ *
+ * In an unapply, this is reversed: the parameter to the unapply is the
+ * unextracted type, and the other types are derived based on the return
+ * type of the unapply method.
+ *
+ * In other words, this case class and unapply are encoded the same:
+ *
+ * case class Foo(x: Int, y: Int, zs: Char*)
+ * def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])]
+ *
+ * Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*))
+ *
+ * @param whole The type in its unextracted form
+ * @param fixed The non-sequence types which are extracted
+ * @param repeated The sequence type which is extracted
+ */
+ final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated) {
+ require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)")
+
+ def productArity = fixed.length
+ def hasSeq = repeated.exists
+ def elementType = repeated.elementType
+ def sequenceType = repeated.sequenceType
+ def allTypes = fixed ::: repeated.sequenceList
+ def varargsTypes = fixed ::: repeated.repeatedList
+ def isErroneous = allTypes contains NoType
+
+ private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil )
+
+ def offeringString = if (isErroneous) "<error>" else typeStrings match {
+ case Nil => "Boolean"
+ case tp :: Nil => tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+ override def toString = "%s => %s".format(whole, offeringString)
+ }
+
+ final case class TypedPat(pat: Pattern, tpe: Type) {
+ override def toString = s"$pat: $tpe"
+ }
+
+ /** If elementArity is...
+ * 0: A perfect match between extractor and the fixed patterns.
+ * If there is a star pattern it will match any sequence.
+ * > 0: There are more patterns than products. There will have to be a
+ * sequence which can populate at least <elementArity> patterns.
+ * < 0: There are more products than patterns: compile time error.
+ */
+ final case class Aligned(patterns: Patterns, extractor: Extractor) {
+ def elementArity = patterns.nonStarArity - productArity
+ def productArity = extractor.productArity
+ def starArity = patterns.starArity
+ def totalArity = patterns.totalArity
+
+ def wholeType = extractor.whole
+ def sequenceType = extractor.sequenceType
+ def productTypes = extractor.fixed
+ def extractedTypes = extractor.allTypes
+ def typedNonStarPatterns = products ::: elements
+ def typedPatterns = typedNonStarPatterns ::: stars
+
+ def isBool = !isSeq && productArity == 0
+ def isSingle = !isSeq && totalArity == 1
+ def isStar = patterns.hasStar
+ def isSeq = extractor.hasSeq
+
+ private def typedAsElement(pat: Pattern) = TypedPat(pat, extractor.elementType)
+ private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType)
+ private def productPats = patterns.fixed take productArity
+ private def elementPats = patterns.fixed drop productArity
+ private def products = (productPats, productTypes).zipped map TypedPat
+ private def elements = elementPats map typedAsElement
+ private def stars = patterns.starPatterns map typedAsSequence
+
+ override def toString = s"""
+ |Aligned {
+ | patterns $patterns
+ | extractor $extractor
+ | arities $productArity/$elementArity/$starArity // product/element/star
+ | typed ${typedPatterns mkString ", "}
+ |}""".stripMargin.trim
+ }
+}
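
A hypothetical example may help relate these arities to source syntax; the case class and match below are illustrative only:

    case class Foo(x: Int, y: Int, zs: Char*)

    def demo(foo: Foo): Int = foo match {
      // Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*)); productArity = 2
      case Foo(a, b, c, rest @ _*) =>
        // nonStarArity = 3, starArity = 1, totalArity = 4, elementArity = 3 - 2 = 1:
        // `c` is typed as the element type Char, `rest` as the sequence type Seq[Char]
        a + b + c + rest.length
      case Foo(a, b) =>
        // nonStarArity = totalArity = 2, elementArity = 0: the fixed section matches exactly,
        // so the varargs part must be empty
        a + b
    }
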
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index 394ba98f17..f6c960d089 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -34,7 +34,8 @@ import scala.reflect.internal.util.Position
* - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
* - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
*/
-trait PatternMatching extends Transform with TypingTransformers
+trait PatternMatching extends Transform
+ with TypingTransformers
with Debugging
with Interface
with MatchTranslation
@@ -45,7 +46,8 @@ trait PatternMatching extends Transform with TypingTransformers
with Solving
with MatchAnalysis
with MatchOptimization
- with MatchWarnings {
+ with MatchWarnings
+ with ScalacPatternExpanders {
import global._
val phaseName: String = "patmat"
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
new file mode 100644
index 0000000000..7858cb5586
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -0,0 +1,154 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package transform
+package patmat
+
+/** This is scalac-specific logic layered on top of the scalac-agnostic
+ * "matching products to patterns" logic defined in PatternExpander.
+ */
+trait ScalacPatternExpanders {
+ val global: Global
+
+ import global._
+ import definitions._
+ import treeInfo._
+
+ type PatternAligned = ScalacPatternExpander#Aligned
+
+ implicit class AlignedOps(val aligned: PatternAligned) {
+ import aligned._
+ def expectedTypes = typedPatterns map (_.tpe)
+ def unexpandedFormals = extractor.varargsTypes
+ }
+ trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
+ def NoPattern = EmptyTree
+ def NoType = global.NoType
+
+ def newPatterns(patterns: List[Tree]): Patterns = patterns match {
+ case init :+ last if isStar(last) => Patterns(init, last)
+ case _ => Patterns(patterns, NoPattern)
+ }
+ def elementTypeOf(tpe: Type) = {
+ val seq = repeatedToSeq(tpe)
+
+ ( typeOfMemberNamedHead(seq)
+ orElse typeOfMemberNamedApply(seq)
+ orElse definitions.elementType(ArrayClass, seq)
+ )
+ }
+ def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor =
+ logResult(s"newExtractor($whole, $fixed, $repeated)")(Extractor(whole, fixed, repeated))
+
+ // Turn Seq[A] into Repeated(Seq[A], A, A*)
+ def repeatedFromSeq(seqType: Type): Repeated = {
+ val elem = elementTypeOf(seqType)
+ val repeated = scalaRepeatedType(elem)
+
+ Repeated(seqType, elem, repeated)
+ }
+ // Turn A* into Repeated(Seq[A], A, A*)
+ def repeatedFromVarargs(repeated: Type): Repeated =
+ Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated)
+
+ /** In this case we are basing the pattern expansion on a case class constructor.
+ * The argument is the MethodType carried by the primary constructor.
+ */
+ def applyMethodTypes(method: Type): Extractor = {
+ val whole = method.finalResultType
+
+ method.paramTypes match {
+ case init :+ last if isScalaRepeatedParamType(last) => newExtractor(whole, init, repeatedFromVarargs(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+
+ /** In this case, expansion is based on an unapply or unapplySeq method.
+ * Unfortunately the MethodType does not carry the information of whether
+ * it was unapplySeq, so we have to funnel that information in separately.
+ */
+ def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
+ val whole = firstParamType(method)
+ val result = method.finalResultType
+ val expanded = (
+ if (result =:= BooleanTpe) Nil
+ else typeOfMemberNamedGet(result) match {
+ case rawGet if !hasSelectors(rawGet) => rawGet :: Nil
+ case rawGet => typesOfSelectors(rawGet)
+ }
+ )
+ expanded match {
+ case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last))
+ case tps => newExtractor(whole, tps, NoRepeated)
+ }
+ }
+ }
+ object alignPatterns extends ScalacPatternExpander {
+ /** Converts a T => (A, B, C) extractor to a T => ((A, B, C)) extractor.
+ */
+ def tupleExtractor(extractor: Extractor): Extractor =
+ extractor.copy(fixed = tupleType(extractor.fixed) :: Nil)
+
+ private def validateAligned(tree: Tree, aligned: Aligned): Aligned = {
+ import aligned._
+
+ def owner = tree.symbol.owner
+ def offering = extractor.offeringString
+ def symString = tree.symbol.fullLocationString
+ def offerString = if (extractor.isErroneous) "" else s" offering $offering"
+ def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity
+
+ def err(msg: String) = currentUnit.error(tree.pos, msg)
+ def warn(msg: String) = currentUnit.warning(tree.pos, msg)
+ def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity")
+
+ if (isStar && !isSeq)
+ err("Star pattern must correspond with varargs or unapplySeq")
+ else if (elementArity < 0)
+ arityError("not enough")
+ else if (elementArity > 0 && !extractor.hasSeq)
+ arityError("too many")
+
+ aligned
+ }
+
+ def apply(sel: Tree, args: List[Tree]): Aligned = {
+ val fn = sel match {
+ case Unapplied(fn) => fn
+ case _ => sel
+ }
+ val patterns = newPatterns(args)
+ val isSeq = sel.symbol.name == nme.unapplySeq
+ val isUnapply = sel.symbol.name == nme.unapply
+ val extractor = sel.symbol.name match {
+ case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+ case _ => applyMethodTypes(fn.tpe)
+ }
+
+ /** Rather than let the error that is SI-6675 pollute the entire matching
+ * process, we will tuple the extractor before creating the Aligned so that
+ * it contains known good values.
+ */
+ def productArity = extractor.productArity
+ def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}"
+ val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1
+
+ if (settings.lint && requiresTupling && effectivePatternArity(args) == 1)
+ currentUnit.warning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+ val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor
+ validateAligned(fn, Aligned(patterns, normalizedExtractor))
+ }
+
+ def apply(tree: Tree): Aligned = tree match {
+ case Apply(fn, args) => apply(fn, args)
+ case UnApply(fn, args) => apply(fn, args)
+ }
+ }
+}
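
The `requiresTupling` branch above covers the SI-6675 case: a single pattern offered a multi-value extractor result. A small sketch under that assumption (the extractor and method names are hypothetical):

    object Pair {
      def unapply(s: String): Option[(Int, Int)] = Some((s.length, s.hashCode))
    }

    def crush(s: String): (Int, Int) = s match {
      // totalArity = 1 but productArity = 2: the extractor is tupled so the single
      // pattern binds the whole (Int, Int), and -Xlint reports the crush (SI-6675).
      case Pair(p) => p
    }
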
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index cd6b77404d..4d0eda2377 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -625,8 +625,7 @@ trait ContextErrors {
setError(tree)
}
- def CaseClassConstructorError(tree: Tree) = {
- val baseMessage = tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method"
+ def CaseClassConstructorError(tree: Tree, baseMessage: String) = {
val addendum = directUnapplyMember(tree.symbol.info) match {
case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
case _ => ""
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 14695f5939..27e8698676 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -122,10 +122,31 @@ trait Namers extends MethodSynthesis {
|| (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
|| (vd.name startsWith nme.OUTER)
|| (context.unit.isJava)
+ || isEnumConstant(vd)
)
+
def noFinishGetterSetter(vd: ValDef) = (
(vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
- || vd.symbol.isModuleVar)
+ || vd.symbol.isModuleVar
+ || isEnumConstant(vd))
+
+ /** Determines whether this field holds an enum constant.
+ * To qualify, the following conditions must be met:
+ * - The field's class has the ENUM flag set
+ * - The field's class extends java.lang.Enum
+ * - The field has the ENUM flag set
+ * - The field is static
+ * - The field is stable
+ */
+ def isEnumConstant(vd: ValDef) = {
+ val ownerHasEnumFlag =
+ // Necessary to check because scalac puts Java's static members into the companion object
+ // while Scala's enum constants live directly in the class.
+ // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
+ // cyclic reference error. See the commit message for details.
+ if (context.unit.isJava) owner.companionClass.hasEnumFlag else owner.hasEnumFlag
+ vd.mods.hasAllFlags(ENUM | STABLE | STATIC) && ownerHasEnumFlag
+ }
def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
@@ -620,11 +641,7 @@ trait Namers extends MethodSynthesis {
else
enterGetterSetter(tree)
- // When java enums are read from bytecode, they are known to have
- // constant types by the jvm flag and assigned accordingly. When
- // they are read from source, the java parser marks them with the
- // STABLE flag, and now we receive that signal.
- if (tree.symbol hasAllFlags STABLE | JAVA)
+ if (isEnumConstant(tree))
tree.symbol setInfo ConstantType(Constant(tree.symbol))
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index 069d6d5fb2..41c656f8ce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -78,26 +78,34 @@ trait PatternTypers {
// Do some ad-hoc overloading resolution and update the tree's symbol and type
// do not update the symbol if the tree's symbol's type does not define an unapply member
// (e.g. since it's some method that returns an object with an unapply member)
- val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
- def caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+ val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ val caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+ val member = unapplyMember(fun.tpe)
+ def resultType = (fun.tpe memberType member).finalResultType
+ def isEmptyType = resultOfMatchingMethod(resultType, nme.isEmpty)()
+ def isOkay = (
+ resultType.isErroneous
+ || (resultType <:< BooleanTpe)
+ || (isEmptyType <:< BooleanTpe)
+ || member.isMacro
+ || member.isOverloaded // the whole overloading situation is over the rails
+ )
// Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
// A case class with 23+ params has no unapply method.
- // A case class constructor be overloaded with unapply methods in the companion.
- if (caseClass.isCase && !unapplyMember(fun.tpe).isOverloaded)
+ // A case class constructor may be overloaded with unapply methods in the companion.
+ if (caseClass.isCase && !member.isOverloaded)
logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt))
- else if (hasUnapplyMember(fun))
+ else if (!reallyExists(member))
+ CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member")
+ else if (isOkay)
fun
+ else if (isEmptyType == NoType)
+ CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean")
else
- CaseClassConstructorError(fun)
+ CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: $isEmptyType)")
}
- def expectedPatternTypes(fun: Tree, args: List[Tree]): List[Type] =
- newExtractorShape(fun, args).expectedPatternTypes
-
- def typedPatternArgs(fun: Tree, args: List[Tree], mode: Mode): List[Tree] =
- typedArgsForFormals(args, newExtractorShape(fun, args).formals, mode)
-
def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
def typedArgWithFormal(arg: Tree, pt: Type) = {
val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
@@ -158,109 +166,6 @@ trait PatternTypers {
case _ => wrapClassTagUnapply(treeTyped, extractor, tpe)
}
}
-
- def newExtractorShape(fun: Tree, args: List[Tree]): ExtractorShape = ExtractorShape(fun, args)
-
- case class CaseClassInfo(clazz: Symbol, classType: Type) {
- def constructor = clazz.primaryConstructor
- def constructorType = classType.prefix memberType clazz memberType constructor
- def accessors = clazz.caseFieldAccessors
- }
- object NoCaseClassInfo extends CaseClassInfo(NoSymbol, NoType) {
- override def toString = "NoCaseClassInfo"
- }
-
- case class UnapplyMethodInfo(unapply: Symbol, tpe: Type) {
- def name = unapply.name
- def isUnapplySeq = name == nme.unapplySeq
- def unapplyType = tpe memberType method
- def resultType = tpe.finalResultType
- def method = unapplyMember(tpe)
- def paramType = firstParamType(unapplyType)
- def rawGet = if (isBool) UnitTpe else typeOfMemberNamedGetOrSelf(resultType)
- def rawTypes = if (isBool) Nil else typesOfSelectorsOrSelf(rawGet)
- def isBool = resultType =:= BooleanTpe // aka "Tuple0" or "Option[Unit]"
- }
-
- object NoUnapplyMethodInfo extends UnapplyMethodInfo(NoSymbol, NoType) {
- override def toString = "NoUnapplyMethodInfo"
- }
-
- case class ExtractorShape(fun: Tree, args: List[Tree]) {
- def pos = fun.pos
- private def symbol = fun.symbol
- private def tpe = fun.tpe
-
- val ccInfo = tpe.typeSymbol.linkedClassOfClass match {
- case clazz if clazz.isCase => CaseClassInfo(clazz, tpe)
- case _ => NoCaseClassInfo
- }
- val exInfo = UnapplyMethodInfo(symbol, tpe)
- import exInfo.{ rawGet, rawTypes, isUnapplySeq }
-
- override def toString = s"ExtractorShape($fun, $args)"
-
- def unapplyMethod = exInfo.method
- def unapplyType = exInfo.unapplyType
- def unapplyParamType = exInfo.paramType
- def enclClass = symbol.enclClass
-
- // TODO - merge these. The difference between these two methods is that expectedPatternTypes
- // expands the list of types so it is the same length as the number of patterns, whereas formals
- // leaves the varargs type unexpanded.
- def formals = (
- if (isUnapplySeq) productTypes :+ varargsType
- else if (elementArity == 0) productTypes
- else if (isSingle) squishIntoOne()
- else wrongArity(patternFixedArity)
- )
- def expectedPatternTypes = elementArity match {
- case 0 => productTypes
- case _ if elementArity > 0 && isUnapplySeq => productTypes ::: elementTypes
- case _ if productArity > 1 && patternFixedArity == 1 => squishIntoOne()
- case _ => wrongArity(patternFixedArity)
- }
-
- def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
-
- private def hasBogusExtractor = directUnapplyMember(tpe).exists && !unapplyMethod.exists
- private def expectedArity = "" + productArity + ( if (isUnapplySeq) "+" else "")
- private def wrongArityMsg(n: Int) = (
- if (hasBogusExtractor) s"$enclClass does not define a valid extractor method"
- else s"wrong number of patterns for $enclClass offering $rawTypes_s: expected $expectedArity, found $n"
- )
- private def rawTypes_s = rawTypes match {
- case Nil => "()"
- case tp :: Nil => "" + tp
- case tps => tps.mkString("(", ", ", ")")
- }
-
- private def err(msg: String) = { unit.error(pos, msg) ; throw new TypeError(msg) }
- private def wrongArity(n: Int) = err(wrongArityMsg(n))
-
- def squishIntoOne() = {
- if (settings.lint)
- unit.warning(pos, s"$enclClass expects $expectedArity patterns to hold $rawGet but crushing into $productArity-tuple to fit single pattern (SI-6675)")
-
- rawGet :: Nil
- }
- // elementArity is the number of non-sequence patterns minus the
- // the number of non-sequence product elements returned by the extractor.
- // If it is zero, there is a perfect match between those parts, and
- // if there is a wildcard star it will match any sequence.
- // If it is positive, there are more patterns than products,
- // so a sequence will have to fill in the elements. If it is negative,
- // there are more products than patterns, which is a compile time error.
- def elementArity = patternFixedArity - productArity
- def patternFixedArity = treeInfo effectivePatternArity args
- def productArity = productTypes.size
- def isSingle = !isUnapplySeq && (patternFixedArity == 1)
-
- def productTypes = if (isUnapplySeq) rawTypes dropRight 1 else rawTypes
- def elementTypes = List.fill(elementArity)(elementType)
- def varargsType = scalaRepeatedType(elementType)
- }
-
private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
@@ -365,10 +270,12 @@ trait PatternTypers {
case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
case _ => UnapplyWithSingleArgError(fun) ; ErrorType
}
- val shape = newExtractorShape(fun, args)
- import shape.{ unapplyParamType, unapplyType, unapplyMethod }
+ val unapplyMethod = unapplyMember(fun.tpe)
+ val unapplyType = fun.tpe memberType unapplyMethod
+ val unapplyParamType = firstParamType(unapplyType)
+ def isSeq = unapplyMethod.name == nme.unapplySeq
- def extractor = extractorForUncheckedType(shape.pos, unapplyParamType)
+ def extractor = extractorForUncheckedType(fun.pos, unapplyParamType)
def canRemedy = unapplyParamType match {
case RefinedType(_, decls) if !decls.isEmpty => false
case RefinedType(parents, _) if parents exists isUncheckable => false
@@ -400,7 +307,8 @@ trait PatternTypers {
// the union of the expected type and the inferred type of the argument to unapply
val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
- val args1 = typedPatternArgs(fun1, args, mode)
+ val formals = patmat.alignPatterns(fun1, args).unexpandedFormals
+ val args1 = typedArgsForFormals(args, formals, mode)
val result = UnApply(fun1, args1) setPos tree.pos setType glbType
if (wrapInTypeTest)
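
The new error messages spell out the name-based shape an unapply result may take when it is neither a Boolean nor Option-like: it must expose `def isEmpty: Boolean` (elsewhere in the translation a `get` member supplies the extracted value). A hedged sketch of a result type satisfying that check, with hypothetical names:

    final class NameResult(val get: String) {
      def isEmpty: Boolean = get == null
    }

    object Name {
      def unapply(s: String): NameResult = new NameResult(if (s.nonEmpty) s else null)
    }

    def firstOrDefault(s: String): String = s match {
      case Name(n) => n          // accepted: NameResult offers isEmpty: Boolean and get
      case _       => "<empty>"
    }
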
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index 0722d808bf..4795a47efe 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -2,21 +2,54 @@ This is the documentation for the Scala standard library.
== Package structure ==
-The [[scala]] package contains core types.
-
-[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation.
-
-Other important packages include:
-
- - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang.
- - [[scala.io `scala.io`]] - Input and output.
- - [[scala.math `scala.math`]] - Basic math functions and additional numeric types.
- - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system.
- - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions.
- - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing.
- - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization.
-
-Many other packages exist. See the complete list on the left.
+The [[scala]] package contains core types like [[scala.Int `Int`]], [[scala.Float `Float`]], [[scala.Array `Array`]]
+or [[scala.Option `Option`]] which are accessible in all Scala compilation units without explicit qualification or
+imports.
+
+Notable packages include:
+
+ - [[scala.collection `scala.collection`]] and its sub-packages contain Scala's collections framework
+ - [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as
+ [[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]],
+ [[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or
+ [[scala.collection.immutable.HashSet `HashSet`]]
+ - [[scala.collection.mutable `scala.collection.mutable`]] - Mutable, sequential data-structures such as
+ [[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]],
+ [[scala.collection.mutable.StringBuilder `StringBuilder`]],
+ [[scala.collection.mutable.HashMap `HashMap`]] or [[scala.collection.mutable.HashSet `HashSet`]]
+ - [[scala.collection.concurrent `scala.collection.concurrent`]] - Mutable, concurrent data-structures such as
+ [[scala.collection.concurrent.TrieMap `TrieMap`]]
+ - [[scala.collection.parallel.immutable `scala.collection.parallel.immutable`]] - Immutable, parallel
+ data-structures such as [[scala.collection.parallel.immutable.ParVector `ParVector`]],
+ [[scala.collection.parallel.immutable.ParRange `ParRange`]],
+ [[scala.collection.parallel.immutable.ParHashMap `ParHashMap`]] or
+ [[scala.collection.parallel.immutable.ParHashSet `ParHashSet`]]
+ - [[scala.collection.parallel.mutable `scala.collection.parallel.mutable`]] - Mutable, parallel
+ data-structures such as [[scala.collection.parallel.mutable.ParArray `ParArray`]],
+ [[scala.collection.parallel.mutable.ParHashMap `ParHashMap`]],
+ [[scala.collection.parallel.mutable.ParTrieMap `ParTrieMap`]] or
+ [[scala.collection.parallel.mutable.ParHashSet `ParHashSet`]]
+ - [[scala.concurrent `scala.concurrent`]] - Primitives for concurrent programming such as
+ [[scala.concurrent.Future `Futures`]] and [[scala.concurrent.Promise `Promises`]]
+ - [[scala.io `scala.io`]] - Input and output operations
+ - [[scala.math `scala.math`]] - Basic math functions and additional numeric types like
+ [[scala.math.BigInt `BigInt`]] and [[scala.math.BigDecimal `BigDecimal`]]
+ - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system
+ - [[scala.util.matching `scala.util.matching`]] - [[scala.util.matching.Regex Regular expressions]]
+
+Other packages exist. See the complete list on the left.
+
+Additional parts of the standard library are shipped as separate libraries. These include:
+
+ - [[scala.reflect `scala.reflect`]] - Scala's reflection API (scala-reflect.jar)
+ - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar)
+ - [[scala.swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar)
+ - [[scala.util.continuations `scala.util.continuations`]] - Delimited continuations using continuation-passing-style
+ (scala-continuations-library.jar, scala-continuations-plugin.jar)
+ - [[scala.util.parsing `scala.util.parsing`]] - [[scala.util.parsing.combinator Parser combinators]], including an
+ example implementation of a [[scala.util.parsing.json JSON parser]] (scala-parser-combinators.jar)
+ - [[scala.actors `scala.actors`]] - Actor-based concurrency (deprecated and replaced by Akka actors,
+ scala-actors.jar)
== Automatic imports ==
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index e265beca1b..c3bad60072 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -38,8 +38,8 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* Example:
*
* {{{
- * scala> val x = LinkedList(1, 2, 3, 4, 5)
- * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4, 5)
+ * scala> val x = List(1, 2, 3, 4, 5)
+ * x: List[Int] = List(1, 2, 3, 4, 5)
*
* scala> x(3)
* res1: Int = 4
@@ -302,14 +302,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* Example:
* {{{
- * scala> val x = LinkedList(1)
- * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
*
* scala> val y = 2 +: x
- * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1)
+ * y: List[Int] = List(2, 1)
*
* scala> println(x)
- * LinkedList(1)
+ * List(1)
* }}}
*
* @return a new $coll consisting of `elem` followed
@@ -335,17 +335,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* Example:
* {{{
- * scala> import scala.collection.mutable.LinkedList
- * import scala.collection.mutable.LinkedList
- *
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
- *
+ * scala> val a = List(1)
+ * a: List[Int] = List(1)
+ *
* scala> val b = a :+ 2
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- *
+ * b: List[Int] = List(1, 2)
+ *
* scala> println(a)
- * LinkedList(1)
+ * List(1)
* }}}
*
* @return a new $coll consisting of
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index a0c519884c..ca098e57b9 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -267,20 +267,20 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
*
* Example:
* {{{
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
- *
- * scala> val b = LinkedList(2)
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
- *
+ * scala> val a = List(1)
+ * a: List[Int] = List(1)
+ *
+ * scala> val b = List(2)
+ * b: List[Int] = List(2)
+ *
* scala> val c = a ++ b
- * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- *
- * scala> val d = LinkedList('a')
- * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
- *
+ * c: List[Int] = List(1, 2)
+ *
+ * scala> val d = List('a')
+ * d: List[Char] = List(a)
+ *
* scala> val e = c ++ d
- * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+ * e: List[AnyVal] = List(1, 2, a)
* }}}
*
* @return a new $coll which contains all elements of this $coll
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a9fe279599..01d179aeb6 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -130,8 +130,8 @@ trait GenTraversableOnce[+A] extends Any {
*
* Note that the folding function used to compute b is equivalent to that used to compute c.
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
*
* scala> val b = (5 /: a)(_+_)
* b: Int = 15
@@ -167,8 +167,8 @@ trait GenTraversableOnce[+A] extends Any {
*
* Note that the folding function used to compute b is equivalent to that used to compute c.
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
*
* scala> val b = (a :\ 5)(_+_)
* b: Int = 15
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 26af32046c..072fd3da44 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -320,14 +320,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
- * b: StringBuilder =
- *
- * scala> a.addString(b, "LinkedList(", ", ", ")")
- * res1: StringBuilder = LinkedList(1, 2, 3, 4)
+ * b: StringBuilder =
+ *
+ * scala> a.addString(b, "List(", ", ", ")")
+ * res5: StringBuilder = List(1, 2, 3, 4)
* }}}
*
* @param b the string builder to which elements are appended.
@@ -362,9 +362,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
@@ -385,14 +385,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
* scala> val h = a.addString(b)
- * b: StringBuilder = 1234
+ * h: StringBuilder = 1234
* }}}
* @param b the string builder to which elements are appended.
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 56f1802509..14ae57c43a 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -102,8 +102,14 @@ private[collection] trait Wrappers {
override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
+ // Note various overrides to avoid performance gotchas.
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
self =>
+ override def contains(o: Object): Boolean = {
+ try { underlying.contains(o.asInstanceOf[A]) }
+ catch { case cce: ClassCastException => false }
+ }
+ override def isEmpty = underlying.isEmpty
def size = underlying.size
def iterator = new ju.Iterator[A] {
val ui = underlying.iterator
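
The contains override matters because ju.AbstractCollection implements contains by iterating, and callers of a java.util.Set generally expect false rather than a ClassCastException for elements of a foreign type; isEmpty is forwarded directly instead of going through size. A usage sketch, assuming the standard JavaConverters decorators:

    import scala.collection.JavaConverters._

    object SetWrapperDemo extends App {
      val jset: java.util.Set[Int] = Set(1, 2, 3).asJava
      println(jset.contains(2: java.lang.Integer)) // true, delegated to the Scala set
      println(jset.contains("two"))                // false rather than a ClassCastException
      println(jset.isEmpty)                        // false, answered without iterating
    }
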
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 486c2b6c8f..249d76584d 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -175,9 +175,36 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
catch { case _: ClassCastException => false }
final override def sum[B >: T](implicit num: Numeric[B]): B = {
- if (isEmpty) this.num fromInt 0
- else if (numRangeElements == 1) head
- else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
+ // arithmetic series formula can be used for regular addition
+ if ((num eq scala.math.Numeric.IntIsIntegral)||
+ (num eq scala.math.Numeric.BigIntIsIntegral)||
+ (num eq scala.math.Numeric.ShortIsIntegral)||
+ (num eq scala.math.Numeric.ByteIsIntegral)||
+ (num eq scala.math.Numeric.CharIsIntegral)||
+ (num eq scala.math.Numeric.LongIsIntegral)||
+ (num eq scala.math.Numeric.FloatAsIfIntegral)||
+ (num eq scala.math.Numeric.BigDecimalIsFractional)||
+ (num eq scala.math.Numeric.DoubleAsIfIntegral)) {
+ val numAsIntegral = num.asInstanceOf[Integral[B]]
+ import numAsIntegral._
+ if (isEmpty) num fromInt 0
+ else if (numRangeElements == 1) head
+ else ((num fromInt numRangeElements) * (head + last) / (num fromInt 2))
+ } else {
+ // user provided custom Numeric, we cannot rely on arithmetic series formula
+ if (isEmpty) num.zero
+ else {
+ var acc = num.zero
+ var i = head
+ var idx = 0
+ while(idx < length) {
+ acc = num.plus(acc, i)
+ i = i + step
+ idx = idx + 1
+ }
+ acc
+ }
+ }
}
override lazy val hashCode = super.hashCode()
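
The branch above trusts the arithmetic-series formula only for the Numeric instances it can recognize by reference; any other instance may define plus arbitrarily, so the sum is accumulated element by element. A small sketch (the MaxLong instance is hypothetical, defined only to exercise the fallback):

    object NumericRangeSumDemo extends App {
      val r = 1L to 100L                 // NumericRange.Inclusive[Long]
      println(r.sum)                     // 5050: series formula, standard Numeric recognized via eq

      object MaxLong extends Numeric.LongIsIntegral with Ordering.LongOrdering {
        override def plus(x: Long, y: Long): Long = math.max(x, y)
      }
      println(r.sum(MaxLong))            // 100: element-by-element fallback with the custom plus
    }
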
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 589661a343..3a64820be6 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -188,7 +188,10 @@ extends scala.collection.AbstractSeq[T]
val s = start + _start
val e = if (_end == UndeterminedEnd) _end else start + _end
var f = first1
- while (f.end <= s && !f.isLast) f = f.next
+ while (f.end <= s && !f.isLast) {
+ if (f.next eq null) f.addMore(more)
+ f = f.next
+ }
new PagedSeq(more, f, s, e)
}
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 00f398a4b0..786b18cd21 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -259,9 +259,24 @@ extends scala.collection.AbstractSeq[Int]
final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0)
final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
- if (isEmpty) 0
- else if (numRangeElements == 1) head
- else (numRangeElements.toLong * (head + last) / 2).toInt
+ if (num eq scala.math.Numeric.IntIsIntegral) {
+ // this is normal integer range with usual addition. arithmetic series formula can be used
+ if (isEmpty) 0
+ else if (numRangeElements == 1) head
+ else (numRangeElements.toLong * (head + last) / 2).toInt
+ } else {
+ // user provided custom Numeric, we cannot rely on arithmetic series formula
+ if (isEmpty) num.toInt(num.zero)
+ else {
+ var acc = num.zero
+ var i = head
+ while(i != terminalElement) {
+ acc = num.plus(acc, i)
+ i = i + step
+ }
+ num.toInt(acc)
+ }
+ }
}
override def toIterable = this
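
Not part of the diff: the closed form kept above for the default IntIsIntegral instance, checked against a plain fold.

    object RangeSumSketch {
      def main(args: Array[String]): Unit = {
        val r = 1 to 100
        val byFormula = (r.length.toLong * (r.head + r.last) / 2).toInt
        val byFold    = r.foldLeft(0)(_ + _)
        assert(byFormula == byFold) // both are 5050
        println(byFormula)
      }
    }
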
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 7129f22f60..986cd0390f 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -35,15 +35,3 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
}
-
-// Manual stand-ins for formerly specialized variations.
-// Not comprehensive, only sufficent to run scala-check built scala 2.11.0-M5
-// TODO Scala 2.10.0.M6 Remove this once scalacheck is published against M6.
-private[runtime] abstract class AbstractPartialFunction$mcIL$sp extends scala.runtime.AbstractPartialFunction[Any, Int] {
- override def apply(x: Any): Int = apply$mcIL$sp(x)
- def apply$mcIL$sp(x: Any): Int = applyOrElse(x, PartialFunction.empty)
-}
-private[runtime] abstract class AbstractPartialFunction$mcFL$sp extends scala.runtime.AbstractPartialFunction[Any, Float] {
- override def apply(x: Any): Float = apply$mcIL$sp(x)
- def apply$mcIL$sp(x: Any): Float = applyOrElse(x, PartialFunction.empty)
-}
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 13f2362d00..d597feb898 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -173,7 +173,7 @@ private[scala] trait PropertiesTrait {
* isJavaAtLeast("1.6") // true
* isJavaAtLeast("1.7") // true
* isJavaAtLeast("1.8") // false
- * }}
+ * }}}
*/
def isJavaAtLeast(version: String): Boolean = {
def parts(x: String) = {
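
Not part of the diff: the corrected scaladoc example above in runnable form; the printed results naturally depend on the JVM running it.

    import scala.util.Properties

    object JavaVersionSketch {
      def main(args: Array[String]): Unit = {
        println(Properties.isJavaAtLeast("1.6")) // true on any JVM this release supports
        println(Properties.isJavaAtLeast("1.8")) // true only on Java 8 or newer
      }
    }
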
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index f48b99bd5a..6b3be8b77f 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -39,16 +39,16 @@ object scala extends Command {
CmdOptionBound("howtorun:", Argument("how")),
"How to execute " & Argument("torun") & ", if it is present. " &
"Options for " & Argument("how") & " are " & Mono("guess") &
- " (the default), " & Mono("script") & ", and " & Mono("object") &
+ " (the default), " & Mono("script") & ", " & Mono("jar") & ", and " & Mono("object") &
"."),
Definition(
- CmdOption("i"),
+ CmdOption("i", Argument("file")),
"Requests that a file be pre-loaded. It is only " &
"meaningful for interactive shells."),
Definition(
- CmdOption("e"),
+ CmdOption("e", Argument("string")),
"Requests that its argument be executed as Scala code."),
Definition(
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 1c0c7c4a96..52e918595c 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -109,6 +109,9 @@ object scalac extends Command {
CmdOption("extdirs", Argument("dirs")),
"Override location of installed extensions."),
Definition(
+ CmdOption("feature"),
+ "Emit warning and location for usages of features that should be imported explicitly."),
+ Definition(
CmdOptionBound("g:", "{none,source,line,vars,notailcalls}"),
SeqPara(
Mono("\"none\"") & " generates no debugging info,",
@@ -128,6 +131,9 @@ object scalac extends Command {
CmdOption("javaextdirs", Argument("path")),
"Override Java extdirs classpath."),
Definition(
+ CmdOptionBound("language:", Argument("feature")),
+ "Enable one or more language features."),
+ Definition(
CmdOption("no-specialization"),
"Ignore " & MItalic("@specialize") & " annotations."),
Definition(
@@ -146,6 +152,12 @@ object scalac extends Command {
CmdOption("sourcepath", Argument("path")),
"Specify location(s) of source files."),
Definition(
+ CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7}"),
+ SeqPara(
+ Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),",
+ Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),",
+ Mono("\"jvm-1.7\"") & " target JVM 1.7,")),
+ Definition(
CmdOption("toolcp", Argument("path")),
"Add to the runner classpath."),
Definition(
@@ -159,6 +171,12 @@ object scalac extends Command {
CmdOption("uniqid"),
"Uniquely tag all identifiers in debugging output."),
Definition(
+ CmdOption("usejavacp"),
+ "Utilize the java.class.path in classpath resolution."),
+ Definition(
+ CmdOption("usemanifestcp"),
+ "Utilize the manifest in classpath resolution."),
+ Definition(
CmdOption("verbose"),
"Output messages about what the compiler is doing"),
Definition(
@@ -175,11 +193,11 @@ object scalac extends Command {
Section("Advanced Options",
DefinitionList(
Definition(
- CmdOption("Xcheck-null"),
- "Warn upon selection of nullable reference"),
- Definition(
CmdOption("Xcheckinit"),
"Wrap field accessors to throw an exception on uninitialized access."),
+ Definition(
+ CmdOption("Xdev"),
+ "Enable warnings for developers working on the Scala compiler."),
Definition(
CmdOption("Xdisable-assertions"),
"Generate no assertions and assumptions"),
@@ -193,6 +211,9 @@ object scalac extends Command {
Definition(
CmdOption("Xfatal-warnings"),
"Fail the compilation if there are any warnings."),
+ Definition(
+ CmdOption("Xfull-lubs"),
+ "Retain pre 2.10 behavior of less aggressive truncation of least upper bounds."),
Definition(
CmdOption("Xfuture"),
"Turn on future language features."),
@@ -202,18 +223,39 @@ object scalac extends Command {
Definition(
CmdOption("Xlint"),
"Enable recommended additional warnings."),
+ Definition(
+ CmdOption("Xlog-free-terms"),
+ "Print a message when reification creates a free term."),
+ Definition(
+ CmdOption("Xlog-free-types"),
+ "Print a message when reification resorts to generating a free type."),
+ Definition(
+ CmdOption("Xlog-implicit-conversions"),
+ "Print a message whenever an implicit conversion is inserted."),
Definition(
CmdOption("Xlog-implicits"),
"Show more detail on why some implicits are not applicable."),
+ Definition(
+ CmdOption("Xlog-reflective-calls"),
+ "Print a message when a reflective method call is generated."),
+ Definition(
+ CmdOptionBound("Xmacro-settings:", Argument("option")),
+ "Custom settings for macros."),
+ Definition(
+ CmdOption("Xmain-class", Argument("path")),
+ "Class for manifest's Main-Class entry (only useful with -d <jar>)."),
Definition(
CmdOption("Xmax-classfile-name", Argument("n")),
"Maximum filename length for generated classes."),
Definition(
- CmdOption("Xmigration"),
- "Warn about constructs whose behavior may have changed between 2.7 and 2.8."),
+ CmdOptionBound("Xmigration:", Argument("version")),
+ "Warn about constructs whose behavior may have changed since " & Argument("version") & "."),
Definition(
CmdOption("Xno-forwarders"),
"Do not generate static forwarders in mirror classes."),
+ Definition(
+ CmdOption("Xno-patmat-analysis"),
+ "Don't perform exhaustivity/unreachability analysis. Also, ignore " & MItalic("@switch") & " annotation."),
Definition(
CmdOption("Xno-uescape"),
"Disable handling of " & BSlash & "u unicode escapes"),
@@ -221,26 +263,26 @@ object scalac extends Command {
CmdOption("Xnojline"),
"Do not use JLine for editing."),
Definition(
- CmdOptionBound("Xplugin:", Argument("file")),
- "Load a plugin from a file"),
+ CmdOptionBound("Xplugin:", Argument("paths")),
+ "Load a plugin from each classpath."),
Definition(
CmdOptionBound("Xplugin-disable:", Argument("plugin")),
- "Disable a plugin"),
+ "Disable plugins by name."),
Definition(
CmdOption("Xplugin-list"),
- "Print a synopsis of loaded plugins"),
+ "Print a synopsis of loaded plugins."),
Definition(
CmdOptionBound("Xplugin-require:", Argument("plugin")),
- "Abort unless the given plugin(s) are available"),
+ "Abort if a named plugin is not loaded."),
Definition(
CmdOption("Xpluginsdir", Argument("path")),
- "Path to search compiler plugins."),
+ "Path to search for plugin archives."),
Definition(
CmdOptionBound("Xprint:", Argument("phases")),
"Print out program after " & Argument("phases") & " (see below)."),
Definition(
- CmdOption("Xprint-icode"),
- "Log internal icode to *.icode files."),
+ CmdOptionBound("Xprint-icode", "[:" & Argument("phases") & "]"),
+ "Log internal icode to *.icode files after " & Argument("phases") & " (default: icode)."),
Definition(
CmdOption("Xprint-pos"),
"Print tree positions, as offsets."),
@@ -269,9 +311,12 @@ object scalac extends Command {
Definition(
CmdOption("Xsource-reader", Argument("classname")),
"Specify a custom method for reading source files."),
+ Definition(
+ CmdOption("Xstrict-inference"),
+ "Don't infer known-unsound types."),
Definition(
CmdOption("Xverify"),
- "Verify generic signatures in generated bytecode."),
+ "Verify generic signatures in generated bytecode (asm backend only)."),
Definition(
CmdOption("Y"),
"Print a synopsis of private options.")
@@ -281,65 +326,101 @@ object scalac extends Command {
Section("Compilation Phases",
DefinitionList(
Definition(
- MItalic("initial"),
- "initializing compiler"),
- Definition(
- MItalic("parse"),
- "parse source files"),
+ MItalic("parser"),
+ "parse source into ASTs, perform simple desugaring"),
Definition(
MItalic("namer"),
- "create symbols"),
+ "resolve names, attach symbols to named trees"),
+ Definition(
+ MItalic("packageobjects"),
+ "load package objects"),
+ Definition(
+ MItalic("typer"),
+ "the meat and potatoes: type the trees"),
+ Definition(
+ MItalic("patmat"),
+ "translate match expressions"),
+ Definition(
+ MItalic("superaccessors"),
+ "add super accessors in traits and nested classes"),
+ Definition(
+ MItalic("extmethods"),
+ "add extension methods for inline classes"),
+ Definition(
+ MItalic("pickler"),
+ "serialize symbol tables"),
+ Definition(
+ MItalic("refchecks"),
+ "reference/override checking, translate nested objects"),
+ Definition(
+ MItalic("selectiveanf"),
+ "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"),
+ Definition(
+ MItalic("selectivecps"),
+ MItalic("@cps") & "-driven transform of selectiveanf assignments (CPS plugin)"),
+ Definition(
+ MItalic("uncurry"),
+ "uncurry, translate function values to anonymous classes"),
Definition(
- MItalic("analyze"),
- "name and type analysis"),
+ MItalic("tailcalls"),
+ "replace tail calls by jumps"),
Definition(
- MItalic("refcheck"),
- "reference checking"),
+ MItalic("specialize"),
+ MItalic("@specialized") & "-driven class and method specialization"),
Definition(
- MItalic("uncurry"),
- "uncurry function types and applications"),
+ MItalic("explicitouter"),
+ "this refs to outer pointers, translate patterns"),
+ Definition(
+ MItalic("erasure"),
+ "erase types, add interfaces for traits"),
+ Definition(
+ MItalic("posterasure"),
+ "clean up erased inline classes"),
+ Definition(
+ MItalic("lazyvals"),
+ "allocate bitmaps, translate lazy vals into lazified defs"),
Definition(
MItalic("lambdalift"),
- "lambda lifter"),
+ "move nested functions to top level"),
Definition(
- MItalic("typesasvalues"),
- "represent types as values"),
+ MItalic("constructors"),
+ "move field definitions into constructors"),
Definition(
- MItalic("addaccessors"),
- "add accessors for constructor arguments"),
+ MItalic("flatten"),
+ "eliminate inner classes"),
Definition(
- MItalic("explicitouterclasses"),
- "make links from inner classes to enclosing one explicit"),
+ MItalic("mixin"),
+ "mixin composition"),
Definition(
- MItalic("addconstructors"),
- "add explicit constructor for each class"),
+ MItalic("cleanup"),
+ "platform-specific cleanups, generate reflective calls"),
Definition(
- MItalic("tailcall"),
- "add tail-calls"),
+ MItalic("delambdafy"),
+ "remove lambdas"),
Definition(
- MItalic("wholeprog"),
- "perform whole program analysis"),
+ MItalic("icode"),
+ "generate portable intermediate code"),
Definition(
- MItalic("addinterfaces"),
- "add one interface per class"),
+ MItalic("inliner"),
+ "optimization: do inlining"),
Definition(
- MItalic("expandmixins"),
- "expand mixins by code copying"),
+ MItalic("inlineHandlers"),
+ "optimization: inline exception handlers"),
Definition(
- MItalic("boxing"),
- "makes boxing explicit"),
+ MItalic("closelim"),
+ "optimization: eliminate uncalled closures"),
Definition(
- MItalic("erasure"),
- "type eraser"),
+ MItalic("constopt"),
+ "optimization: optimize null and other constants"),
Definition(
- MItalic("icode"),
- "generate icode"),
+ MItalic("dce"),
+ "optimization: eliminate dead code"),
Definition(
- MItalic("codegen"),
- "enable code generation"),
+ MItalic("jvm"),
+ "generate JVM bytecode"),
Definition(
MItalic("terminal"),
- "compilation terminated"),
+ "the last phase in the compiler chain"),
Definition(
MItalic("all"),
"matches all phases"))))
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 3d5a213f2f..54b65166d8 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -169,6 +169,14 @@ trait FlagSets { self: Universe =>
/** Flag indicating that tree was generated by the compiler */
val SYNTHETIC: FlagSet
+
+ /** Flag indicating that tree represents an enum.
+ *
+ * It can only appear at
+ * - the enum's class
+ * - enum constants
+ **/
+ val ENUM: FlagSet
}
/** The empty set of flags
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 19c67879f5..0ca8611719 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -166,9 +166,10 @@ trait BaseTypeSeqs {
val index = new Array[Int](nparents)
var i = 0
for (p <- parents) {
+ val parentBts = p.dealias.baseTypeSeq // dealias needed for SI-8046.
pbtss(i) =
- if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
- else p.baseTypeSeq
+ if (parentBts eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
+ else parentBts
index(i) = 0
i += 1
}
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 29a8e62b6e..0091f50fc6 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -363,6 +363,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable]
lazy val JavaNumberClass = requiredClass[java.lang.Number]
+ lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]]
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
@@ -807,46 +808,32 @@ trait Definitions extends api.StandardDefinitions {
def byNameType(arg: Type) = appliedType(ByNameParamClass, arg)
def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp)
def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg)
+ def optionType(tp: Type) = appliedType(OptionClass, tp)
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
// FYI the long clunky name is because it's really hard to put "get" into the
// name of a method without it sounding like the method "get"s something, whereas
// this method is about a type member which just happens to be named get.
- def typeOfMemberNamedGet(tp: Type) = resultOfMatchingMethod(tp, nme.get)()
- def typeOfMemberNamedHead(tp: Type) = resultOfMatchingMethod(tp, nme.head)()
- def typeOfMemberNamedApply(tp: Type) = resultOfMatchingMethod(tp, nme.apply)(IntTpe)
- def typeOfMemberNamedDrop(tp: Type) = resultOfMatchingMethod(tp, nme.drop)(IntTpe)
- def typeOfMemberNamedGetOrSelf(tp: Type) = typeOfMemberNamedGet(tp) orElse tp
- def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
- def typesOfCaseAccessors(tp: Type) = getterMemberTypes(tp, tp.typeSymbol.caseFieldAccessors)
-
- /** If this is a case class, the case field accessors (which may be an empty list.)
- * Otherwise, if there are any product selectors, that list.
- * Otherwise, a list containing only the type itself.
- */
- def typesOfSelectorsOrSelf(tp: Type): List[Type] = (
- if (tp.typeSymbol.isCase)
- typesOfCaseAccessors(tp)
- else typesOfSelectors(tp) match {
- case Nil => tp :: Nil
- case tps => tps
- }
- )
-
- /** If the given type has one or more product selectors, the type of the last one.
- * Otherwise, the type itself.
- */
- def typeOfLastSelectorOrSelf(tp: Type) = typesOfSelectorsOrSelf(tp).last
-
- def elementTypeOfLastSelectorOrSelf(tp: Type) = {
- val last = typeOfLastSelectorOrSelf(tp)
- ( typeOfMemberNamedHead(last)
- orElse typeOfMemberNamedApply(last)
- orElse elementType(ArrayClass, last)
- )
+ def typeOfMemberNamedGet(tp: Type) = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)())
+ def typeOfMemberNamedHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)())
+ def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe))
+ def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe))
+ def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
+ // SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible)
+ // extractor to limit exposure to regressions like the reported problem with existentials.
+ // TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala
+ private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match {
+ case x :: Nil => x
+ case _ => or
}
+ // Can't only check for _1 thanks to pos/t796.
+ def hasSelectors(tp: Type) = (
+ (tp.members containsName nme._1)
+ && (tp.members containsName nme._2)
+ )
+
/** Returns the method symbols for members _1, _2, ..., _N
* which exist in the given type.
*/
@@ -856,7 +843,9 @@ trait Definitions extends api.StandardDefinitions {
case m if m.paramss.nonEmpty => Nil
case m => m :: loop(n + 1)
}
- loop(1)
+ // Since ErrorType always returns a symbol from a call to member, we
+ // had better not start looking for _1, _2, etc. expecting it to run out.
+ if (tpe.isErroneous) Nil else loop(1)
}
/** If `tp` has a term member `name`, the first parameter list of which
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 84825ff2da..799f85054a 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -43,5 +43,6 @@ trait FlagSets extends api.FlagSets { self: SymbolTable =>
val PRESUPER : FlagSet = Flags.PRESUPER
val DEFAULTINIT : FlagSet = Flags.DEFAULTINIT
val SYNTHETIC : FlagSet = Flags.SYNTHETIC
+ val ENUM : FlagSet = Flags.ENUM
}
}
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index dcdf6728ce..11c1d66190 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -63,7 +63,7 @@ import scala.collection.{ mutable, immutable }
// 45: SYNCHRONIZED/M
// 46: ARTIFACT
// 47: DEFAULTMETHOD/M
-// 48:
+// 48: ENUM
// 49:
// 50:
// 51: lateDEFERRED
@@ -119,6 +119,7 @@ class ModifierFlags {
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
+ final val ENUM = 1L << 48 // symbol is an enum
/** Symbols which are marked ARTIFACT. (Expand this list?)
*
@@ -142,7 +143,7 @@ class ModifierFlags {
}
object ModifierFlags extends ModifierFlags
-/** All flags and associated operatins */
+/** All flags and associated operations */
class Flags extends ModifierFlags {
final val METHOD = 1 << 6 // a method
final val MODULE = 1 << 8 // symbol is module or class implementing a module
@@ -446,7 +447,7 @@ class Flags extends ModifierFlags {
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case ARTIFACT => "<artifact>" // (1L << 46)
case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
- case 0x1000000000000L => "" // (1L << 48)
+ case ENUM => "<enum>" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
case `lateDEFERRED` => "<latedeferred>" // (1L << 51)
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index ecbf839bab..1131c94da0 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -82,6 +82,7 @@ trait HasFlags {
def hasAbstractFlag = hasFlag(ABSTRACT)
def hasAccessorFlag = hasFlag(ACCESSOR)
def hasDefault = hasFlag(DEFAULTPARAM) && hasFlag(METHOD | PARAM) // Second condition disambiguates with TRAIT
+ def hasEnumFlag = hasFlag(ENUM)
def hasLocalFlag = hasFlag(LOCAL)
def hasModuleFlag = hasFlag(MODULE)
def hasPackageFlag = hasFlag(PACKAGE)
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index ea6afa7349..a54aa1f6e8 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -686,6 +686,7 @@ trait StdNames {
val inlinedEquals: NameType = "inlinedEquals"
val isArray: NameType = "isArray"
val isDefinedAt: NameType = "isDefinedAt"
+ val isEmpty: NameType = "isEmpty"
val isInstanceOf_ : NameType = "isInstanceOf"
val isInstanceOf_Ob : NameType = "$isInstanceOf"
val java: NameType = "java"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index c46a559d6d..bed8310767 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -356,16 +356,18 @@ abstract class SymbolTable extends macros.Universe
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
- private val caches = WeakHashSet[Clearable]()
+ import java.lang.ref.WeakReference
+ private var caches = List[WeakReference[Clearable]]()
def recordCache[T <: Clearable](cache: T): T = {
- caches += cache
+ caches ::= new WeakReference(cache)
cache
}
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
- caches foreach (_.clear)
+ caches foreach (ref => Option(ref.get).foreach(_.clear))
+ caches = caches.filterNot(_.get == null)
}
def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
@@ -376,9 +378,9 @@ abstract class SymbolTable extends macros.Universe
val NoCached: T = null.asInstanceOf[T]
var cached: T = NoCached
var cachedRunId = NoRunId
- caches += new Clearable {
+ recordCache(new Clearable {
def clear(): Unit = cached = NoCached
- }
+ })
() => {
if (currentRunId != cachedRunId || cached == NoCached) {
cached = f
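
Not part of the diff: a self-contained sketch of the registry pattern introduced above. Caches are now held through java.lang.ref.WeakReference so the collector can reclaim them, while clearAll clears whatever is still live and prunes references whose referents are gone.

    import java.lang.ref.WeakReference
    import scala.collection.generic.Clearable

    object WeakCacheRegistrySketch {
      private var caches = List.empty[WeakReference[Clearable]]

      def recordCache[T <: Clearable](cache: T): T = {
        caches ::= new WeakReference[Clearable](cache)
        cache
      }

      def clearAll(): Unit = {
        caches.foreach(ref => Option(ref.get).foreach(_.clear()))
        caches = caches.filterNot(_.get == null) // drop entries already collected
      }
    }
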
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 8fdf4dc27a..497a7c91b1 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -612,8 +612,8 @@ abstract class TreeInfo {
def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length
def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match {
- case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
- case xs => xs
+ case build.SyntacticTuple(xs) :: Nil => xs
+ case xs => xs
}
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 99e6ae633f..e9230aceee 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -8,7 +8,6 @@ package reflect
package internal
import scala.collection.{ mutable, immutable, generic }
-import generic.Clearable
import scala.ref.WeakReference
import mutable.ListBuffer
import Flags._
@@ -1999,7 +1998,9 @@ trait Types
if (sym.typeParams.size != args.size)
devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
- asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
+ val GenPolyType(tparams, result) = asSeenFromOwner(tp)
+ assert((tparams eq Nil) || tparams == sym.typeParams, (tparams, sym.typeParams))
+ result.instantiateTypeParams(sym.typeParams, args)
}
// note: does not go through typeRef. There's no need to because
@@ -2309,7 +2310,14 @@ trait Types
}
thisInfo.decls
}
- protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
+ protected[Types] def baseTypeSeqImpl: BaseTypeSeq =
+ if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType))
+ // SI-8046 base type sequence might have more elements in a subclass, we can't map it element wise.
+ transform(sym.info).baseTypeSeq
+ else
+ // Optimization: no abstract types, we can compute the BTS of this TypeRef as an element-wise map
+ // of the BTS of the referenced symbol.
+ sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
@@ -3660,7 +3668,11 @@ trait Types
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.WeakHashSet[Type](initialUniquesCapacity)
- perRunCaches.recordCache(uniques)
+ // JZ: We used to register this as a perRunCache so it would be cleared eagerly at
+ // the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)!
+ // When I fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
+ // I've removed the registration for now. I don't think it's particularly harmful anymore
+ // as a) this is now a weak set, and b) it is discarded completely before the next run.
uniqueRunId = currentRunId
}
(uniques findEntryOrUpdate tp).asInstanceOf[T]
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index b60fecd66e..2623a47be6 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -170,11 +170,20 @@ trait TypeComparers {
// corresponds does not check length of two sequences before checking the predicate,
// but SubstMap assumes it has been checked (SI-2956)
( sameLength(tparams1, tparams2)
- && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info))
+ && (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= subst(p2.info))
&& (res1 =:= subst(res2))
)
}
+ // SI-2066 This prevents overrides with incompatible variance in higher order type parameters.
+ private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = {
+ def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod)
+ ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
+ }
+
+ private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) =
+ methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
+
def isSameType2(tp1: Type, tp2: Type): Boolean = {
def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
@@ -327,7 +336,10 @@ trait TypeComparers {
val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
- def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
+ def cmp(p1: Symbol, p2: Symbol) = (
+ methodHigherOrderTypeParamsSubVariance(p2, p1)
+ && sub2(p2.info) <:< sub1(p1.info)
+ )
(tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index 7ba8a0a295..b3b20a888e 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -273,6 +273,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.ComparableClass
definitions.JavaCloneableClass
definitions.JavaNumberClass
+ definitions.JavaEnumClass
definitions.RemoteInterfaceClass
definitions.RemoteExceptionClass
definitions.ByNameParamClass
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
deleted file mode 100644
index 578ef71e09..0000000000
--- a/src/swing/scala/swing/Publisher.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.swing
-
-import scala.collection.mutable
-import mutable.Buffer
-import event.Event
-
-/** <p>
- * Notifies registered reactions when an event is published. Publishers are
- * also reactors and listen to themselves per default as a convenience.
- * </p>
- * <p>
- * In order to reduce memory leaks, reactions are weakly referenced by default,
- * unless they implement <code>Reactions.StronglyReferenced</code>. That way,
- * the lifetime of reactions are more easily bound to the registering object,
- * which are reactors in common client code and hold strong references to their
- * reactions. As a result, reactors can be garbage collected even though they
- * still have reactions registered at some publisher, but not vice versa
- * since reactors (strongly) reference publishers they are interested in.
- * </p>
- */
-trait Publisher extends Reactor {
- import Reactions._
-
- protected val listeners = new RefSet[Reaction] {
- import scala.ref._
- val underlying = new mutable.HashSet[Reference[Reaction]]
- protected def Ref(a: Reaction) = a match {
- case a: StronglyReferenced => new StrongReference[Reaction](a) with super.Ref[Reaction]
- case _ => new WeakReference[Reaction](a, referenceQueue) with super.Ref[Reaction]
- }
- }
-
- private[swing] def subscribe(listener: Reaction) { listeners += listener }
- private[swing] def unsubscribe(listener: Reaction) { listeners -= listener }
-
- /**
- * Notify all registered reactions.
- */
- def publish(e: Event) { for (l <- listeners) if (l.isDefinedAt(e)) l(e) }
-
- listenTo(this)
-}
-
-/**
- * A publisher that subscribes itself to an underlying event source not before the first
- * reaction is installed. Can unsubscribe itself when the last reaction is uninstalled.
- */
-private[swing] trait LazyPublisher extends Publisher {
- import Reactions._
-
- protected def onFirstSubscribe()
- protected def onLastUnsubscribe()
-
- override def subscribe(listener: Reaction) {
- if(listeners.size == 1) onFirstSubscribe()
- super.subscribe(listener)
- }
- override def unsubscribe(listener: Reaction) {
- super.unsubscribe(listener)
- if(listeners.size == 1) onLastUnsubscribe()
- }
-}
-
-
-
-import scala.ref._
-
-private[swing] trait SingleRefCollection[+A <: AnyRef] extends Iterable[A] { self =>
-
- trait Ref[+A <: AnyRef] extends Reference[A] {
- override def hashCode() = get match {
- case Some(x) => x.##
- case _ => 0
- }
- override def equals(that: Any) = that match {
- case that: ReferenceWrapper[_] =>
- val v1 = this.get
- val v2 = that.get
- v1 == v2
- case _ => false
- }
- }
-
- //type Ref <: Reference[A] // TODO: could use higher kinded types, but currently crashes
- protected[this] def Ref(a: A): Ref[A]
- protected[this] val referenceQueue = new ReferenceQueue[A]
-
- protected val underlying: Iterable[Reference[A]]
-
- def purgeReferences() {
- var ref = referenceQueue.poll
- while (ref != None) {
- removeReference(ref.get.asInstanceOf[Reference[A]])
- ref = referenceQueue.poll
- }
- }
-
- protected[this] def removeReference(ref: Reference[A])
-
- def iterator = new Iterator[A] {
- private val elems = self.underlying.iterator
- private var hd: A = _
- private var ahead: Boolean = false
- private def skip(): Unit =
- while (!ahead && elems.hasNext) {
- // make sure we have a reference to the next element,
- // otherwise it might be garbage collected
- val next = elems.next.get
- ahead = next != None
- if (ahead) hd = next.get
- }
- def hasNext: Boolean = { skip; ahead }
- def next(): A =
- if (hasNext) { ahead = false; hd }
- else throw new NoSuchElementException("next on empty iterator")
- }
-}
-
-private[swing] class StrongReference[+T <: AnyRef](value: T) extends Reference[T] {
- private[this] var ref: Option[T] = Some(value)
- def isValid: Boolean = ref != None
- def apply(): T = ref.get
- def get : Option[T] = ref
- override def toString = get.map(_.toString).getOrElse("<deleted>")
- def clear() { ref = None }
- def enqueue(): Boolean = false
- def isEnqueued(): Boolean = false
- }
-
-abstract class RefBuffer[A <: AnyRef] extends Buffer[A] with SingleRefCollection[A] { self =>
- protected val underlying: Buffer[Reference[A]]
-
- def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
- def +=:(el: A) = { purgeReferences(); Ref(el) +=: underlying; this }
- def remove(el: A) { underlying -= Ref(el); purgeReferences(); }
- def remove(n: Int) = { val el = apply(n); remove(el); el }
- def insertAll(n: Int, iter: Iterable[A]) {
- purgeReferences()
- underlying.insertAll(n, iter.view.map(Ref(_)))
- }
- def update(n: Int, el: A) { purgeReferences(); underlying(n) = Ref(el) }
- def apply(n: Int) = {
- purgeReferences()
- var el = underlying(n).get
- while (el == None) {
- purgeReferences(); el = underlying(n).get
- }
- el.get
- }
-
- def length = { purgeReferences(); underlying.length }
- def clear() { underlying.clear(); purgeReferences() }
-
- protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
-
-private[swing] abstract class RefSet[A <: AnyRef] extends mutable.Set[A] with SingleRefCollection[A] { self =>
- protected val underlying: mutable.Set[Reference[A]]
-
- def -=(el: A): this.type = { underlying -= Ref(el); purgeReferences(); this }
- def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
- def contains(el: A): Boolean = { purgeReferences(); underlying.contains(Ref(el)) }
- override def size = { purgeReferences(); underlying.size }
-
- protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
diff --git a/test/files/neg/t2066.check b/test/files/neg/t2066.check
new file mode 100644
index 0000000000..efade87e26
--- /dev/null
+++ b/test/files/neg/t2066.check
@@ -0,0 +1,21 @@
+t2066.scala:6: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+ override def f[T[+_]] = ()
+ ^
+t2066.scala:10: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+ override def f[T[-_]] = ()
+ ^
+t2066.scala:23: error: overriding method f in trait A2 of type [T[+_]]=> Unit;
+ method f has incompatible type
+ override def f[T[-_]] = ()
+ ^
+t2066.scala:45: error: overriding method f in trait A4 of type [T[X[+_]]]=> Unit;
+ method f has incompatible type
+ override def f[T[X[_]]] = ()
+ ^
+t2066.scala:53: error: overriding method f in trait A5 of type [T[X[-_]]]=> Unit;
+ method f has incompatible type
+ override def f[T[X[_]]] = ()
+ ^
+5 errors found
diff --git a/test/files/neg/t2066.scala b/test/files/neg/t2066.scala
new file mode 100644
index 0000000000..7f15d39c67
--- /dev/null
+++ b/test/files/neg/t2066.scala
@@ -0,0 +1,70 @@
+trait A1 {
+ def f[T[_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[+_]] = ()
+}
+
+trait C1 extends A1 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A2 {
+ def f[T[+_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[_]] = () // okay
+}
+
+trait C2 extends A2 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+ def f[T[-_]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[_]] = () // okay
+}
+
+trait C3 extends A3 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A4 {
+ def f[T[X[+_]]] = ()
+}
+
+trait B4 extends A4 {
+ override def f[T[X[_]]] = ()
+}
+
+trait A5 {
+ def f[T[X[-_]]] = ()
+}
+
+trait B5 extends A5 {
+ override def f[T[X[_]]] = ()
+}
+
+
+
+trait A6 {
+ def f[T[X[_]]] = ()
+}
+
+trait B6 extends A6 {
+ override def f[T[X[+_]]] = () // okay
+}
+trait C6 extends A6 {
+ override def f[T[X[_]]] = () // okay
+}
+trait D6 extends A6 {
+ override def f[T[X[-_]]] = ()
+}
diff --git a/test/files/neg/t2066b.check b/test/files/neg/t2066b.check
new file mode 100644
index 0000000000..097c44fef3
--- /dev/null
+++ b/test/files/neg/t2066b.check
@@ -0,0 +1,5 @@
+t2066b.scala:7: error: overriding method f in trait A of type [T[_]](x: T[Int])T[Any];
+ method f has incompatible type
+ def f[T[+_]](x : T[Int]) : T[Any] = x
+ ^
+one error found
diff --git a/test/pending/neg/t2066.scala b/test/files/neg/t2066b.scala
index 46177b19f7..46177b19f7 100644
--- a/test/pending/neg/t2066.scala
+++ b/test/files/neg/t2066b.scala
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
index 95b88a6b3d..00006c08f0 100644
--- a/test/files/neg/t4425.check
+++ b/test/files/neg/t4425.check
@@ -1,12 +1,12 @@
-t4425.scala:3: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425.scala:3: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: Int)(y: Option[Int]): None.type exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
42 match { case _ X _ => () }
^
-t4425.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: Int)(y: Int): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
42 match { case _ X _ => () }
^
-t4425.scala:13: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425.scala:13: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
"" match { case _ X _ => () }
^
diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check
index 1186e8b609..8418b4fd12 100644
--- a/test/files/neg/t4425b.check
+++ b/test/files/neg/t4425b.check
@@ -1,61 +1,49 @@
-t4425b.scala:5: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:5: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println( "" match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:6: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:6: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:7: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:7: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println( "" match { case X(_) => "ok" ; case _ => "fail" })
^
-t4425b.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:8: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
^
-t4425b.scala:9: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:9: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:10: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+t4425b.scala:10: error: object X is not a case class, nor does it have an unapply/unapplySeq member
Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:18: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:18: error: too many patterns for object X: expected 1, found 2
println( "" match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:19: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:19: error: too many patterns for object X: expected 1, found 2
println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:22: error: too many patterns for object X: expected 1, found 2
println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
- println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
- ^
-t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:23: error: too many patterns for object X: expected 1, found 2
println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
- println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
- ^
-t4425b.scala:31: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:31: error: too many patterns for object X offering Nothing: expected 1, found 2
println( "" match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:32: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:32: error: too many patterns for object X offering Nothing: expected 1, found 2
println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
^
-t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:35: error: too many patterns for object X offering Nothing: expected 1, found 2
println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
- println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
- ^
-t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+t4425b.scala:36: error: too many patterns for object X offering Nothing: expected 1, found 2
println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
^
-t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
- println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
- ^
-18 errors found
+14 errors found
diff --git a/test/files/neg/t5903a.check b/test/files/neg/t5903a.check
index 2e5cc87167..34003b0a82 100644
--- a/test/files/neg/t5903a.check
+++ b/test/files/neg/t5903a.check
@@ -1,4 +1,4 @@
-Test_2.scala:4: error: wrong number of patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3
+Test_2.scala:4: error: too many patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3
case nq"$x + $y + $z" => println((x, y))
^
one error found
diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check
new file mode 100644
index 0000000000..77f6b3ccbc
--- /dev/null
+++ b/test/files/neg/t6675b.check
@@ -0,0 +1,37 @@
+t6675b.scala:17: warning: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (SI-6675)
+ def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn
+ ^
+t6675b.scala:19: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: (Int, Int)
+ def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail
+ ^
+t6675b.scala:24: warning: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (SI-6675)
+ def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn
+ ^
+t6675b.scala:26: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: (?A11, ?A12) where type ?A12 <: A (this is a GADT skolem), type ?A11 <: A (this is a GADT skolem)
+ def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail
+ ^
+t6675b.scala:30: warning: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (SI-6675)
+ def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn
+ ^
+t6675b.scala:32: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: ((Int, Int), (Int, Int))
+ def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a } // fail
+ ^
+t6675b.scala:36: warning: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (SI-6675)
+ def f1[A](x: A) = (Left(x): Either[A, A]) match { case NativelyTwo(a) => a } // warn
+ ^
+t6675b.scala:37: warning: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (SI-6675)
+ def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a } // warn
+ ^
+t6675b.scala:39: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: ((?A17, ?A18), (?A19, ?A20)) where type ?A20 <: A (this is a GADT skolem), type ?A19 <: A (this is a GADT skolem), type ?A18 <: A (this is a GADT skolem), type ?A17 <: A (this is a GADT skolem)
+ def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail
+ ^
+5 warnings found
+four errors found
diff --git a/test/files/neg/t6675b.flags b/test/files/neg/t6675b.flags
new file mode 100644
index 0000000000..1008b0a70c
--- /dev/null
+++ b/test/files/neg/t6675b.flags
@@ -0,0 +1 @@
+-Xlint
diff --git a/test/files/neg/t6675b.scala b/test/files/neg/t6675b.scala
new file mode 100644
index 0000000000..c86c9c3955
--- /dev/null
+++ b/test/files/neg/t6675b.scala
@@ -0,0 +1,40 @@
+object LeftOrRight {
+ def unapply[A](value: Either[A, A]): Option[A] = value match {
+ case scala.Left(x) => Some(x)
+ case scala.Right(x) => Some(x)
+ }
+}
+
+object NativelyTwo {
+ def unapply[A](value: Either[A, A]): Option[(A, A)] = value match {
+ case scala.Left(x) => Some(x -> x)
+ case scala.Right(x) => Some(x -> x)
+ }
+}
+
+
+class A {
+ def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn
+ def f2 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b)) => a } // no warn
+ def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail
+}
+
+class B {
+ def f1[A](x: A) = (Left(x): Either[A, A]) match { case LeftOrRight(a) => a } // no warn
+ def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn
+ def f3[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b)) => a } // no warn
+ def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail
+}
+
+class C {
+ def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn
+ def f2 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b)) => a } // no warn
+ def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a } // fail
+}
+
+class D {
+ def f1[A](x: A) = (Left(x): Either[A, A]) match { case NativelyTwo(a) => a } // warn
+ def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a } // warn
+ def f3[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b)) => a } // no warn
+ def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail
+}
diff --git a/test/files/neg/t7214neg.check b/test/files/neg/t7214neg.check
index 0660cccd02..291af04578 100644
--- a/test/files/neg/t7214neg.check
+++ b/test/files/neg/t7214neg.check
@@ -1,7 +1,4 @@
-t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
+t7214neg.scala:28: error: not enough patterns for object Extractor offering Any: expected 1, found 0
case Extractor() =>
^
-t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
- case Extractor() =>
- ^
-two errors found
+one error found
diff --git a/test/files/neg/t7850.check b/test/files/neg/t7850.check
new file mode 100644
index 0000000000..317be2bbce
--- /dev/null
+++ b/test/files/neg/t7850.check
@@ -0,0 +1,7 @@
+t7850.scala:11: error: an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: Casey)
+ val Casey(x1) = new Casey(1)
+ ^
+t7850.scala:12: error: an unapply result must have a member `def isEmpty: Boolean
+ val Dingy(x2) = new Dingy(1)
+ ^
+two errors found
diff --git a/test/files/neg/t7850.scala b/test/files/neg/t7850.scala
new file mode 100644
index 0000000000..04edad82b5
--- /dev/null
+++ b/test/files/neg/t7850.scala
@@ -0,0 +1,16 @@
+// isEmpty returns non-boolean
+class Casey(a: Int) { def isEmpty = this; def get = this }
+object Casey { def unapply(a: Casey) = a }
+
+// no isEmpty method at all
+class Dingy(a: Int) { def get = this }
+object Dingy { def unapply(a: Dingy) = a }
+
+object Test {
+ def main(args: Array[String]) {
+ val Casey(x1) = new Casey(1)
+ val Dingy(x2) = new Dingy(1)
+ println(s"$x1 $x2")
+ }
+}
+
diff --git a/test/files/neg/t7897.check b/test/files/neg/t7897.check
new file mode 100644
index 0000000000..48eff511c7
--- /dev/null
+++ b/test/files/neg/t7897.check
@@ -0,0 +1,4 @@
+t7897.scala:19: error: value length is not a member of p0.Single
+ case p0.Single(x) => println(s"`$x` has ${x.length} chars")
+ ^
+one error found
diff --git a/test/files/neg/t7897.scala b/test/files/neg/t7897.scala
new file mode 100644
index 0000000000..87c966b1e0
--- /dev/null
+++ b/test/files/neg/t7897.scala
@@ -0,0 +1,23 @@
+package p0 {
+ class Single(val x: Any) extends AnyRef with Product1[String] {
+ private def s = "" + x
+ override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+ def isEmpty = false
+ def get = this
+ def _1 = s + " only"
+
+ override def toString = s"Single(${_1})"
+ }
+
+ object Single {
+ def unapply(x: Any): Single = new Single(x)
+ }
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ "catdog" match {
+ case p0.Single(x) => println(s"`$x` has ${x.length} chars")
+ case x => println("fail: " + x)
+ }
+ }
+}
diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check
index 8c41060ba2..b118792229 100644
--- a/test/files/neg/t997.check
+++ b/test/files/neg/t997.check
@@ -1,7 +1,4 @@
-t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
+t997.scala:13: error: too many patterns for object Foo offering (String, String): expected 2, found 3
"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
^
-t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
-"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
- ^
-two errors found
+one error found
diff --git a/test/files/pos/t2066.scala b/test/files/pos/t2066.scala
new file mode 100644
index 0000000000..30cb99d45c
--- /dev/null
+++ b/test/files/pos/t2066.scala
@@ -0,0 +1,25 @@
+trait A1 {
+ def f[T[+_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[_]] = ()
+}
+
+
+trait A2 {
+ def f[T[-_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[_]] = ()
+}
+
+
+trait A3 {
+ def f[T[X[_]]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[X[+_]]] = ()
+}
diff --git a/test/files/pos/t8045.scala b/test/files/pos/t8045.scala
new file mode 100644
index 0000000000..21154e386a
--- /dev/null
+++ b/test/files/pos/t8045.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+ case class Number(i: Int)
+
+ object UnliftNumber {
+ def unapply(t: Any): Option[Number] = t match {
+ case i: Int => Some(Number(i))
+ case _ => None
+ }
+ }
+
+ def eval(expr: Any): Option[Number] = expr match {
+ case UnliftNumber(n) => Some(n)
+ case _ => None
+ }
+
+ println(eval(1))
+}
diff --git a/test/files/pos/t8046.scala b/test/files/pos/t8046.scala
new file mode 100644
index 0000000000..304d70b6b8
--- /dev/null
+++ b/test/files/pos/t8046.scala
@@ -0,0 +1,20 @@
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Two extends One {
+ trait Op[A] extends (A => A)
+
+ // This compiles
+ class View1 extends Op[Int] { def apply(xs: Int) = xs }
+
+ // ??? base class View2 not found in basetypes of class View2
+ // ./a.scala:9: error: class View2 needs to be abstract, since \
+ // method apply in trait Function1 of type (v1: T1)R is not defined
+ // (Note that T1 does not match Int)
+ // class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+ // ^
+ // one error found
+ class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+}
diff --git a/test/files/pos/t8046b.scala b/test/files/pos/t8046b.scala
new file mode 100644
index 0000000000..45b99fd7e0
--- /dev/null
+++ b/test/files/pos/t8046b.scala
@@ -0,0 +1,16 @@
+trait One {
+ type Op[A]
+ type Alias = Op[Int]
+}
+
+trait Two extends One {
+ trait Op[A] extends M[A]
+ //(a: Alias) => a.value.toChar // okay
+ // (=> A).asSeenFrom(a.type, trait M): => Int
+ class View2 extends Alias { value.toChar } // toChar is not a member of type parameter A
+ // (=> A).asSeenFrom(View2.this.type, trait M): => A
+
+ // override type Alias = Op[Int] // works with this
+}
+
+trait M[A] { def value: A = sys.error("") }
diff --git a/test/files/pos/t8046c.scala b/test/files/pos/t8046c.scala
new file mode 100644
index 0000000000..f05b4c15b5
--- /dev/null
+++ b/test/files/pos/t8046c.scala
@@ -0,0 +1,19 @@
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+ trait Op[A] extends (A => A)
+
+ def f1(f: Op[Int]) = f(5)
+ def f2(f: Alias[Int]) = f(5)
+ def f3[T <: Op[Int]](f: T) = f(5)
+ def f4[T <: Alias[Int]](f: T) = f(5)
+ // ./a.scala:12: error: type mismatch;
+ // found : Int(5)
+ // required: T1
+ // def f4[T <: Alias[Int]](f: T) = f(5)
+ // ^
+}
+
diff --git a/test/files/pos/t8128.scala b/test/files/pos/t8128.scala
new file mode 100644
index 0000000000..b6f76691b0
--- /dev/null
+++ b/test/files/pos/t8128.scala
@@ -0,0 +1,15 @@
+object G {
+ def unapply(m: Any): Option[_] = Some("")
+}
+
+object H {
+ def unapplySeq(m: Any): Option[Seq[_]] = None
+}
+
+object Test {
+ (0: Any) match {
+ case G(v) => v
+ case H(v) => v
+ case _ =>
+ }
+}
diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala
index a71d685f7f..302429b19e 100644
--- a/test/files/run/macroPlugins-namerHooks.scala
+++ b/test/files/run/macroPlugins-namerHooks.scala
@@ -14,19 +14,20 @@ object Test extends DirectTest {
import analyzer._
val output = collection.mutable.ListBuffer[String]()
+ def log(what: String) = output += what.replace(String.format("%n"), " ")
object macroPlugin extends MacroPlugin {
override def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = {
- output += s"enterSym(${tree.toString.replace('\n', ' ')})"
+ log(s"enterSym($tree)")
namer.standardEnterSym(tree)
true
}
override def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = {
- output += s"ensureCompanionObject(${cdef.toString.replace('\n', ' ')}, ...)"
+ log(s"ensureCompanionObject($cdef, ...)")
Some(namer.standardEnsureCompanionObject(cdef, creator))
}
override def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
- stats.foreach(stat => output += s"enterStat(${stat.toString.replace('\n', ' ')})")
+ stats.foreach(stat => log(s"enterStat($stat)"))
stats
}
}
diff --git a/test/files/run/name-based-patmat.check b/test/files/run/name-based-patmat.check
index 1cc605ea3d..3d5fc40ed7 100644
--- a/test/files/run/name-based-patmat.check
+++ b/test/files/run/name-based-patmat.check
@@ -1,3 +1,5 @@
+`catdog only` has 11 chars
+`catdog only, no product` has 23 chars
catdog
2 catdogs! A ha ha!
3 catdogs! A ha ha!
diff --git a/test/files/run/name-based-patmat.scala b/test/files/run/name-based-patmat.scala
index 2c429c141f..8e20940100 100644
--- a/test/files/run/name-based-patmat.scala
+++ b/test/files/run/name-based-patmat.scala
@@ -1,5 +1,33 @@
final class MiniSome[T](val get: T) extends AnyVal { def isEmpty = false }
+package p0 {
+ class Single(val x: Any) extends AnyRef with Product1[String] {
+ private def s = "" + x
+ override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+ def isEmpty = false
+ def get = this
+ def _1 = s + " only"
+
+ override def toString = s"Single(${_1})"
+ }
+
+ object Single {
+ def unapply(x: Any): Single = new Single(x)
+ }
+
+ class SingleNoProduct(val x: Any) extends AnyRef {
+ private def s = "" + x
+ def isEmpty = false
+ def get = s + " only, no product"
+
+ override def toString = s"SingleNoProduct($get)"
+ }
+
+ object SingleNoProduct {
+ def unapply(x: Any): SingleNoProduct = new SingleNoProduct(x)
+ }
+}
+
package p1 {
class Triple(val x: Any) extends AnyRef with Product3[String, String, String] {
private def s = "" + x
@@ -49,14 +77,16 @@ package p3 {
}
object Test {
-
- // def f(x: Any) = x match {
- // case p1.Foo(x, y, z) => println((x, y, z))
- // case x => println(x)
- // }
-
def main(args: Array[String]): Unit = {
"catdog" match {
+ case p0.Single(x) => println(s"`${x._1}` has ${x._1.length} chars")
+ case x => println("fail: " + x)
+ }
+ "catdog" match {
+ case p0.SingleNoProduct(x) => println(s"`$x` has ${x.length} chars")
+ case x => println("fail: " + x)
+ }
+ "catdog" match {
case p1.Triple(x, y, z) => List(x, y, z) foreach println
case x => println("fail: " + x)
}
diff --git a/test/files/run/patmat-mix-case-extractor.check b/test/files/run/patmat-mix-case-extractor.check
new file mode 100644
index 0000000000..a6e1bd23df
--- /dev/null
+++ b/test/files/run/patmat-mix-case-extractor.check
@@ -0,0 +1,8 @@
+-1
+6
+4
+18
+-1
+1006
+1004
+1018
diff --git a/test/files/run/patmat-mix-case-extractor.scala b/test/files/run/patmat-mix-case-extractor.scala
new file mode 100644
index 0000000000..964e6f743c
--- /dev/null
+++ b/test/files/run/patmat-mix-case-extractor.scala
@@ -0,0 +1,110 @@
+trait CaseClass
+trait ProdCaseClass extends CaseClass { def x: Int }
+trait SeqCaseClass extends CaseClass { def xs: Seq[Int] }
+
+case class CaseClass1() extends CaseClass
+case class CaseClass2(xs: Int*) extends SeqCaseClass
+case class CaseClass3(x: Int) extends ProdCaseClass
+case class CaseClass4(x: Int, xs: Int*) extends ProdCaseClass with SeqCaseClass
+
+object Extractor1 { def unapply(x: CaseClass): Boolean = false }
+object Extractor2 { def unapplySeq(x: SeqCaseClass): Option[Seq[Int]] = Some(x.xs) }
+object Extractor3 { def unapply(x: ProdCaseClass): Option[Int] = Some(x.x) }
+object Extractor4 { def unapplySeq(x: ProdCaseClass with SeqCaseClass): Option[(Int, Seq[Int])] = Some((x.x, x.xs)) }
+
+class A {
+ def f1(x: Any) = x match {
+ case CaseClass1() => -1
+ case CaseClass2(xs @ _*) => xs.sum
+ case CaseClass3(x) => x
+ case CaseClass4(x, xs @ _*) => x + xs.sum
+ case Extractor4(x, xs @ _*) => 1000 + x + xs.sum
+ case Extractor3(x) => 1000 + x
+ case Extractor2(xs @ _*) => 1000 + xs.sum
+ case Extractor1() => -3
+ case _ => -2
+ }
+ def f2(x: Any) = x match {
+ case Extractor4(x, xs @ _*) => 1000 + x + xs.sum
+ case Extractor3(x) => 1000 + x
+ case Extractor2(xs @ _*) => 1000 + xs.sum
+ case Extractor1() => -3
+ case CaseClass1() => -1
+ case CaseClass2(xs @ _*) => xs.sum
+ case CaseClass3(x) => x
+ case CaseClass4(x, xs @ _*) => x + xs.sum
+ case _ => -2
+ }
+ def run() {
+ List(
+ f1(CaseClass1()),
+ f1(CaseClass2(1, 2, 3)),
+ f1(CaseClass3(4)),
+ f1(CaseClass4(5, 6, 7)),
+ f2(CaseClass1()),
+ f2(CaseClass2(1, 2, 3)),
+ f2(CaseClass3(4)),
+ f2(CaseClass4(5, 6, 7))
+ ) foreach println
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ (new A).run
+ }
+}
+
+
+class B {
+ case class CaseClass0()
+ case class CaseClass0v(xs: Int*)
+
+ case class CaseClass(x: Int, y: Int)
+ object Extractor { def unapply(x: Any): Option[(Int, Int)] = Some((1, 1)) }
+
+ case class CaseSeq(x: Char, y: Double, zs: Int*)
+ object ExtractorSeq { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] = Some((1, 1, List(1))) }
+
+ def f1(x: CaseClass) = x match { case CaseClass(y, z) => y }
+ def f2(x: Any) = x match { case Extractor(y, z) => y }
+
+ def f3(x: CaseSeq) = x match {
+ case CaseSeq(x, y) => y
+ case CaseSeq(x, y, z) => z
+ }
+ def f4(x: CaseSeq) = x match {
+ case CaseSeq(x, y, z) => z :: Nil
+ case CaseSeq(x, y, z @ _*) => z
+ }
+
+ def f5(x: Any) = x match { case ExtractorSeq(x, y, z) => z }
+ def f6(x: Any) = x match { case ExtractorSeq(x, y, z @ _*) => z }
+
+ def g1(x: CaseClass0) = x match {
+ case CaseClass0() => true
+ }
+ def g2(x: CaseClass0v) = x match {
+ case CaseClass0v() => true
+ case CaseClass0v(5) => true
+ case CaseClass0v(x) => true
+ case CaseClass0v(xs @ _*) => false
+ }
+}
+
+package p1 {
+ trait _X {
+ case class _Foo()
+ object _Bar {
+ def unapply(foo: _Foo): Boolean = true
+ }
+ }
+
+ object Y extends _X {
+ val foo = _Foo()
+ foo match {
+ case _Bar() =>
+ case _ => assert(false)
+ }
+ }
+}
diff --git a/test/files/run/t8046.check b/test/files/run/t8046.check
new file mode 100644
index 0000000000..905b0b35ca
--- /dev/null
+++ b/test/files/run/t8046.check
@@ -0,0 +1,2 @@
+List(trait Op, trait Function1, class Object, class Any)
+BTS(T,Three.this.Op[Int],Int => Int,Object,Any)
diff --git a/test/files/run/t8046/Test.scala b/test/files/run/t8046/Test.scala
new file mode 100644
index 0000000000..f6b525d1b5
--- /dev/null
+++ b/test/files/run/t8046/Test.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ override def code = ""
+ override def extraSettings: String = "-usejavacp"
+
+ override def show() {
+ val c = newCompiler()
+ new c.Run
+ import c._
+
+ val f4 = typeOf[Three].member(newTermName("f4"))
+ val f4ParamInfo = f4.paramss.head.head.info
+ println(f4ParamInfo.baseClasses)
+ println(f4ParamInfo.baseTypeSeq)
+ }
+}
+
diff --git a/test/files/run/t8046/t8046c.scala b/test/files/run/t8046/t8046c.scala
new file mode 100644
index 0000000000..0b484da530
--- /dev/null
+++ b/test/files/run/t8046/t8046c.scala
@@ -0,0 +1,13 @@
+import language._
+
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+ trait Op[A] extends (A => A)
+
+ def f4[T <: Alias[Int]](f: T) = 0
+}
+
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 6c7c32bfdf..1eb186f303 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -127,6 +127,47 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
(visited == expectedSize(r)) :| str(r)
}
+ property("sum") = forAll(myGen) { r =>
+// println("----------")
+// println("sum "+str(r))
+ val rSum = r.sum
+ val expected = r.length match {
+ case 0 => 0
+ case 1 => r.head
+ case _ => ((r.head + r.last).toLong * r.length / 2).toInt
+ }
+// println("size: " + r.length)
+// println("expected: " + expected)
+// println("obtained: " + rSum)
+
+ (rSum == expected) :| str(r)
+ }
+
+/* checks that sum respects custom Numeric */
+ property("sumCustomNumeric") = forAll(myGen) { r =>
+ val mod = 65536
+ object mynum extends Numeric[Int] {
+ def plus(x: Int, y: Int): Int = (x + y) % mod
+ override def zero = 0
+
+ def fromInt(x: Int): Int = ???
+ def minus(x: Int, y: Int): Int = ???
+ def negate(x: Int): Int = ???
+ def times(x: Int, y: Int): Int = ???
+ def toDouble(x: Int): Double = ???
+ def toFloat(x: Int): Float = ???
+ def toInt(x: Int): Int = ((x % mod) + mod * 2) % mod
+ def toLong(x: Int): Long = ???
+ def compare(x: Int, y: Int): Int = ???
+ }
+
+ val rSum = r.sum(mynum)
+ val expected = mynum.toInt(r.sum)
+
+ (rSum == expected) :| str(r)
+ }
+
+
property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
// println("length "+str(r))
(r.length == expectedSize(r)) :| str(r)
diff --git a/test/junit/scala/collection/PagedSeq.scala b/test/junit/scala/collection/PagedSeq.scala
new file mode 100644
index 0000000000..5f83cf6f31
--- /dev/null
+++ b/test/junit/scala/collection/PagedSeq.scala
@@ -0,0 +1,16 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+/* Test for SI-6615 */
+@RunWith(classOf[JUnit4])
+class PagedSeqTest {
+ @Test
+ def rovingDoesNotNPE(): Unit = {
+ // should not NPE, and should equal the given Seq
+ assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
+ }
+}
diff --git a/test/pending/pos/t6161.scala b/test/pending/pos/t6161.scala
new file mode 100644
index 0000000000..5783cc85f2
--- /dev/null
+++ b/test/pending/pos/t6161.scala
@@ -0,0 +1,22 @@
+object t6161 {
+ trait N {
+ type Name
+ }
+
+ trait N1 extends N {
+ class Name {
+ type ThisNameType <: Name
+ def encode: ThisNameType = ???
+ }
+ }
+
+ trait S {
+ self: N => // change to N1 and it compiles
+ type NameType <: Name
+ }
+
+ object g extends S with N1
+
+ val n1: g.NameType = ???
+ val n2: g.Name = n1.encode
+}
diff --git a/test/pending/pos/t8128b.scala b/test/pending/pos/t8128b.scala
new file mode 100644
index 0000000000..dd44a25a90
--- /dev/null
+++ b/test/pending/pos/t8128b.scala
@@ -0,0 +1,18 @@
+class Optiony[X] { def isEmpty = true; def get: X = ??? }
+class Seqy[X] { def head: X = ???; def length = 0; def apply(i: Int): X = ??? }
+
+object G {
+ def unapply(m: Any): Optiony[_] = ???
+}
+
+object H {
+ def unapplySeq(m: Any): Optiony[Seqy[_]] = ???
+}
+
+object Test {
+ (0: Any) match {
+ case G(v) => v
+ case H(v) => v
+ case _ =>
+ }
+}
diff --git a/test/files/presentation/context-bounds1.check b/test/pending/presentation/context-bounds1.check
index b444de59a4..b444de59a4 100644
--- a/test/files/presentation/context-bounds1.check
+++ b/test/pending/presentation/context-bounds1.check
diff --git a/test/files/presentation/context-bounds1/Test.scala b/test/pending/presentation/context-bounds1/Test.scala
index bec1131c4c..bec1131c4c 100644
--- a/test/files/presentation/context-bounds1/Test.scala
+++ b/test/pending/presentation/context-bounds1/Test.scala
diff --git a/test/files/presentation/context-bounds1/src/ContextBounds.scala b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
index 72a8f694a3..72a8f694a3 100644
--- a/test/files/presentation/context-bounds1/src/ContextBounds.scala
+++ b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
diff --git a/test/files/run/reflection-sync-potpourri.scala b/test/pending/run/reflection-sync-potpourri.scala
index 0ad5f2ab66..0ad5f2ab66 100644
--- a/test/files/run/reflection-sync-potpourri.scala
+++ b/test/pending/run/reflection-sync-potpourri.scala
diff --git a/versions.properties b/versions.properties
index 90448dc49a..62ba7ac379 100644
--- a/versions.properties
+++ b/versions.properties
@@ -12,6 +12,10 @@ scala-continuations-plugin.version.number=1.0.0-RC2
scala-continuations-library.version.number=1.0.0-RC2
scala-swing.version.number=1.0.0-RC2
+# these ship with distribution (and scala-library-all depends on them)
+akka-actor.version.number=2.2.3
+actors-migration.version.number=1.0.0
+
# external modules, used internally (not shipped)
partest.version.number=1.0.0-RC8
scalacheck.version.number=1.11.1