-rw-r--r--  build-ant-macros.xml | 9
-rwxr-xr-x  build.xml | 239
-rw-r--r--  doc/LICENSE.md (renamed from docs/LICENSE) | 21
-rw-r--r--  doc/License.rtf | 65
-rw-r--r--  doc/README (renamed from docs/README) | 2
-rw-r--r--  doc/licenses/apache_jansi.txt (renamed from docs/licenses/apache_jansi.txt) | 0
-rw-r--r--  doc/licenses/bsd_asm.txt (renamed from docs/licenses/bsd_asm.txt) | 0
-rw-r--r--  doc/licenses/bsd_jline.txt (renamed from docs/licenses/bsd_jline.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery-layout.txt (renamed from docs/licenses/mit_jquery-layout.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery-ui.txt (renamed from docs/licenses/mit_jquery-ui.txt) | 0
-rw-r--r--  doc/licenses/mit_jquery.txt (renamed from docs/licenses/mit_jquery.txt) | 0
-rw-r--r--  doc/licenses/mit_sizzle.txt (renamed from docs/licenses/mit_sizzle.txt) | 0
-rw-r--r--  doc/licenses/mit_tools.tooltip.txt (renamed from docs/licenses/mit_tools.tooltip.txt) | 0
-rw-r--r--  src/build/maven/maven-deploy.xml | 171
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 2
-rw-r--r--  src/build/maven/scala-dist-pom.xml | 75
-rw-r--r--  src/build/maven/scala-library-all-pom.xml | 99
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Enclosures.scala | 4
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugin.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala | 283
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 367
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 122
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 78
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 23
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 4
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala | 2
-rw-r--r--  src/library/rootdoc.txt | 63
-rw-r--r--  src/library/scala/AnyVal.scala | 2
-rw-r--r--  src/library/scala/App.scala | 12
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 29
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 24
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 8
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 28
-rw-r--r--  src/library/scala/collection/convert/Wrappers.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 33
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 21
-rw-r--r--  src/library/scala/math/Numeric.scala | 4
-rw-r--r--  src/library/scala/runtime/AbstractPartialFunction.scala | 12
-rw-r--r--  src/library/scala/runtime/MethodCache.scala | 8
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/library/scala/util/Sorting.scala | 4
-rw-r--r--  src/manual/scala/man1/scala.scala | 6
-rw-r--r--  src/manual/scala/man1/scalac.scala | 185
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 9
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala | 11
-rw-r--r--  src/reflect/scala/reflect/macros/Enclosures.scala | 13
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 1
-rw-r--r--  src/swing/scala/swing/Publisher.scala | 174
-rw-r--r--  test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala | 2
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine.check | 7
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine/Macros_2.flags | 1
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine/Macros_2.scala | 7
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine/Plugin_1.scala | 35
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine/Test_3.scala | 4
-rw-r--r--  test/files/neg/macro-incompatible-macro-engine/scalac-plugin.xml | 4
-rw-r--r--  test/files/neg/t2066.check | 21
-rw-r--r--  test/files/neg/t2066.scala | 70
-rw-r--r--  test/files/neg/t2066b.check | 5
-rw-r--r--  test/files/neg/t2066b.scala (renamed from test/pending/neg/t2066.scala) | 0
-rw-r--r--  test/files/pos/t2066.scala | 25
-rw-r--r--  test/files/pos/t8046.scala | 20
-rw-r--r--  test/files/pos/t8046b.scala | 16
-rw-r--r--  test/files/pos/t8046c.scala | 19
-rw-r--r--  test/files/pos/t8064.flags | 1
-rw-r--r--  test/files/pos/t8064/Client_2.scala | 8
-rw-r--r--  test/files/pos/t8064/Macro_1.scala | 10
-rw-r--r--  test/files/pos/t8064b.flags | 1
-rw-r--r--  test/files/pos/t8064b/Client_2.scala | 6
-rw-r--r--  test/files/pos/t8064b/Macro_1.scala | 11
-rw-r--r--  test/files/pos/t8120.scala | 9
-rw-r--r--  test/files/presentation/hyperlinks-macro.check | 11
-rw-r--r--  test/files/presentation/hyperlinks-macro/Runner.scala | 8
-rw-r--r--  test/files/presentation/hyperlinks-macro/src/MacroCall.scala | 11
-rw-r--r--  test/files/presentation/t4287.check | 11
-rw-r--r--  test/files/presentation/t4287/Test.scala | 3
-rw-r--r--  test/files/presentation/t4287/src/Foo.scala | 5
-rw-r--r--  test/files/presentation/t4287b.check | 6
-rw-r--r--  test/files/presentation/t4287b/Test.scala | 3
-rw-r--r--  test/files/presentation/t4287b/src/Foo.scala | 15
-rw-r--r--  test/files/presentation/t4287c.check | 11
-rw-r--r--  test/files/presentation/t4287c.flags | 1
-rw-r--r--  test/files/presentation/t4287c/Test.scala | 3
-rw-r--r--  test/files/presentation/t4287c/src/Foo.scala | 9
-rw-r--r--  test/files/run/macro-default-params.check | 1
-rw-r--r--  test/files/run/macro-default-params/Macros_1.scala | 27
-rw-r--r--  test/files/run/macro-default-params/Test_2.scala | 3
-rw-r--r--  test/files/run/macro-whitebox-fundep-materialization/Test_2.scala | 2
-rw-r--r--  test/files/run/macroPlugins-macroArgs.check | 2
-rw-r--r--  test/files/run/macroPlugins-macroArgs/Macros_2.scala | 11
-rw-r--r--  test/files/run/macroPlugins-macroArgs/Plugin_1.scala | 21
-rw-r--r--  test/files/run/macroPlugins-macroArgs/Test_3.flags | 1
-rw-r--r--  test/files/run/macroPlugins-macroArgs/Test_3.scala | 4
-rw-r--r--  test/files/run/macroPlugins-macroArgs/scalac-plugin.xml | 4
-rw-r--r--  test/files/run/macroPlugins-macroExpand.check | 2
-rw-r--r--  test/files/run/macroPlugins-macroExpand/Macros_2.scala | 18
-rw-r--r--  test/files/run/macroPlugins-macroExpand/Plugin_1.scala | 27
-rw-r--r--  test/files/run/macroPlugins-macroExpand/Test_3.flags | 1
-rw-r--r--  test/files/run/macroPlugins-macroExpand/Test_3.scala | 4
-rw-r--r--  test/files/run/macroPlugins-macroExpand/scalac-plugin.xml | 4
-rw-r--r--  test/files/run/macroPlugins-macroRuntime.check | 2
-rw-r--r--  test/files/run/macroPlugins-macroRuntime/Macros_2.scala | 11
-rw-r--r--  test/files/run/macroPlugins-macroRuntime/Plugin_1.scala | 20
-rw-r--r--  test/files/run/macroPlugins-macroRuntime/Test_3.flags | 1
-rw-r--r--  test/files/run/macroPlugins-macroRuntime/Test_3.scala | 4
-rw-r--r--  test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml | 4
-rw-r--r--  test/files/run/macroPlugins-namerHooks.check | 45
-rw-r--r--  test/files/run/macroPlugins-namerHooks.scala | 39
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody.check | 2
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody/Macros_2.flags | 1
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody/Macros_2.scala | 18
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala | 21
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody/Test_3.scala | 4
-rw-r--r--  test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml | 4
-rw-r--r--  test/files/run/mutable-anyrefmap.scala | 91
-rw-r--r--  test/files/run/mutable-longmap.scala | 79
-rw-r--r--  test/files/run/t4287inferredMethodTypes.check | 30
-rw-r--r--  test/files/run/t4287inferredMethodTypes.scala | 25
-rw-r--r--  test/files/run/t5603.check | 4
-rw-r--r--  test/files/run/t7974.check | 104
-rw-r--r--  test/files/run/t7974/Symbols.scala | 6
-rw-r--r--  test/files/run/t7974/Test.scala | 20
-rw-r--r--  test/files/run/t8046.check | 2
-rw-r--r--  test/files/run/t8046/Test.scala | 18
-rw-r--r--  test/files/run/t8046/t8046c.scala | 13
-rw-r--r--  test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala | 2
-rw-r--r--  test/files/scalacheck/range.scala | 41
-rw-r--r--  test/junit/scala/collection/ArraySortingTest.scala | 29
-rw-r--r--  test/junit/scala/collection/SetMapConsistencyTest.scala | 479
-rw-r--r--  test/junit/scala/math/NumericTest.scala | 18
-rw-r--r--  test/pending/pos/t6161.scala | 22
-rw-r--r--  test/pending/presentation/context-bounds1.check | 51
-rw-r--r--  test/pending/presentation/context-bounds1/Test.scala | 3
-rw-r--r--  test/pending/presentation/context-bounds1/src/ContextBounds.scala | 13
-rw-r--r--  test/pending/run/reflection-sync-potpourri.scala (renamed from test/files/run/reflection-sync-potpourri.scala) | 0
-rw-r--r--  test/scaladoc/run/SI-6812.check | 1
-rw-r--r--  test/scaladoc/run/SI-6812.scala | 2
-rw-r--r--  test/scaladoc/run/SI-6812b.check | 1
-rw-r--r--  test/scaladoc/run/SI-6812b.scala | 24
-rw-r--r--  versions.properties | 4
156 files changed, 3155 insertions, 1076 deletions
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
index 593f93b784..0b92f1dab1 100644
--- a/build-ant-macros.xml
+++ b/build-ant-macros.xml
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="build-support">
+<project name="build-support" xmlns:artifact="urn:maven-artifact-ant">
<description> Macros for Scala's ant build </description>
<macrodef name="optimized">
@@ -451,8 +451,8 @@
<attribute name="project"/>
<sequential>
<local name="artifact-base"/>
- <property name="artifact-base" value="${maven-base}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
- <mkdir dir="${maven-base}/${@{project}.dir}${@{project}.name}"/>
+ <property name="artifact-base" value="${dist.maven}/${@{project}.dir}${@{project}.name}/${@{project}.name}"/>
+ <mkdir dir="${dist.maven}/${@{project}.dir}${@{project}.name}"/>
<copy tofile="${artifact-base}.jar" file="${build-osgi.dir}/org.scala-lang.${@{project}.package}${@{project}.name}${@{project}.namesuffix}.jar" overwrite="true"/>
<copy tofile="${artifact-base}-src.jar" file="${build-osgi.dir}/${@{project}.name}-src.jar" overwrite="true"/>
<copy tofile="${artifact-base}-pom.xml" file="${src.dir}/build/maven/${@{project}.dir}/${@{project}.name}-pom.xml" overwrite="true"/>
@@ -469,7 +469,8 @@
</sequential>
</macrodef>
-
+ <!-- TODO inline maven-deploy.xml's macrodefs, remove maven-deploy.xml -->
+ <include file="src/build/maven/maven-deploy.xml" as="deploy-macros"/>
<macrodef name="testSuite">
<attribute name="dir" default="${partest.dir}"/>
diff --git a/build.xml b/build.xml
index 53cd998f2d..00ad5f78e9 100755
--- a/build.xml
+++ b/build.xml
@@ -4,17 +4,14 @@
xmlns:artifact="urn:maven-artifact-ant"
xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<include file="build-ant-macros.xml" as="macros"/>
- <include file="src/build/maven/maven-deploy.xml" as="maven-deploy"/>
<description>
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
</description>
<!-- HINTS
-
- for faster builds, have a build.properties in the same directory as build.xml that says:
locker.skip=1
-
-->
<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
@@ -27,11 +24,14 @@ antArgs tend to be:
scalacArgs examples:
"-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
-targets exercised:
- deploy-core.snapshot publish-opt-nodocs distpack-maven-opt nightly locker.done build build-opt test.suite test.continuations.suite test.scaladoc
+supported/exercised targets
+ to publish: nightly publish-opt-nodocs
+ to build: build build-opt locker.done
+ to run tests: test.suite test.scaladoc
+
+DO NOT RELY ON ANY OTHER TARGETS (ok, you're probably ok assuming the ones defined in the first 100 lines of this file)
-NOTE: after distpack-maven-opt, it is expected there's a build file in dists/maven/latest that defines targets deploy and deploy.local
-TODO: get rid of this separate step
+NOTE: dists/maven/latest/build.xml will soon disappear; call `publish` in this build instead
-->
<!-- To use Zinc with the ant build:
@@ -70,7 +70,6 @@ TODO:
<!-- packaging -->
<target name="distpack" depends="pack-archives.done, pack-maven.done"/>
<target name="distpack-maven" depends="pack-maven.done"/>
-
<target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
<target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
@@ -81,7 +80,6 @@ TODO:
<param name="scalac.args.optimise" value="-optimise"/>
</antcall>
</target>
-
<target name="publish-core-opt-nodocs" description="Builds an untested, undocumented optimised core (library/reflect/compiler) and publishes to maven.">
<antcall target="publish-core">
<param name="docs.skip" value="1"/>
@@ -93,24 +91,22 @@ TODO:
<param name="docs.skip" value="1"/>
</antcall>
</target>
-
<target name="all.done" depends="test.done, distpack"/>
-
<target name="nightly-nopt" depends="all.done"/>
<target name="nightly"><optimized name="nightly-nopt"/></target>
-
<target name="nightly.checkall">
<antcall target="nightly-nopt"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
+ <!-- prefer the sbt names, but the dotted names are used in jenkins;
+ rename there first before dropping the dotted ones -->
+ <target name="publish-local" depends="publish.local"/>
+ <target name="publish-signed" depends="publish.signed"/>
+
<target name="clean" depends="quick.clean" description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
<target name="docsclean" depends="docs.clean" description="Removes generated documentation. Distributions are untouched."/>
<target name="distclean" depends="dist.clean" description="Removes all distributions. Binaries and documentation are untouched."/>
- <target name="test.continuations.suite">
- <echo message="DEPRECATED: the continuations have moved to https://github.com/scala/scala-continuations"/>
- </target>
-
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
@@ -120,7 +116,7 @@ TODO:
<property name="build.sysclasspath" value="ignore"/>
<!-- Defines the repository layout -->
- <property name="docs.dir" value="${basedir}/docs"/>
+ <property name="doc.dir" value="${basedir}/doc"/>
<property name="lib.dir" value="${basedir}/lib"/>
<property name="src.dir" value="${basedir}/src"/>
<property name="partest.dir" value="${basedir}/test"/>
@@ -151,6 +147,7 @@ TODO:
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
<property name="build-pack.dir" value="${build.dir}/pack"/>
+ <property name="build-manual.dir" value="${build.dir}/manual"/>
<property name="build-osgi.dir" value="${build.dir}/osgi"/>
<property name="build-junit.dir" value="${build.dir}/junit"/>
<property name="build-strap.dir" value="${build.dir}/strap"/>
@@ -401,6 +398,13 @@ TODO:
<property name="version.number" value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
</else></if>
+ <!-- some default in case something went wrong getting the revision -->
+ <property name="version.number" value="-unknown-"/>
+
+ <property name="dist.name" value="scala-${version.number}"/>
+ <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+ <property name="dist.maven" value="${dists.dir}/maven/${version.number}"/>
+
<condition property="has.java6">
<equals arg1="${ant.java.version}" arg2="1.6"/>
</condition>
@@ -445,10 +449,6 @@ TODO:
<format property="short" pattern="yyyyMMddHHmmss"/>
</tstamp>
- <!-- some default in case something went wrong getting the revision -->
- <property name="version.number" value="-unknown-"/>
- <property name="init.avail" value="yes"/>
-
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
@@ -1559,8 +1559,36 @@ TODO:
</staged-docs>
</target>
- <target name="docs.man" depends="docs.start">
- <staged-uptodate stage="docs" project="manual">
+ <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
+ <!-- TODO modularize compiler: docs.scaladoc, docs.interactive, -->
+ <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
+
+<!-- ===========================================================================
+ DISTRIBUTION
+============================================================================ -->
+ <!-- bin/ -->
+ <target name="dist.bin" depends="pack.bin">
+ <mkdir dir="${dist.dir}/bin"/>
+ <copy toDir="${dist.dir}/bin" overwrite="true">
+ <fileset dir="${build-pack.dir}/bin"/>
+ </copy>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scalac"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
+ <chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
+ </target>
+
+ <!-- doc/ and man/ -->
+ <target name="dist.doc" depends="scaladoc.task" unless="docs.skip"> <!-- depends on scaladoc.task for scalac taskdef -->
+ <mkdir dir="${dist.dir}/doc"/>
+ <copy toDir="${dist.dir}/doc" overwrite="true">
+ <fileset dir="${doc.dir}"/>
+ </copy>
+
+ <mkdir dir="${dist.dir}/doc/tools"/>
+ <mkdir dir="${dist.dir}/man/man1"/>
+ <staged-uptodate stage="manual" project="manual">
<check><srcfiles dir="${src.dir}/manual"/></check>
<do>
<mkdir dir="${build.dir}/manmaker/classes"/>
@@ -1570,44 +1598,33 @@ TODO:
srcdir="${src.dir}/manual"
includes="**/*.scala"
addparams="${scalac.args.all} -language:implicitConversions"/>
- <mkdir dir="${build-docs.dir}/manual/man/man1"/>
- <mkdir dir="${build-docs.dir}/manual/html"/>
- <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
+ <mkdir dir="${build-manual.dir}/genman/man1"/>
<taskdef name="genman"
classname="scala.tools.docutil.ManMaker"
classpathref="manual.build.path"/>
<genman command="fsc, scala, scalac, scaladoc, scalap"
- htmlout="${build-docs.dir}/manual/html"
- manout="${build-docs.dir}/manual/genman"/>
- <!-- On Windows source and target files can't be the same ! -->
- <fixcrlf
- srcdir="${build-docs.dir}/manual/genman"
- destdir="${build-docs.dir}/manual/man"
- eol="unix" includes="**/*.1"/>
- <copy todir="${build-docs.dir}/manual/html" overwrite="true">
- <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.gif"/>
- <include name="**/*.png"/>
- </fileset>
- </copy>
+ htmlout="${dist.dir}/doc/tools"
+ manout="${build-manual.dir}/genman"/>
</do>
</staged-uptodate>
- </target>
-
- <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
- <!-- TODO modularize compiler: docs.scaladoc, docs.interactive, -->
- <target name="docs.done" depends="docs.core, docs.actors, docs.scalap" unless="docs.skip"/>
-
-<!-- ===========================================================================
- DISTRIBUTION
-============================================================================ -->
- <target name="dist.base" depends="osgi.done">
- <property name="dist.name" value="scala-${version.number}"/>
- <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+ <!-- On Windows source and target files can't be the same ! -->
+ <fixcrlf
+ srcdir="${build-manual.dir}/genman"
+ destdir="${dist.dir}/man"
+ eol="unix" includes="**/*.1"/>
+ <copy todir="${dist.dir}/doc/tools" overwrite="true">
+ <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
+ <include name="**/*.html"/>
+ <include name="**/*.css"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.png"/>
+ </fileset>
+ </copy>
+ </target>
+ <!-- lib/ and src/ (jars: classes and sources) -->
+ <target name="dist.lib" depends="osgi.done">
<mkdir dir="${dist.dir}/lib"/>
<mkdir dir="${dist.dir}/src"/>
@@ -1637,29 +1654,10 @@ TODO:
</fileset>
</copy>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
-
- <mkdir dir="${dist.dir}/bin"/>
- <copy toDir="${dist.dir}/bin" overwrite="true">
- <fileset dir="${build-pack.dir}/bin"/>
- </copy>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
</target>
-
- <target name="dist.doc" depends="dist.base, docs.done">
- <mkdir dir="${dist.dir}/doc"/>
- <mkdir dir="${dist.dir}/doc/licenses"/>
- <mkdir dir="${dist.dir}/doc/tools"/>
- <copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc" overwrite="true"/>
- <copy file="${docs.dir}/README" toDir="${dist.dir}/doc" overwrite="true"/>
- <copy toDir="${dist.dir}/doc/licenses" overwrite="true">
- <fileset dir="${docs.dir}/licenses"/>
- </copy>
-
+ <!-- api/ (scaladoc) -->
+ <target name="dist.api" depends="docs.done" unless="docs.skip">
<mkdir dir="${dist.dir}/api"/>
<copy toDir="${dist.dir}/api" overwrite="true">
<fileset dir="${build-docs.dir}/library"/>
@@ -1674,20 +1672,7 @@ TODO:
</copy>
</target>
-
- <target name="dist.man" depends="dist.base, docs.man">
- <mkdir dir="${dist.dir}/man"/>
- <copy toDir="${dist.dir}/man" overwrite="true">
- <fileset dir="${build-docs.dir}/manual/man"/>
- </copy>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/tools" overwrite="true">
- <fileset dir="${build-docs.dir}/manual/html"/>
- </copy>
- </target>
-
-
- <target name="dist.partial" depends="dist.base">
+ <target name="dist.partial" depends="dist.lib, dist.bin, dist.doc">
<if><not><os family="windows"/></not><then>
<symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="true"/>
</then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
@@ -1695,12 +1680,13 @@ TODO:
</else></if>
</target>
- <target name="dist.done" depends="dist.doc, dist.man, dist.partial"/>
+ <target name="dist.done" depends="dist.partial, dist.api"/>
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
============================================================================ -->
+ <!-- TODO: get rid of this, it's redundant between maven and github -->
<target name="pack-archives.done" depends="dist.done, docs.done">
<mkdir dir="${dists.dir}/archives"/>
<property name="archive-base" value="${dists.dir}/archives/${dist.name}"/>
@@ -1716,7 +1702,7 @@ MAIN DISTRIBUTION PACKAGING
<if><not><isset property="docs.skip"/></not><then>
<tarz name="${archive-base}-devel-docs">
- <tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
+ <tarfileset dir="${dist.dir}/api" prefix="${dist.name}-devel-docs"/>
</tarz>
</then></if>
@@ -1761,13 +1747,13 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-maven.core" depends="osgi.core, docs.core">
- <property name="maven-base" value="${dists.dir}/maven/${version.number}"/>
- <mkdir dir="${maven-base}"/>
-
<mvn-package project="library"/>
<mvn-package project="reflect"/>
<mvn-package project="compiler"/>
+ <copy tofile="${dist.maven}/scala-library-all/scala-library-all-pom.xml"
+ file="${src.dir}/build/maven/scala-library-all-pom.xml" overwrite="true"/>
+
<!-- for replacestarr -->
<if><isset property="update.starr.version"/><then>
<echo message="From now on, ${maven.version.number} will be used as STARR (`build.properties`'s `starr.version` was modified)."/>
@@ -1777,7 +1763,16 @@ MAIN DISTRIBUTION PACKAGING
</then></if>
</target>
- <target name="pack-maven.base" depends="pack-maven.core, osgi.done, docs.done">
+ <target name="pack-maven.dist" depends="dist.bin, dist.doc">
+ <copy tofile="${dist.maven}/scala-dist/scala-dist-pom.xml" file="${src.dir}/build/maven/scala-dist-pom.xml" overwrite="true"/>
+ <jar whenmanifestonly="fail" destfile="${dist.maven}/scala-dist/scala-dist.jar" basedir="${dist.dir}">
+ <include name="bin/" />
+ <include name="doc/" />
+ <include name="man/" />
+ </jar>
+ </target>
+
+ <target name="pack-maven.base" depends="pack-maven.core, osgi.done, docs.done, pack-maven.dist">
<!-- TODO modularize compiler
<mvn-package project="interactive"/>
<mvn-package project="scaladoc"/>
@@ -1786,19 +1781,21 @@ MAIN DISTRIBUTION PACKAGING
<mvn-package project="actors"/>
<!-- don't bother fitting scalap into the mould: it will move out soon -->
- <copy tofile="${maven-base}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
- <copy tofile="${maven-base}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
- <jar destfile="${maven-base}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
+ <mkdir dir="${dist.maven}"/>
+ <copy tofile="${dist.maven}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
+ <copy tofile="${dist.maven}/scalap/scalap.jar" file="${scalap.jar}" overwrite="true"/>
+ <jar destfile="${dist.maven}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
<if><not><isset property="docs.skip"/></not><then>
- <jar destfile="${maven-base}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
+ <jar destfile="${dist.maven}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
</then></if>
</target>
+ <!-- TODO: remove this target and delete src/build/maven-deploy.xml -->
<target name="pack-maven.done" depends="pack-maven.base">
<!-- Create dists/maven/latest alias and copy maven-deploy ant build there. -->
<if><isset property="os.win"/><then>
<copy todir="${dists.dir}/maven/latest" overwrite="true">
- <fileset dir="${maven-base}"/>
+ <fileset dir="${dist.maven}"/>
</copy>
</then><else>
<symlink link="${dists.dir}/maven/latest"
@@ -1806,17 +1803,17 @@ MAIN DISTRIBUTION PACKAGING
overwrite="true"/>
</else></if>
<!-- copy build file and its dependencies -->
- <copy todir="${maven-base}"
+ <copy todir="${dist.maven}"
file="${lib-ant.dir}/ant-contrib.jar" overwrite="true"/>
- <copy todir="${maven-base}"
+ <copy todir="${dist.maven}"
file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" overwrite="true"/>
- <copy tofile="${maven-base}/build.xml"
+ <copy tofile="${dist.maven}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
<!-- export properties for use when deploying -->
- <echoproperties destfile="${maven-base}/build.properties"/>
+ <echoproperties destfile="${dist.maven}/build.properties"/>
</target>
- <!-- keep these properties out of ${maven-base}/build.properties, dumped in pack-maven.done -->
+ <!-- keep these properties out of ${dist.maven}/build.properties, dumped in pack-maven.done -->
<target name="init.maven" depends="init">
<property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
<property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
@@ -1840,20 +1837,34 @@ MAIN DISTRIBUTION PACKAGING
<!-- ===========================================================================
MAVEN PUBLISHING
============================================================================ -->
- <!-- TODO: inline maven-deploy.xml here and remove it, once jenkins jobs no longer rely on it -->
- <target name="publish" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the maven repo."> <deploy dir="${maven-base}/"/> </target>
- <target name="publish.local" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the local maven repo."> <deploy dir="${maven-base}/" local="true"/> </target>
- <target name="publish.signed" depends="pack-maven.base, init.maven" description="Publishes signed artifacts to the remote maven repo."> <deploy dir="${maven-base}/" signed="true"/> </target>
+ <target name="publish" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the maven repo.">
+ <deploy />
+ <deploy-pom name="scala-library-all"/>
+ <deploy-jar name="scala-dist"/>
+ </target>
+
+ <target name="publish.local" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the local maven repo.">
+ <deploy local="true"/>
+ <deploy-pom name="scala-library-all" local="true"/>
+ <deploy-jar name="scala-dist" local="true"/>
+ </target>
+
+ <target name="publish.signed" depends="pack-maven.base, init.maven" description="Publishes signed artifacts to the remote maven repo.">
+ <deploy signed="true"/>
+ <deploy-pom name="scala-library-all" signed="true"/>
+ <deploy-jar name="scala-dist" signed="true"/>
+ </target>
<target name="publish-core" depends="pack-maven.core, init.maven">
- <deploy-one dir="${maven-base}/" name="scala-compiler" />
- <deploy-one dir="${maven-base}/" name="scala-library" />
- <deploy-one dir="${maven-base}/" name="scala-reflect" />
+ <deploy-one name="scala-compiler" />
+ <deploy-one name="scala-library" />
+ <deploy-one name="scala-reflect" />
</target>
+
<target name="publish-core-local" depends="pack-maven.core, init.maven">
- <deploy-one dir="${maven-base}/" name="scala-compiler" local="true"/>
- <deploy-one dir="${maven-base}/" name="scala-library" local="true"/>
- <deploy-one dir="${maven-base}/" name="scala-reflect" local="true"/>
+ <deploy-one name="scala-compiler" local="true"/>
+ <deploy-one name="scala-library" local="true"/>
+ <deploy-one name="scala-reflect" local="true"/>
</target>
<target name="publish-core-opt" description="Builds an untested optimised core (library/reflect/compiler) and publishes to maven.">
diff --git a/docs/LICENSE b/doc/LICENSE.md
index 4daedef581..6b039afd68 100644
--- a/docs/LICENSE
+++ b/doc/LICENSE.md
@@ -11,14 +11,14 @@ All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
- * Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
- * Neither the name of the EPFL nor the names of its contributors
- may be used to endorse or promote products derived from this software
- without specific prior written permission.
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
@@ -39,18 +39,22 @@ which are also included in the `licenses/` directory:
### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html)
This license is used by the following third-party libraries:
+
* jansi
### [BSD License](http://www.opensource.org/licenses/bsd-license.php)
This license is used by the following third-party libraries:
+
* jline
### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause)
This license is used by the following third-party libraries:
+
* asm
### [MIT License](http://www.opensource.org/licenses/MIT)
This license is used by the following third-party libraries:
+
* jquery
* jquery-ui
* jquery-layout
@@ -59,5 +63,6 @@ This license is used by the following third-party libraries:
### Public Domain
The following libraries are freely available in the public domain:
+
* forkjoin
diff --git a/doc/License.rtf b/doc/License.rtf
new file mode 100644
index 0000000000..62ec2d023c
--- /dev/null
+++ b/doc/License.rtf
@@ -0,0 +1,65 @@
+{\rtf1\ansi\ansicpg1252\cocoartf1187\cocoasubrtf400
+{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
+{\colortbl;\red255\green255\blue255;}
+\margl1440\margr1440\vieww25140\viewh18960\viewkind0
+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural
+
+\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}.\
+\
+
+\fs48 Scala License
+\fs40 \
+
+\fs26 Copyright (c) 2002-2013 EPFL\
+Copyright (c) 2011-2013 Typesafe, Inc.\
+All rights reserved.\
+\
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\
+ \'95 Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\
+ \'95 Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\
+ \'95 Neither the name of the EPFL nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\
+\
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'94 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\
+
+\fs52 \
+
+\fs48 Other Licenses
+\fs52 \
+
+\fs26 This software includes projects with the following licenses, which are also included in the
+\fs24 licenses/
+\fs26 directory:\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt Apache License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jansi\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt BSD License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jline\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 asm\
+
+\fs30 \
+{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/MIT"}}{\fldrslt MIT License}}\
+
+\fs26 This license is used by the following third-party libraries:\
+ \'95 jquery\
+ \'95 jquery-ui\
+ \'95 jquery-layout\
+ \'95 sizzle\
+ \'95 tools tooltip\
+
+\fs30 \
+Public Domain\
+
+\fs26 The following libraries are freely available in the public domain:\
+ \'95 forkjoin}
\ No newline at end of file
diff --git a/docs/README b/doc/README
index 1d5f553d2e..29f64c9fef 100644
--- a/docs/README
+++ b/doc/README
@@ -13,7 +13,7 @@ Scala Tools
- scalac Scala compiler
- fsc Scala resident compiler
- scaladoc Scala API documentation generator
-- scalap Scala classfile decoder
+- scalap Scala classfile decoder
Run the command "scalac -help" to display the list of available
compiler options.
diff --git a/docs/licenses/apache_jansi.txt b/doc/licenses/apache_jansi.txt
index 067a5a6a34..067a5a6a34 100644
--- a/docs/licenses/apache_jansi.txt
+++ b/doc/licenses/apache_jansi.txt
diff --git a/docs/licenses/bsd_asm.txt b/doc/licenses/bsd_asm.txt
index 8613cd33a2..8613cd33a2 100644
--- a/docs/licenses/bsd_asm.txt
+++ b/doc/licenses/bsd_asm.txt
diff --git a/docs/licenses/bsd_jline.txt b/doc/licenses/bsd_jline.txt
index 3e5dba75da..3e5dba75da 100644
--- a/docs/licenses/bsd_jline.txt
+++ b/doc/licenses/bsd_jline.txt
diff --git a/docs/licenses/mit_jquery-layout.txt b/doc/licenses/mit_jquery-layout.txt
index 4af6a0a4b0..4af6a0a4b0 100644
--- a/docs/licenses/mit_jquery-layout.txt
+++ b/doc/licenses/mit_jquery-layout.txt
diff --git a/docs/licenses/mit_jquery-ui.txt b/doc/licenses/mit_jquery-ui.txt
index be226805d3..be226805d3 100644
--- a/docs/licenses/mit_jquery-ui.txt
+++ b/doc/licenses/mit_jquery-ui.txt
diff --git a/docs/licenses/mit_jquery.txt b/doc/licenses/mit_jquery.txt
index ef2c570469..ef2c570469 100644
--- a/docs/licenses/mit_jquery.txt
+++ b/doc/licenses/mit_jquery.txt
diff --git a/docs/licenses/mit_sizzle.txt b/doc/licenses/mit_sizzle.txt
index d81d30aa0f..d81d30aa0f 100644
--- a/docs/licenses/mit_sizzle.txt
+++ b/doc/licenses/mit_sizzle.txt
diff --git a/docs/licenses/mit_tools.tooltip.txt b/doc/licenses/mit_tools.tooltip.txt
index 27a4dbc788..27a4dbc788 100644
--- a/docs/licenses/mit_tools.tooltip.txt
+++ b/doc/licenses/mit_tools.tooltip.txt
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 412d7caab6..a2c3eefbca 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -1,21 +1,34 @@
<?xml version="1.0" encoding="UTF-8"?>
+<!--
+THIS FILE WILL SOON SELF DESTRUCT; DO NOT USE
+see publish.* targets in /build.xml
+-->
<project name="sabbus-maven-deploy" xmlns:artifact="urn:maven-artifact-ant">
<description>
SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY
</description>
+
<macrodef name="deploy-remote">
<attribute name="jar" default=""/>
<attribute name="pom"/>
<element name="artifacts" implicit="true" optional="true"/>
<sequential>
- <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
- <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:deploy>
+ <if><equals arg1="@{jar}" arg2="true"/><then>
+ <artifact:deploy settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:deploy>
+ </then><else>
+ <artifact:deploy file="@{jar}" settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:deploy>
+ </else></if>
</sequential>
</macrodef>
@@ -24,11 +37,19 @@
<attribute name="pom"/>
<element name="artifacts" implicit="true" optional="true"/>
<sequential>
- <artifact:install file="@{jar}">
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{pom}" />
- <artifacts/>
- </artifact:install>
+ <if><equals arg1="@{jar}" arg2="true"/><then>
+ <artifact:install>
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:install>
+ </then><else>
+ <artifact:install file="@{jar}">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{pom}" />
+ <artifacts/>
+ </artifact:install>
+ </else></if>
</sequential>
</macrodef>
@@ -46,26 +67,25 @@
</sequential>
</macrodef>
- <macrodef name="deploy-one">
- <attribute name="dir" default=""/>
+ <macrodef name="filter-pom">
+ <attribute name="path" />
<attribute name="name" />
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
<sequential>
- <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/>
-
- <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
-
<copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
<filterset>
<filter token="VERSION" value="${maven.version.number}" />
<filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
<filter token="XML_VERSION" value="${scala-xml.version.number}" />
<filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
+ <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" />
+ <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
+ <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" />
<filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
<filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
<filter token="JLINE_VERSION" value="${jline.version}" />
+ <filter token="AKKA_ACTOR_VERSION" value="${akka-actor.version.number}" />
+ <filter token="ACTORS_MIGRATION_VERSION" value="${actors-migration.version.number}" />
<!-- TODO modularize compiler.
<filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}" />
@@ -74,6 +94,20 @@
</filterset>
</copy>
<artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
+ </sequential>
+ </macrodef>
+
+ <macrodef name="deploy-one">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
<if><equals arg1="@{signed}" arg2="false"/><then>
<if><isset property="docs.skip"/><then>
@@ -108,26 +142,94 @@
</sequential>
</macrodef>
+ <macrodef name="deploy-jar">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}.jar with ${path}-pom.xml.</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom"/>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn failonerror="true">
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}.jar" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="deploy-pom">
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-pom.xml.</echo>
+
+ <filter-pom name="@{name}" path="@{path}"/>
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <deploy-to local="@{local}" pom="@{name}.pom"/>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn failonerror="true">
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}-pom-filtered.xml" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
+ </sequential>
+ </macrodef>
+
<macrodef name="deploy">
- <attribute name="dir" default=""/>
<attribute name="local" default="false"/>
<attribute name="signed" default="false"/>
<sequential>
- <deploy-one dir="@{dir}" name="scala-library" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-reflect" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-compiler" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-library" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-reflect" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler" local="@{local}" signed="@{signed}"/>
<!-- TODO modularize compiler.
- <deploy-one dir="@{dir}" name="scala-compiler-doc" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler-doc" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/>
-->
- <deploy-one dir="@{dir}" name="scala-actors" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="@{dir}" name="scalap" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-actors" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scalap" local="@{local}" signed="@{signed}"/>
</sequential>
</macrodef>
+
<target name="boot.maven">
<!-- Pull in properties from build -->
<property file="build.properties" />
@@ -162,7 +264,18 @@
</echo>
</target>
- <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target>
- <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target>
- <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target>
+ <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish` FROM /build.xml"/>
+ <deploy/>
+ </target>
+
+ <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.local` FROM /build.xml"/>
+ <deploy local="true"/>
+ </target>
+
+ <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo.">
+ <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.signed` FROM /build.xml"/>
+ <deploy signed="true"/>
+ </target>
</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index a16fe22343..4a000b27a1 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -50,7 +50,7 @@
<artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
<version>@PARSER_COMBINATORS_VERSION@</version>
</dependency>
- <dependency> <!-- for scala-compiler-repl-->
+ <dependency> <!-- for scala-compiler-repl; once it moves there, make it required -->
<groupId>jline</groupId>
<artifactId>jline</artifactId>
<version>@JLINE_VERSION@</version>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
new file mode 100644
index 0000000000..413da928bb
--- /dev/null
+++ b/src/build/maven/scala-dist-pom.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-dist</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Distribution Artifacts</name>
+ <description>The Artifacts Distributed with Scala</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library-all</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <!-- duplicated from scala-compiler, where it's optional,
+ so that resolving scala-dist's transitive dependencies does not include jline,
+ even though we need to include it in the dist, but macros depending on the compiler
+ shouldn't have to require jline...
+ another reason to modularize and move the dependency to scala-compiler-repl
+ TODO: remove duplication once we have the scala-compiler-repl module -->
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>@JLINE_VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
new file mode 100644
index 0000000000..f34a28e79a
--- /dev/null
+++ b/src/build/maven/scala-library-all-pom.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library-all</artifactId>
+ <packaging>pom</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Library Powerpack</name>
+ <description>The Scala Standard Library and Official Modules</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@XML_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@PARSER_COMBINATORS_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>scala-continuations-plugin_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@CONTINUATIONS_PLUGIN_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>scala-continuations-library_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@CONTINUATIONS_LIBRARY_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-swing_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@SCALA_SWING_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-actor_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@AKKA_ACTOR_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors-migration_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@ACTORS_MIGRATION_VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
index bb88c8d5e1..5e931817b5 100644
--- a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
@@ -8,10 +8,6 @@ trait Enclosures {
import universe._
- type MacroRole = analyzer.MacroRole
- def APPLY_ROLE = analyzer.APPLY_ROLE
- def macroRole: MacroRole
-
private lazy val site = callsiteTyper.context
private lazy val enclTrees = site.enclosingContextChain map (_.tree)
private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
index 7de3341304..14b0c6baba 100644
--- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -19,8 +19,14 @@ trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes
* @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
* `null` otherwise.
*/
+ def macroRuntime(expandee: Tree): MacroRuntime = pluginsMacroRuntime(expandee)
+
+ /** Default implementation of `macroRuntime`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroRuntime for more details)
+ */
private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
- def macroRuntime(macroDef: Symbol): MacroRuntime = {
+ def standardMacroRuntime(expandee: Tree): MacroRuntime = {
+ val macroDef = expandee.symbol
macroLogVerbose(s"looking for macro implementation: $macroDef")
if (fastTrack contains macroDef) {
macroLogVerbose("macro expansion is serviced by a fast track")
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 61ea9230a7..d122a1a207 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -623,15 +623,6 @@ self =>
syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
- /** Check that tree is a legal clause of a forSome. */
- def checkLegalExistential(t: Tree) = t match {
- case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
- ValDef(_, _, _, EmptyTree) | EmptyTree =>
- ;
- case _ =>
- syntaxError(t.pos, "not a legal existential clause", skipIt = false)
- }
-
/* -------------- TOKEN CLASSES ------------------------------------------- */
def isModifier: Boolean = in.token match {
@@ -885,9 +876,14 @@ self =>
}
}
private def makeExistentialTypeTree(t: Tree) = {
- val whereClauses = refinement()
- whereClauses foreach checkLegalExistential
- ExistentialTypeTree(t, whereClauses)
+ // EmptyTrees in the result of refinement() stand for parse errors
+ // so it's okay for us to filter them out here
+ ExistentialTypeTree(t, refinement() flatMap {
+ case t @ TypeDef(_, _, _, TypeBoundsTree(_, _)) => Some(t)
+ case t @ ValDef(_, _, _, EmptyTree) => Some(t)
+ case EmptyTree => None
+ case _ => syntaxError(t.pos, "not a legal existential clause", skipIt = false); None
+ })
}
/** {{{
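For reference, a minimal sketch of what the reworked makeExistentialTypeTree keeps and rejects in a forSome clause (the type names below are illustrative, not taken from this patch):

    // kept: an abstract type with bounds, i.e. TypeDef(_, _, _, TypeBoundsTree(_, _))
    type Ok = List[T] forSome { type T <: AnyRef }
    // rejected with "not a legal existential clause" and filtered out of the
    // resulting ExistentialTypeTree (a DefDef is not a legal existential clause):
    // type Bad = List[T] forSome { def f: Int }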
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 183752d4a2..7837f9a11a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -126,10 +126,11 @@ object Plugin {
}
/** Load all plugins specified by the arguments.
- * Each of `jars` must be a valid plugin archive or exploded archive.
+ * Each location of `paths` must be a valid plugin archive or exploded archive.
+ * Each of `paths` must define one plugin.
* Each of `dirs` may be a directory containing arbitrary plugin archives.
* Skips all plugins named in `ignoring`.
- * A single classloader is created and used to load all of them.
+ * A classloader is created to load each plugin.
*/
def loadAllFrom(
paths: List[List[Path]],
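A rough sketch of the per-plugin classloader policy described in the updated doc comment (simplified and hypothetical; the actual wiring inside loadAllFrom is more involved):

    import java.net.URL
    import scala.reflect.io.Path
    import scala.reflect.internal.util.ScalaClassLoader

    // one loader per -Xplugin entry: the plugin archive plus whatever else it lists on its classpath
    def loaderFor(pluginClasspath: List[Path]): ClassLoader = {
      val urls: Seq[URL] = pluginClasspath map (_.toURL)
      ScalaClassLoader.fromURLs(urls, getClass.getClassLoader)
    }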
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 7568c789fb..6ec364bcb6 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -94,11 +94,11 @@ trait ScalaSettings extends AbsScalaSettings
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
- val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
- val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
+ val plugin = MultiStringSetting("-Xplugin", "paths", "Load a plugin from each classpath.")
+ val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable plugins by name.")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
- val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
- val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
+ val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.")
+ val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath)
val Xprint = PhasesSetting ("-Xprint", "Print out program after")
val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
@@ -172,7 +172,8 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
+ val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal)
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None)
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
@@ -249,4 +250,9 @@ trait ScalaSettings extends AbsScalaSettings
def isBCodeAskedFor = (Ybackend.value != "GenASM")
def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser)
+ object MacroExpand {
+ val None = "none"
+ val Normal = "normal"
+ val Discard = "discard"
+ }
}
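To illustrate the replacement setting (a hypothetical embedding of the compiler, not code from this patch): a tool that previously passed -Ymacro-no-expand can now select a policy explicitly, for example programmatically:

    import scala.tools.nsc.{Global, Settings}

    val settings = new Settings
    // typecheck macro applications but drop the expansions afterwards,
    // which is what scaladoc and the presentation compiler typically want
    settings.Ymacroexpand.value = settings.MacroExpand.Discard
    val compiler = new Global(settings)

On the command line the same policy is selected with -Ymacro-expand:discard, as referenced in the Macros.scala comments further down.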
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 9738769db9..f14fce5de9 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -481,18 +481,33 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* For instance, say we have a Scala class:
*
* class Cls {
- * // ...
- * def someSymbol = `symbolic
- * // ...
+ * def someSymbol1 = 'Symbolic1
+ * def someSymbol2 = 'Symbolic2
+ * def sameSymbol1 = 'Symbolic1
+ * val someSymbol3 = 'Symbolic3
* }
*
* After transformation, this class looks like this:
*
* class Cls {
- * private "static" val <some_name>$symbolic = Symbol("symbolic")
- * // ...
- * def someSymbol = <some_name>$symbolic
- * // ...
+ * private <static> var symbol$1: scala.Symbol
+ * private <static> var symbol$2: scala.Symbol
+ * private <static> var symbol$3: scala.Symbol
+ * private val someSymbol3: scala.Symbol
+ *
+ * private <static> def <clinit> = {
+ * symbol$1 = Symbol.apply("Symbolic1")
+ * symbol$2 = Symbol.apply("Symbolic2")
+ * }
+ *
+ * private def <init> = {
+ * someSymbol3 = symbol$3
+ * }
+ *
+ * def someSymbol1 = symbol$1
+ * def someSymbol2 = symbol$2
+ * def sameSymbol1 = symbol$1
+ * val someSymbol3 = someSymbol3
* }
*
* The reasoning behind this transformation is the following. Symbols get interned - they are stored
@@ -502,17 +517,17 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* is accessed only once during class loading, and after that, the unique symbol is in the static
* member. Hence, it is cheap to both reach the unique symbol and do equality checks on it.
*
- * And, finally, be advised - scala symbol literal and the Symbol class of the compiler
+ * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
*/
case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
def transformApply = {
- // add the symbol name to a map if it's not there already
- val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
- val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
- // create a reference to a static field
- val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
- super.transform(ntree)
+ // add the symbol name to a map if it's not there already
+ val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+ val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
+ // create a reference to a static field
+ val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
+ super.transform(ntree)
}
transformApply
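As a user-level illustration of why the caching is sound (class and member names below are just examples): scala.Symbol.apply interns its argument, so every occurrence of the same literal denotes the same instance, and the generated static field merely memoizes that lookup:

    class Cls {
      def someSymbol1 = 'Symbolic1
      def sameSymbol1 = 'Symbolic1
    }

    val c = new Cls
    // both accessors yield the single interned instance, so reference equality holds;
    // after CleanUp it is read from a static field instead of calling Symbol.apply each time
    assert(c.someSymbol1 eq c.sameSymbol1)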
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 54e4fefc15..fa6e5399eb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -13,7 +13,6 @@ package typechecker
trait AnalyzerPlugins { self: Analyzer =>
import global._
-
trait AnalyzerPlugin {
/**
* Selectively activate this analyzer plugin, e.g. according to the compiler phase.
@@ -156,6 +155,117 @@ trait AnalyzerPlugins { self: Analyzer =>
def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
}
+ /**
+ * @define nonCumulativeReturnValueDoc Returns `None` if the plugin doesn't want to customize the default behavior
+ * or something else if the plugin knows better than the implementation provided in scala-compiler.jar.
+ * If multiple plugins return a non-empty result, it's going to be a compilation error.
+ */
+ trait MacroPlugin {
+ /**
+ * Selectively activate this macro plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Typechecks the right-hand side of a macro definition (which typically features
+ * a mere reference to a macro implementation).
+ *
+ * Default implementation provided in `self.standardTypedMacroBody` makes sure that the rhs
+ * resolves to a reference to a method in either a static object or a macro bundle,
+ * verifies that the referred method is compatible with the macro def and upon success
+ * attaches a macro impl binding to the macro def's symbol.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None
+
+ /**
+ * Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set),
+ * possibly using the current typer mode and the provided prototype.
+ *
+ * Default implementation provided in `self.standardMacroExpand` figures out whether the `expandee`
+ * needs to be expanded right away or its expansion has to be delayed until all undetermined
+ * parameters are inferred, then loads the macro implementation using `self.pluginsMacroRuntime`,
+ * prepares the invocation arguments for the macro implementation using `self.pluginsMacroArgs`,
+ * and finally calls into the macro implementation. After the call returns, it typechecks
+ * the expansion and performs some bookkeeping.
+ *
+ * This method is typically implemented if your plugin requires significant changes to the macro engine.
+ * If you only need to customize the macro context, consider implementing `pluginsMacroArgs`.
+ * If you only need to customize how macro implementations are invoked, consider going for `pluginsMacroRuntime`.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = None
+
+ /**
+ * Computes the arguments that need to be passed to the macro impl corresponding to a particular expandee.
+ *
+ * Default implementation provided in `self.standardMacroArgs` instantiates a `scala.reflect.macros.contexts.Context`,
+ * gathers type and value arguments of the macro application and throws them together into `MacroArgs`.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = None
+
+ /**
+ * Summons a function that encapsulates macro implementation invocations for a particular expandee.
+ *
+ * Default implementation provided in `self.standardMacroRuntime` returns a function that
+ * loads the macro implementation binding from the macro definition symbol,
+ * then uses either Java or Scala reflection to acquire the method that corresponds to the impl,
+ * and then reflectively calls into that method.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = None
+
+ /**
+ * Creates a symbol for the given tree in lexical context encapsulated by the given namer.
+ *
+ * Default implementation provided in `namer.standardEnterSym` handles MemberDef's and Imports,
+ * doing nothing for other trees (DocDef's are seen through and rewrapped). Typical implementation
+ * of `enterSym` for a particular tree flavor creates a corresponding symbol, assigns it to the tree,
+ * enters the symbol into scope and then might even perform some code generation.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = false
+
+ /**
+ * Makes sure that for the given class definition, there exists a companion object definition.
+ *
+ * Default implementation provided in `namer.standardEnsureCompanionObject` looks up a companion symbol for the class definition
+ * and then checks whether the resulting symbol exists or not. If it exists, then nothing else is done.
+ * If not, a synthetic object definition is created using the provided factory, which is then entered into namer's scope.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = None
+
+ /**
+ * Prepares a list of statements for being typechecked by performing domain-specific type-agnostic code synthesis.
+ *
+ * Trees passed into this method are going to be named, but not typed.
+ * In particular, you can rely on the compiler having called `enterSym` on every stat prior to calling this method.
+ *
+ * Default implementation does nothing. Current approaches to code synthesis (generation of underlying fields
+ * for getters/setters, creation of companion objects for case classes, etc) are too disparate and ad-hoc
+ * to be treated uniformly, so I'm leaving this for future work.
+ */
+ def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = stats
+ }
+
/** A list of registered analyzer plugins */
@@ -167,59 +277,158 @@ trait AnalyzerPlugins { self: Analyzer =>
analyzerPlugins = plugin :: analyzerPlugins
}
+ private abstract class CumulativeOp[T] {
+ def default: T
+ def accumulate: (T, AnalyzerPlugin) => T
+ }
+
+ private def invoke[T](op: CumulativeOp[T]): T = {
+ if (analyzerPlugins.isEmpty) op.default
+ else analyzerPlugins.foldLeft(op.default)((current, plugin) =>
+ if (!plugin.isActive()) current else op.accumulate(current, plugin))
+ }
/** @see AnalyzerPlugin.pluginsPt */
def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type =
+ // performance opt
if (analyzerPlugins.isEmpty) pt
- else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
- if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+ else invoke(new CumulativeOp[Type] {
+ def default = pt
+ def accumulate = (pt, p) => p.pluginsPt(pt, typer, tree, mode)
+ })
/** @see AnalyzerPlugin.pluginsTyped */
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
- // support deprecated methods in annotation checkers
- val annotCheckersTpe = addAnnotations(tree, tpe)
- if (analyzerPlugins.isEmpty) annotCheckersTpe
- else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
- }
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type =
+ // performance opt
+ if (analyzerPlugins.isEmpty) addAnnotations(tree, tpe)
+ else invoke(new CumulativeOp[Type] {
+ // support deprecated methods in annotation checkers
+ def default = addAnnotations(tree, tpe)
+ def accumulate = (tpe, p) => p.pluginsTyped(tpe, typer, tree, mode, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypeSig */
- def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
- if (analyzerPlugins.isEmpty) tpe
- else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = invoke(new CumulativeOp[Type] {
+ def default = tpe
+ def accumulate = (tpe, p) => p.pluginsTypeSig(tpe, typer, defTree, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypeSigAccessor */
- def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
- if (analyzerPlugins.isEmpty) tpe
- else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = invoke(new CumulativeOp[Type] {
+ def default = tpe
+ def accumulate = (tpe, p) => p.pluginsTypeSigAccessor(tpe, typer, tree, sym)
+ })
/** @see AnalyzerPlugin.canAdaptAnnotations */
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = invoke(new CumulativeOp[Boolean] {
// support deprecated methods in annotation checkers
- val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
- annotCheckersExists || {
- if (analyzerPlugins.isEmpty) false
- else analyzerPlugins.exists(plugin =>
- plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
- }
- }
+ def default = global.canAdaptAnnotations(tree, mode, pt)
+ def accumulate = (curr, p) => curr || p.canAdaptAnnotations(tree, typer, mode, pt)
+ })
/** @see AnalyzerPlugin.adaptAnnotations */
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = {
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = invoke(new CumulativeOp[Tree] {
// support deprecated methods in annotation checkers
- val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
- if (analyzerPlugins.isEmpty) annotCheckersTree
- else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
- if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
- }
+ def default = global.adaptAnnotations(tree, mode, pt)
+ def accumulate = (tree, p) => p.adaptAnnotations(tree, typer, mode, pt)
+ })
/** @see AnalyzerPlugin.pluginsTypedReturn */
- def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
- val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
- if (analyzerPlugins.isEmpty) annotCheckersType
- else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
- if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = invoke(new CumulativeOp[Type] {
+ def default = adaptTypeOfReturn(tree.expr, pt, tpe)
+ def accumulate = (tpe, p) => p.pluginsTypedReturn(tpe, typer, tree, pt)
+ })
+
+ /** A list of registered macro plugins */
+ private var macroPlugins: List[MacroPlugin] = Nil
+
+ /** Registers a new macro plugin */
+ def addMacroPlugin(plugin: MacroPlugin) {
+ if (!macroPlugins.contains(plugin))
+ macroPlugins = plugin :: macroPlugins
+ }
+
+ private abstract class NonCumulativeOp[T] {
+ def position: Position
+ def description: String
+ def default: T
+ def custom(plugin: MacroPlugin): Option[T]
+ }
+
+ private def invoke[T](op: NonCumulativeOp[T]): T = {
+ if (macroPlugins.isEmpty) op.default
+ else {
+ val results = macroPlugins.filter(_.isActive()).map(plugin => (plugin, op.custom(plugin)))
+ results.flatMap { case (p, Some(result)) => Some((p, result)); case _ => None } match {
+ case (p1, _) :: (p2, _) :: _ => typer.context.error(op.position, s"both $p1 and $p2 want to ${op.description}"); op.default
+ case (_, custom) :: Nil => custom
+ case Nil => op.default
+ }
+ }
+ }
+
+ /** @see MacroPlugin.pluginsTypedMacroBody */
+ def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Tree = invoke(new NonCumulativeOp[Tree] {
+ def position = ddef.pos
+ def description = "typecheck this macro definition"
+ def default = standardTypedMacroBody(typer, ddef)
+ def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef)
+ })
+
+ /** @see MacroPlugin.pluginsMacroExpand */
+ def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] {
+ def position = expandee.pos
+ def description = "expand this macro application"
+ def default = standardMacroExpand(typer, expandee, mode, pt)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroExpand(typer, expandee, mode, pt)
+ })
+
+ /** @see MacroPlugin.pluginsMacroArgs */
+ def pluginsMacroArgs(typer: Typer, expandee: Tree): MacroArgs = invoke(new NonCumulativeOp[MacroArgs] {
+ def position = expandee.pos
+ def description = "compute macro arguments for this macro application"
+ def default = standardMacroArgs(typer, expandee)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroArgs(typer, expandee)
+ })
+
+ /** @see MacroPlugin.pluginsMacroRuntime */
+ def pluginsMacroRuntime(expandee: Tree): MacroRuntime = invoke(new NonCumulativeOp[MacroRuntime] {
+ def position = expandee.pos
+ def description = "compute macro runtime for this macro application"
+ def default = standardMacroRuntime(expandee)
+ def custom(plugin: MacroPlugin) = plugin.pluginsMacroRuntime(expandee)
+ })
+
+ /** @see MacroPlugin.pluginsEnterSym */
+ def pluginsEnterSym(namer: Namer, tree: Tree): Context =
+ if (macroPlugins.isEmpty) namer.standardEnterSym(tree)
+ else invoke(new NonCumulativeOp[Context] {
+ def position = tree.pos
+ def description = "enter a symbol for this tree"
+ def default = namer.standardEnterSym(tree)
+ def custom(plugin: MacroPlugin) = {
+ val hasExistingSym = tree.symbol != NoSymbol
+ val result = plugin.pluginsEnterSym(namer, tree)
+ if (result && hasExistingSym) Some(namer.context)
+ else if (result && tree.isInstanceOf[Import]) Some(namer.context.make(tree))
+ else if (result) Some(namer.context)
+ else None
+ }
+ })
+
+ /** @see MacroPlugin.pluginsEnsureCompanionObject */
+ def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = invoke(new NonCumulativeOp[Symbol] {
+ def position = cdef.pos
+ def description = "enter a companion symbol for this tree"
+ def default = namer.standardEnsureCompanionObject(cdef, creator)
+ def custom(plugin: MacroPlugin) = plugin.pluginsEnsureCompanionObject(namer, cdef, creator)
+ })
+
+ /** @see MacroPlugin.pluginsEnterStats */
+ def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+ // performance opt
+ if (macroPlugins.isEmpty) stats
+ else macroPlugins.foldLeft(stats)((current, plugin) =>
+ if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, current))
}
}
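A minimal sketch of how these hooks are meant to be consumed (the MacroTweaks component and the "myproject." prefix are hypothetical; in practice this would live inside a compiler plugin where global is in scope):

    import scala.tools.nsc.Global

    class MacroTweaks(val global: Global) {
      import global._
      import analyzer._

      object myMacroPlugin extends MacroPlugin {
        // take over loading of macro implementations for one project's macros;
        // returning Some(runtime) bypasses standardMacroRuntime, None falls back to it
        override def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] =
          if (expandee.symbol.fullName startsWith "myproject.")
            Some(args => abort("hypothetical custom runtime goes here"))
          else None
      }

      addMacroPlugin(myMacroPlugin)
    }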
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 719d04a7f9..cd6b77404d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -726,8 +726,9 @@ trait ContextErrors {
NormalTypeError(expandee, "too many argument lists for " + fun)
}
- def MacroInvalidExpansionError(expandee: Tree, role: String, allowedExpansions: String) = {
- issueNormalTypeError(expandee, s"macro in $role role can only expand into $allowedExpansions")
+ def MacroIncompatibleEngineError(macroEngine: String) = {
+ val message = s"macro cannot be expanded, because it was compiled by an incompatible macro engine $macroEngine"
+ issueNormalTypeError(lastTreeToTyper, message)
}
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 0d46a96b81..a1e77515a8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -123,16 +123,15 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
*
* @scala.reflect.macros.internal.macroImpl(
* `macro`(
+ * "macroEngine" = <current macro engine>,
* "isBundle" = false,
* "isBlackbox" = true,
* "signature" = List(Other),
* "methodName" = "impl",
- * "versionFormat" = <current version format>,
* "className" = "Macros$"))
*/
+ def macroEngine = "v7.0 (implemented in Scala 2.11.0-M8)"
object MacroImplBinding {
- val versionFormat = 6.0
-
def pickleAtom(obj: Any): Tree =
obj match {
case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
@@ -183,12 +182,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
val payload = List[(String, Any)](
- "versionFormat" -> versionFormat,
- "isBundle" -> isBundle,
- "isBlackbox" -> isBlackbox,
- "className" -> className,
- "methodName" -> macroImpl.name.toString,
- "signature" -> signature
+ "macroEngine" -> macroEngine,
+ "isBundle" -> isBundle,
+ "isBlackbox" -> isBlackbox,
+ "className" -> className,
+ "methodName" -> macroImpl.name.toString,
+ "signature" -> signature
)
// the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload.
@@ -237,8 +236,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
raw.asInstanceOf[T]
}
- val pickleVersionFormat = unpickle("versionFormat", classOf[Double])
- if (versionFormat != pickleVersionFormat) fail(s"expected version format $versionFormat, actual $pickleVersionFormat")
+ val macroEngine = unpickle("macroEngine", classOf[String])
+ if (self.macroEngine != macroEngine) typer.TyperErrorGen.MacroIncompatibleEngineError(macroEngine)
val isBundle = unpickle("isBundle", classOf[Boolean])
val isBlackbox = unpickle("isBlackbox", classOf[Boolean])
@@ -315,7 +314,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* @return Macro impl reference for the given macro definition if everything is okay.
* EmptyTree if an error occurs.
*/
- def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = pluginsTypedMacroBody(typer, macroDdef)
+
+ /** Default implementation of `typedMacroBody`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsTypedMacroBody for more details)
+ */
+ def standardTypedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
val macroDef = macroDdef.symbol
assert(macroDef.isMacro, macroDdef)
@@ -350,7 +354,6 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
val universe: self.global.type = self.global
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse duplicateAndKeepPositions(expandeeTree)
- val macroRole = universe.analyzer.macroExpanderAttachment(expandeeTree).role
} with UnaffiliatedMacroContext {
val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
@@ -360,8 +363,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
*/
case class MacroArgs(c: MacroContext, others: List[Any])
+ def macroArgs(typer: Typer, expandee: Tree): MacroArgs = pluginsMacroArgs(typer, expandee)
- private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
+ /** Default implementation of `macroArgs`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroArgs for more details)
+ */
+ def standardMacroArgs(typer: Typer, expandee: Tree): MacroArgs = {
val macroDef = expandee.symbol
val paramss = macroDef.paramss
val treeInfo.Applied(core, targs, argss) = expandee
@@ -473,18 +480,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Keeps track of macros in-flight.
* See more information in the comments to `openMacros` in `scala.reflect.macros.WhiteboxContext`.
*/
- private var _openMacros = List[MacroContext]()
+ var _openMacros = List[MacroContext]()
def openMacros = _openMacros
- private def pushMacroContext(c: MacroContext) = _openMacros ::= c
- private def popMacroContext() = _openMacros = _openMacros.tail
+ def pushMacroContext(c: MacroContext) = _openMacros ::= c
+ def popMacroContext() = _openMacros = _openMacros.tail
def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
- /** Describes the role that the macro expandee is performing.
- */
- type MacroRole = scala.tools.nsc.typechecker.MacroRole
- final def APPLY_ROLE = MacroRole.Apply
- final def UNAPPLY_ROLE = MacroRole.Unapply
-
/** Performs macro expansion:
*
* ========= Expandable trees =========
@@ -527,30 +528,24 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation
* the expandee with an error marker set if there has been an error
*/
- private abstract class MacroExpander[Result: ClassTag](val role: MacroRole, val typer: Typer, val expandee: Tree) {
- def allowExpandee(expandee: Tree): Boolean = true
- def allowExpanded(expanded: Tree): Boolean = true
- def allowedExpansions: String = "anything"
- def allowResult(result: Result): Boolean = true
-
- def onSuccess(expanded: Tree): Result
- def onFallback(expanded: Tree): Result
- def onSuppressed(expandee: Tree): Result = expandee match { case expandee: Result => expandee }
- def onDelayed(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
- def onSkipped(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
- def onFailure(expanded: Tree): Result = { typer.infer.setError(expandee); expandee match { case expandee: Result => expandee } }
-
- def apply(desugared: Tree): Result = {
+ abstract class MacroExpander(val typer: Typer, val expandee: Tree) {
+ def onSuccess(expanded: Tree): Tree
+ def onFallback(expanded: Tree): Tree
+ def onSuppressed(expandee: Tree): Tree = expandee
+ def onDelayed(expanded: Tree): Tree = expanded
+ def onSkipped(expanded: Tree): Tree = expanded
+ def onFailure(expanded: Tree): Tree = { typer.infer.setError(expandee); expandee }
+
+ def apply(desugared: Tree): Tree = {
if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee)
else expand(desugared)
}
- protected def expand(desugared: Tree): Result = {
+ protected def expand(desugared: Tree): Tree = {
def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true)
def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
- assert(allowExpandee(expandee), summary())
- linkExpandeeAndDesugared(expandee, desugared, role)
+ linkExpandeeAndDesugared(expandee, desugared)
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
@@ -562,21 +557,17 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
onFailure(typer.infer.setError(expandee))
} else try {
val expanded = {
- val runtime = macroRuntime(expandee.symbol)
+ val runtime = macroRuntime(expandee)
if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
else macroExpandWithoutRuntime(typer, expandee)
}
expanded match {
case Success(expanded) =>
- if (allowExpanded(expanded)) {
- // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
- val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
- if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
- if (allowResult(expanded1)) expanded1 else onFailure(expanded)
- } else {
- typer.TyperErrorGen.MacroInvalidExpansionError(expandee, role.name, allowedExpansions)
- onFailure(expanded)
- }
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+ if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+ if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe)
+ else expanded1
case Fallback(fallback) => onFallback(fallback)
case Delayed(delayed) => onDelayed(delayed)
case Skipped(skipped) => onSkipped(skipped)
@@ -592,151 +583,136 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
}
- /** Expands a tree that carries a term, which happens to be a term macro.
- * @see MacroExpander
- */
- private abstract class TermMacroExpander(role: MacroRole, typer: Typer, expandee: Tree, mode: Mode, pt: Type)
- extends MacroExpander[Tree](role, typer, expandee) {
- override def allowedExpansions: String = "term trees"
- override def allowExpandee(expandee: Tree) = expandee.isTerm
- override def onSuccess(expanded: Tree) = typer.typed(expanded, mode, pt)
- override def onFallback(fallback: Tree) = typer.typed(fallback, mode, pt)
- }
-
/** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
* @param outerPt Expected type that comes from enclosing context (something that's traditionally called `pt`).
* @param innerPt Expected type that comes from the signature of a macro def, possibly wildcarded to help type inference.
- * @see MacroExpander
*/
- def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type): Tree = {
- object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, outerPt) {
- lazy val innerPt = {
- val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe
- if (isBlackbox(expandee)) tp
- else {
- // approximation is necessary for whitebox macros to guide type inference
- // read more in the comments for onDelayed below
- val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
- deriveTypeWithWildcards(undetparams)(tp)
- }
+ class DefMacroExpander(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type)
+ extends MacroExpander(typer, expandee) {
+ lazy val innerPt = {
+ val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe
+ if (isBlackbox(expandee)) tp
+ else {
+ // approximation is necessary for whitebox macros to guide type inference
+ // read more in the comments for onDelayed below
+ val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
+ deriveTypeWithWildcards(undetparams)(tp)
}
- override def onSuccess(expanded0: Tree) = {
- // prematurely annotate the tree with a macro expansion attachment
- // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
- linkExpandeeAndExpanded(expandee, expanded0)
-
- def typecheck(label: String, tree: Tree, pt: Type): Tree = {
- if (tree.isErrorTyped) tree
- else {
- if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree")
- // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
- // therefore we need to re-enable the conversions back temporarily
- val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt))
- if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}")
- result
- }
- }
+ }
+ override def onSuccess(expanded0: Tree) = {
+ // prematurely annotate the tree with a macro expansion attachment
+ // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
+ linkExpandeeAndExpanded(expandee, expanded0)
- if (isBlackbox(expandee)) {
- val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt)))
- typecheck("blackbox typecheck", expanded1, outerPt)
- } else {
- val expanded1 = expanded0
- val expanded2 = typecheck("whitebox typecheck #1", expanded1, outerPt)
- typecheck("whitebox typecheck #2", expanded2, innerPt)
+ def typecheck(label: String, tree: Tree, pt: Type): Tree = {
+ if (tree.isErrorTyped) tree
+ else {
+ if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree")
+ // `macroExpand` is called from `adapt`, where implicit conversions are disabled
+ // therefore we need to re-enable the conversions temporarily
+ val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt))
+ if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}")
+ result
}
}
- override def onDelayed(delayed: Tree) = {
- // =========== THE SITUATION ===========
- //
- // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
- // then there are two possible situations we're in:
- // 1) We're in POLYmode, when the typer tests the waters wrt type inference
- // (e.g. as in typedArgToPoly in doTypedApply).
- // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
- // (e.g. if we're an argument to a function call, then this means that no previous argument lists
- // can determine our type variables for us).
- //
- // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
- // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
- //
- // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer
- // the undetermined type params. Therefore we need to do something ourselves or otherwise this
- // expandee will forever remaing not expanded (see SI-5692). A traditional way out of this conundrum
- // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
- // but sometimes, if the inferencer lacks information, it will be forced to approximate.
- //
- // =========== THE PROBLEM ===========
- //
- // Consider the following example (thanks, Miles!):
- //
- // Iso represents an isomorphism between two datatypes:
- // 1) An arbitrary one (e.g. a random case class)
- // 2) A uniform representation for all datatypes (e.g. an HList)
- //
- // trait Iso[T, U] {
- // def to(t : T) : U
- // def from(u : U) : T
- // }
- // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
- //
- // case class Foo(i: Int, s: String, b: Boolean)
- // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
- // foo(Foo(23, "foo", true))
- //
- // In the snippet above, even though we know that there's a fundep going from T to U
- // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
- // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
- // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
- //
- // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) ===========
- //
- // To give materializers a chance to say their word before vanilla inference kicks in,
- // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
- // and then trigger macro expansion with the undetermined type parameters still there.
- // Thanks to that the materializer can take a look at what's going on and react accordingly.
- val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
- if (shouldInstantiate) {
- if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt)
- else {
- forced += delayed
- typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false)
- macroExpandApply(typer, delayed, mode, outerPt)
- }
- } else delayed
+
+ if (isBlackbox(expandee)) {
+ val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt)))
+ typecheck("blackbox typecheck", expanded1, outerPt)
+ } else {
+ val expanded1 = expanded0
+ val expanded2 = typecheck("whitebox typecheck #1", expanded1, outerPt)
+ typecheck("whitebox typecheck #2", expanded2, innerPt)
}
}
- expander(expandee)
+ override def onDelayed(delayed: Tree) = {
+ // =========== THE SITUATION ===========
+ //
+ // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+ // then there are two possible situations we're in:
+ // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+ // (e.g. as in typedArgToPoly in doTypedApply).
+ // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+ // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+ // can determine our type variables for us).
+ //
+ // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+ // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+ //
+ // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+ // the undetermined type params. Therefore we need to do something ourselves, or otherwise this
+ // expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
+ // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate.
+ //
+ // =========== THE PROBLEM ===========
+ //
+ // Consider the following example (thanks, Miles!):
+ //
+ // Iso represents an isomorphism between two datatypes:
+ // 1) An arbitrary one (e.g. a random case class)
+ // 2) A uniform representation for all datatypes (e.g. an HList)
+ //
+ // trait Iso[T, U] {
+ // def to(t : T) : U
+ // def from(u : U) : T
+ // }
+ // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+ //
+ // case class Foo(i: Int, s: String, b: Boolean)
+ // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+ // foo(Foo(23, "foo", true))
+ //
+ // In the snippet above, even though we know that there's a fundep going from T to U
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+ // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+ //
+ // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) ===========
+ //
+ // To give materializers a chance to say their word before vanilla inference kicks in,
+ // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
+ // and then trigger macro expansion with the undetermined type parameters still there.
+ // Thanks to that the materializer can take a look at what's going on and react accordingly.
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
+ if (shouldInstantiate) {
+ if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt)
+ else {
+ forced += delayed
+ typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false)
+ macroExpand(typer, delayed, mode, outerPt)
+ }
+ } else delayed
+ }
+ override def onFallback(fallback: Tree) = typer.typed(fallback, mode, outerPt)
}
- /** Expands a term macro used in unapply role as `u.Quasiquote(StringContext("", "")).q.unapply(x)` in `case q"$x" => ...`.
- * @see MacroExpander
+ /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+ * @see DefMacroExpander
*/
- def macroExpandUnapply(typer: Typer, original: Tree, fun: Tree, unapply: Symbol, args: List[Tree], mode: Mode, pt: Type) = {
- val expandee = treeCopy.Apply(original, gen.mkAttributedSelect(fun, unapply), args)
- object expander extends TermMacroExpander(UNAPPLY_ROLE, typer, expandee, mode, pt) {
- override def allowedExpansions: String = "unapply trees"
- override def allowExpandee(expandee: Tree) = expandee.isInstanceOf[Apply]
- private def unsupported(what: String) = abort("unapply macros currently don't support " + what)
- override def onFallback(fallback: Tree) = unsupported("fallback")
- override def onDelayed(delayed: Tree) = unsupported("advanced interaction with type inference")
- }
- expander(original)
+ def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt)
+
+ /** Default implementation of `macroExpand`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details)
+ */
+ def standardMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = {
+ val expander = new DefMacroExpander(typer, expandee, mode, pt)
+ expander(expandee)
}
- private sealed abstract class MacroStatus(val result: Tree)
- private case class Success(expanded: Tree) extends MacroStatus(expanded)
- private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
- private case class Delayed(delayed: Tree) extends MacroStatus(delayed)
- private case class Skipped(skipped: Tree) extends MacroStatus(skipped)
- private case class Failure(failure: Tree) extends MacroStatus(failure)
- private def Delay(expanded: Tree) = Delayed(expanded)
- private def Skip(expanded: Tree) = Skipped(expanded)
+ sealed abstract class MacroStatus(val result: Tree)
+ case class Success(expanded: Tree) extends MacroStatus(expanded)
+ case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+ case class Delayed(delayed: Tree) extends MacroStatus(delayed)
+ case class Skipped(skipped: Tree) extends MacroStatus(skipped)
+ case class Failure(failure: Tree) extends MacroStatus(failure)
+ def Delay(expanded: Tree) = Delayed(expanded)
+ def Skip(expanded: Tree) = Skipped(expanded)
/** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
+ def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
val wasDelayed = isDelayed(expandee)
val undetparams = calculateUndetparams(expandee)
val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
@@ -767,7 +743,31 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
macroLogLite("" + expanded + "\n" + showRaw(expanded))
val freeSyms = expanded.freeTerms ++ expanded.freeTypes
freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
- Success(atPos(enclosingMacroPosition.focus)(expanded))
+ // Macros might have spliced arguments with range positions into non-compliant
+ // locations, notably, under a tree without a range position. Or, they might
+ // splice a tree that `resetAttrs` has assigned NoPosition.
+ //
+ // Here, we just convert all positions in the tree to offset positions, and
+ // convert NoPositions to something sensible.
+ //
+ // Given that the IDE now sees the expandee (by using -Ymacro-expand:discard),
+ // this loss of position fidelity shouldn't cause any real problems.
+ //
+ // Alternatively, we could pursue a way to exclude macro expansions from position
+ // invariant checking, or find a way not to touch expansions that happen to validate.
+ //
+ // This would be useful for cases like:
+ //
+ // macro1 { macro2 { "foo" } }
+ //
+ // to allow `macro1` to see the range position of the "foo".
+ val expandedPos = enclosingMacroPosition.focus
+ def fixPosition(pos: Position) =
+ if (pos == NoPosition) expandedPos else pos.focus
+ expanded.foreach(t => t.pos = fixPosition(t.pos))
+
+ val result = atPos(enclosingMacroPosition.focus)(expanded)
+ Success(result)
}
expanded match {
case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
@@ -793,7 +793,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
+ def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
import typer.TyperErrorGen._
val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
macroLogLite(s"falling back to: $fallbackSym")
@@ -871,7 +871,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
context.implicitsEnabled = typer.context.implicitsEnabled
context.enrichmentEnabled = typer.context.enrichmentEnabled
context.macrosEnabled = typer.context.macrosEnabled
- macroExpandApply(newTyper(context), tree, EXPRmode, WildcardType)
+ macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
case _ =>
tree
})
@@ -902,12 +902,3 @@ object Fingerprint {
val LiftedTyped = new Fingerprint(-2)
val LiftedUntyped = new Fingerprint(-3)
}
-
-class MacroRole private[MacroRole](val name: String) extends AnyVal {
- override def toString = name
-}
-
-object MacroRole {
- val Apply = new MacroRole("apply")
- val Unapply = new MacroRole("unapply")
-}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 73057c83f4..27e8698676 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -22,7 +22,7 @@ trait Namers extends MethodSynthesis {
import global._
import definitions._
- private var _lockedCount = 0
+ var _lockedCount = 0
def lockedCount = this._lockedCount
/** Replaces any Idents for which cond is true with fresh TypeTrees().
@@ -107,8 +107,8 @@ trait Namers extends MethodSynthesis {
}
protected def owner = context.owner
- private def contextFile = context.unit.source.file
- private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
+ def contextFile = context.unit.source.file
+ def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
case ex: TypeError =>
// H@ need to ensure that we handle only cyclic references
TypeSigError(tree, ex)
@@ -264,7 +264,12 @@ trait Namers extends MethodSynthesis {
validate(sym2.companionClass)
}
- def enterSym(tree: Tree): Context = {
+ def enterSym(tree: Tree): Context = pluginsEnterSym(this, tree)
+
+ /** Default implementation of `enterSym`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnterSym for more details)
+ */
+ def standardEnterSym(tree: Tree): Context = {
def dispatch() = {
var returnContext = this.context
tree match {
@@ -330,7 +335,7 @@ trait Namers extends MethodSynthesis {
* be transferred to the symbol as they are, supply a mask containing
* the flags to keep.
*/
- private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
+ def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
val pos = tree.pos
val isParameter = tree.mods.isParameter
val flags = tree.mods.flags & mask
@@ -348,14 +353,14 @@ trait Namers extends MethodSynthesis {
else owner.newValue(name.toTermName, pos, flags)
}
}
- private def createFieldSymbol(tree: ValDef): TermSymbol =
+ def createFieldSymbol(tree: ValDef): TermSymbol =
owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
- private def createImportSymbol(tree: Tree) =
+ def createImportSymbol(tree: Tree) =
NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
/** All PackageClassInfoTypes come from here. */
- private def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
+ def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
val pkgOwner = pid match {
case Ident(_) => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner
case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass
@@ -414,7 +419,7 @@ trait Namers extends MethodSynthesis {
/** Given a ClassDef or ModuleDef, verifies there isn't a companion which
* has been defined in a separate file.
*/
- private def validateCompanionDefs(tree: ImplDef) {
+ def validateCompanionDefs(tree: ImplDef) {
val sym = tree.symbol orElse { return }
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
@@ -487,7 +492,13 @@ trait Namers extends MethodSynthesis {
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
+ def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol =
+ pluginsEnsureCompanionObject(this, cdef, creator)
+
+ /** Default implementation of `ensureCompanionObject`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnsureCompanionObject for more details)
+ */
+ def standardEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
val m = companionSymbolOf(cdef.symbol, context)
// @luc: not sure why "currentRun.compiles(m)" is needed, things breaks
// otherwise. documentation welcome.
@@ -845,9 +856,10 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- val rhsTpe =
- if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
- else defnTyper.computeType(tree.rhs, pt)
+ val rhsTpe = tree match {
+ case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt)
+ case _ => defnTyper.computeType(tree.rhs, pt)
+ }
val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
tree.tpt defineType defnTpe setPos tree.pos.focus
@@ -1149,7 +1161,7 @@ trait Namers extends MethodSynthesis {
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
+ addDefaultGetters(meth, ddef, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
@@ -1191,7 +1203,12 @@ trait Namers extends MethodSynthesis {
* typechecked, the corresponding param would not yet have the "defaultparam"
* flag.
*/
- private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+ private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+ val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetLocalAttrs(ddef.duplicate)
+ // having defs here is important to make sure that there's no sneaky tree sharing
+ // in methods with multiple default parameters
+ def rtparams = rtparams0.map(_.duplicate)
+ def rvparamss = rvparamss0.map(_.map(_.duplicate))
val methOwner = meth.owner
val isConstr = meth.isConstructor
val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
@@ -1223,23 +1240,36 @@ trait Namers extends MethodSynthesis {
//
vparamss.foldLeft(Nil: List[List[ValDef]]) { (previous, vparams) =>
assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
+ val rvparams = rvparamss(previous.length)
var baseParams = if (overrides) baseParamss.head else Nil
- for (vparam <- vparams) {
+ map2(vparams, rvparams)((vparam, rvparam) => {
val sym = vparam.symbol
// true if the corresponding parameter of the base class has a default argument
val baseHasDefault = overrides && baseParams.head.hasDefault
if (sym.hasDefault) {
- // generate a default getter for that argument
+ // Create a "default getter", i.e. a DefDef that will calculate vparam.rhs
+ // for those who are going to call meth without providing an argument corresponding to vparam.
+ // After the getter is created, a corresponding synthetic symbol is created and entered into the parent namer.
+ //
+ // In the ideal world, this DefDef would be a simple one-liner that just returns vparam.rhs,
+ // but in scalac things are complicated in two different ways.
+ //
+ // 1) Because the underlying language is quite sophisticated, we must allow for those sophistications in our getter.
+ // Namely: a) our getter has to copy type parameters from the associated method (or the associated class
+ // if meth is a constructor), because vparam.rhs might refer to one of them, b) our getter has to copy
+ // preceding value parameter lists from the associated method, because again vparam.rhs might refer to one of them.
+ //
+ // 2) Because we have already assigned symbols to type and value parameters that we have to copy, we must jump through
+ //    hoops in order to destroy them and allow subsequent naming to create new symbols for our getter. Previously this
+ // was done in an overly brutal way akin to resetAllAttrs, but now we utilize a resetLocalAttrs-based approach.
+ // Still far from ideal, but at least enables things like run/macro-default-params that were previously impossible.
+
val oflag = if (baseHasDefault) OVERRIDE else 0
val name = nme.defaultGetterName(meth.name, posCounter)
- // Create trees for the defaultGetter. Uses tools from Unapplies.scala
- var deftParams = tparams map copyUntyped[TypeDef]
- val defvParamss = mmap(previous) { p =>
- // in the default getter, remove the default parameter
- val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
- UnTyper.traverse(p1)
- p1
+ var defTparams = rtparams
+ val defVparamss = mmap(rvparamss.take(previous.length)){ rvp =>
+ copyValDef(rvp)(mods = rvp.mods &~ DEFAULTPARAM, rhs = EmptyTree)
}
val parentNamer = if (isConstr) {
@@ -1261,7 +1291,8 @@ trait Namers extends MethodSynthesis {
return // fix #3649 (prevent crash in erroneous source code)
}
}
- deftParams = cdef.tparams map copyUntypedInvariant
+ val ClassDef(_, _, rtparams, _) = resetLocalAttrs(cdef.duplicate)
+ defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT)))
nmr
}
else ownerNamer getOrElse {
@@ -1272,23 +1303,30 @@ trait Namers extends MethodSynthesis {
nmr
}
- // If the parameter type mentions any type parameter of the method, let the compiler infer the
- // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
- // This is better than always using Wildcard for inferring the result type, for example in
- // def f(i: Int, m: Int => Int = identity _) = m(i)
- // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
- val names = deftParams map { case TypeDef(_, name, _, _) => name }
- val subst = new TypeTreeSubstituter(names contains _)
-
- val defTpt = subst(copyUntyped(vparam.tpt match {
- // default getter for by-name params
- case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
- case t => t
- }))
- val defRhs = copyUntyped(vparam.rhs)
+ val defTpt =
+ // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree()
+ // will break the carefully orchestrated naming/typing logic that involves enterCopyMethod and caseClassCopyMeth
+ if (meth.isCaseCopy) TypeTree()
+ else {
+ // If the parameter type mentions any type parameter of the method, let the compiler infer the
+ // return type of the default getter => allow "def foo[T](x: T = 1)" to compile.
+ // This is better than always using Wildcard for inferring the result type, for example in
+ // def f(i: Int, m: Int => Int = identity _) = m(i)
+ // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
+ // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene
+ // will open the doors to a much better way of doing this kind of stuff
+ val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name }
+ val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _)
+ eraseAllMentionsOfTparams(rvparam.tpt match {
+ // default getter for by-name params
+ case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg
+ case t => t
+ })
+ }
+ val defRhs = rvparam.rhs
val defaultTree = atPos(vparam.pos.focus) {
- DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, deftParams, defvParamss, defTpt, defRhs)
+ DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, defTparams, defVparamss, defTpt, defRhs)
}
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
@@ -1303,7 +1341,7 @@ trait Namers extends MethodSynthesis {
}
posCounter += 1
if (overrides) baseParams = baseParams.tail
- }
+ })
if (overrides) baseParamss = baseParamss.tail
previous :+ vparams
}
@@ -1611,7 +1649,7 @@ trait Namers extends MethodSynthesis {
val tree: Tree
}
- def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter {
+ def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter with FlagAgnosticCompleter {
val tree = t
def completeImpl(sym: Symbol) = c(sym)
}
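For readers unfamiliar with the default-getter encoding that the long comment above relies on, here is a minimal sketch (not part of the patch; trees simplified) of what the synthesized getters roughly look like, using the same examples as the comments:

    object DefaultGettersSketch {
      // "def foo[T](x: T = 1)": the getter copies the type parameter and lets the
      // compiler infer its result type (Int here), because the declared parameter
      // type mentions T and is therefore erased from the getter's tpt.
      def foo[T](x: T = 1) = x
      //   roughly:  def foo$default$1[T] = 1
      // A default in a later parameter list may refer to earlier parameters, so the
      // getter copies the preceding value parameter list; the copies lose their own
      // DEFAULTPARAM flag and default rhs.
      def bar(i: Int)(j: Int = i * 2) = i + j
      //   roughly:  def bar$default$2(i: Int) = i * 2
    }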
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 14f47a00fd..995f98cc2c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -16,7 +16,7 @@ trait StdAttachments {
/** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime.
*/
- case class MacroExpanderAttachment(original: Tree, desugared: Tree, role: MacroRole)
+ case class MacroExpanderAttachment(original: Tree, desugared: Tree)
/** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment.
*/
@@ -24,15 +24,15 @@ trait StdAttachments {
tree.attachments.get[MacroExpanderAttachment] getOrElse {
tree match {
case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn)
- case _ => MacroExpanderAttachment(tree, EmptyTree, APPLY_ROLE)
+ case _ => MacroExpanderAttachment(tree, EmptyTree)
}
}
/** After macro expansion is completed, links the expandee and the expansion result
* by annotating them both with a `MacroExpansionAttachment`.
*/
- def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree, role: MacroRole): Unit = {
- val metadata = MacroExpanderAttachment(expandee, desugared, role)
+ def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree): Unit = {
+ val metadata = MacroExpanderAttachment(expandee, desugared)
expandee updateAttachment metadata
desugared updateAttachment metadata
}
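A minimal sketch, assuming it lives inside the StdAttachments trait above, of how the now two-field attachment is typically consumed: linkExpandeeAndDesugared stores the same attachment on both trees, so either side can recover the other.

    // hypothetical helper, not part of the patch
    def desugaredOf(expandee: Tree): Option[Tree] =
      expandee.attachments.get[MacroExpanderAttachment] map (_.desugared)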
@@ -95,7 +95,7 @@ trait StdAttachments {
/** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
*/
def isMacroExpansionSuppressed(tree: Tree): Boolean =
- ( settings.Ymacronoexpand.value // SI-6812
+ ( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
|| tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined
|| (tree match {
// we have to account for the fact that during typechecking an expandee might become wrapped,
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 910da77ca8..6b5afce993 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -56,16 +56,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
resetDocComments()
}
- object UnTyper extends Traverser {
- override def traverse(tree: Tree) = {
- if (tree.canHaveAttrs) {
- tree.clearType()
- if (tree.hasSymbolField) tree.symbol = NoSymbol
- }
- super.traverse(tree)
- }
- }
-
sealed abstract class SilentResult[+T] {
@inline final def fold[U](none: => U)(f: T => U): U = this match {
case SilentResultValue(value) => f(value)
@@ -1112,7 +1102,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (tree.isType)
adaptType()
else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree))
- macroExpandApply(this, tree, mode, pt)
+ macroExpand(this, tree, mode, pt)
else if (mode.typingConstructorPattern)
typedConstructorPattern(tree, pt)
else if (shouldInsertApply(tree))
@@ -1863,12 +1853,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
protected def enterSym(txt: Context, tree: Tree): Context =
- if (txt eq context) namer.enterSym(tree)
- else newNamer(txt).enterSym(tree)
+ if (txt eq context) namer enterSym tree
+ else newNamer(txt) enterSym tree
/** <!-- 2 --> Check that inner classes do not inherit from Annotation
*/
- def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
+ def typedTemplate(templ0: Template, parents1: List[Tree]): Template = {
+ val templ = templ0
+ // please FIXME: uncommenting this line breaks everything
+ // val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents)
val clazz = context.owner
clazz.annotations.map(_.completeInfo())
if (templ.symbol == NoSymbol)
@@ -1896,7 +1889,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
)
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
- enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
+ val body1 = pluginsEnterStats(this, templ.body)
+ enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1)
if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
validateParentClasses(parents1, selfType)
if (clazz.isCase)
@@ -1910,26 +1904,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
- val body = {
- val body =
- if (isPastTyper || reporter.hasErrors) templ.body
- else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
- val primaryCtor = treeInfo.firstConstructor(body)
+ val body2 = {
+ val body2 =
+ if (isPastTyper || reporter.hasErrors) body1
+ else body1 flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+ val primaryCtor = treeInfo.firstConstructor(body2)
val primaryCtor1 = primaryCtor match {
case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
val argss = superArgs(parents1.head) getOrElse Nil
- val pos = wrappingPos(parents1.head.pos, argss.flatten)
+ val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent
val superCall = atPos(pos)(PrimarySuperCall(argss))
deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
case _ => primaryCtor
}
- body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+ body2 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
}
- val body1 = typedStats(body, templ.symbol)
+ val body3 = typedStats(body2, templ.symbol)
if (clazz.info.firstParent.typeSymbol == AnyValClass)
- validateDerivedValueClass(clazz, body1)
+ validateDerivedValueClass(clazz, body3)
if (clazz.isTrait) {
for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
@@ -1937,7 +1931,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
- treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe_*
+ treeCopy.Template(templ, parents1, self1, body3) setType clazz.tpe_*
}
/** Remove definition annotations from modifiers (they have been saved
@@ -2319,10 +2313,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
- def typedBlock(block: Block, mode: Mode, pt: Type): Block = {
+ def typedBlock(block0: Block, mode: Mode, pt: Type): Block = {
val syntheticPrivates = new ListBuffer[Symbol]
try {
- namer.enterSyms(block.stats)
+ namer.enterSyms(block0.stats)
+ val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr)
for (stat <- block.stats) enterLabelDef(stat)
if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
@@ -3807,7 +3802,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
for (wc <- tree.whereClauses)
- if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
+ if (wc.symbol == NoSymbol) { namer enterSym wc; wc.symbol setFlag EXISTENTIAL }
else context.scope enter wc.symbol
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
for (vd @ ValDef(_, _, _, _) <- whereClauses1)
@@ -4250,7 +4245,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
val ids = for (p <- params) yield Ident(p.name)
val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- val body = treeCopy.Match(tree, selector1, cases)
+ // SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
+ // receive symbols owned by this function. However if, after a silent mode session, we discard
+ // this Function and try a different approach (e.g. applying a view to the receiver) we end up
+ // with orphaned symbols which blows up far down the pipeline (or can be detected with -Ycheck:typer).
+ val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
}
} else
@@ -4954,7 +4953,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
- def typedPackageDef(pdef: PackageDef) = {
+ def typedPackageDef(pdef0: PackageDef) = {
+ val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats))
val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
assert(sym.moduleClass ne NoSymbol, sym)
val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
@@ -5500,25 +5500,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
tpe
}
- def computeMacroDefType(tree: Tree, pt: Type): Type = {
+ def computeMacroDefType(ddef: DefDef, pt: Type): Type = {
assert(context.owner.isMacro, context.owner)
- assert(tree.symbol.isMacro, tree.symbol)
- assert(tree.isInstanceOf[DefDef], tree.getClass)
- val ddef = tree.asInstanceOf[DefDef]
+ assert(ddef.symbol.isMacro, ddef.symbol)
- val tree1 =
+ val rhs1 =
if (transformed contains ddef.rhs) {
// macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap
// if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree`
// here we guard against this case
transformed(ddef.rhs)
} else {
- val tree1 = typedMacroBody(this, ddef)
- transformed(ddef.rhs) = tree1
- tree1
+ val rhs1 = typedMacroBody(this, ddef)
+ transformed(ddef.rhs) = rhs1
+ rhs1
}
- val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
+ val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree
val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
if (isMacroBodyOkay && shouldInheritMacroImplReturnType) {
val commonMessage = "macro defs must have explicitly specified return types"
@@ -5530,7 +5528,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)")
}
- computeMacroDefTypeFromMacroImplRef(ddef, tree1) match {
+ computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
case ErrorType => ErrorType
case NothingTpe => NothingTpe
case NoType => reportFailure(); AnyTpe
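To make the deprecation handled above concrete, a self-contained 2.11-style macro sketch (hypothetical names; assumes scala-reflect on the classpath) with the explicitly specified return type that computeMacroDefType now nudges users towards:

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object TwiceMacro {
      // explicit return type: nothing needs to be inferred from the impl's c.Expr[Int]
      def twice(x: Int): Int = macro impl
      def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        c.Expr[Int](q"$x + $x")
      }
    }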
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index ed96f66ab8..ffac29b4b8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -43,12 +43,6 @@ trait Unapplies extends ast.TreeDSL {
def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
}
- def copyUntyped[T <: Tree](tree: T): T =
- returning[T](tree.duplicate)(UnTyper traverse _)
-
- def copyUntypedInvariant(td: TypeDef): TypeDef =
- copyTypeDef(copyUntyped(td))(mods = td.mods &~ (COVARIANT | CONTRAVARIANT))
-
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
@@ -58,8 +52,15 @@ trait Unapplies extends ast.TreeDSL {
}
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
- val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
- mmap(vparamss)(copyUntyped[ValDef])
+ val ClassDef(_, _, _, Template(_, _, body)) = resetLocalAttrs(cdef.duplicate)
+ val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body
+ vparamss
+ }
+
+ private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = {
+ val ClassDef(_, _, tparams, _) = resetLocalAttrs(cdef.duplicate)
+ val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT)))
+ tparamsInvariant
}
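As a reminder of what these helpers feed into (illustration only; trees simplified): for a variant case class, the synthetic companion members must use invariant copies of the class type parameters, which is exactly what constrTparamsInvariant produces.

    case class Cell[+A](x: A)
    // the synthesized companion is roughly:
    //   object Cell {
    //     def apply[A](x: A): Cell[A] = new Cell(x)          // A copied without its +
    //     def unapply[A](c: Cell[A]): Option[A] = Some(c.x)
    //   }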
/** The return value of an unapply method of a case class C[Ts]
@@ -125,7 +126,7 @@ trait Unapplies extends ast.TreeDSL {
/** The apply method corresponding to a case class
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val cparamss = constrParamss(cdef)
def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
@@ -141,7 +142,7 @@ trait Unapplies extends ast.TreeDSL {
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
@@ -196,7 +197,7 @@ trait Unapplies extends ast.TreeDSL {
treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
}
- val tparams = cdef.tparams map copyUntypedInvariant
+ val tparams = constrTparamsInvariant(cdef)
val paramss = classParamss match {
case Nil => Nil
case ps :: pss =>
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 27f10ff00a..b55a573029 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -88,7 +88,6 @@ trait InteractiveAnalyzer extends Analyzer {
}
}
-
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
@@ -105,6 +104,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
import definitions._
+ if (!settings.Ymacroexpand.isSetByUser)
+ settings.Ymacroexpand.value = settings.MacroExpand.Discard
+
val debugIDE: Boolean = settings.YpresentationDebug.value
val verboseIDE: Boolean = settings.YpresentationVerbose.value
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index bc490d8d45..343986a45d 100644
--- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -94,7 +94,7 @@ private[tests] trait CoreTestDefs
askAllSources(HyperlinkMarker) { pos =>
askTypeAt(pos)(NullReporter)
} { (pos, tree) =>
- if(tree.symbol == compiler.NoSymbol) {
+ if(tree.symbol == compiler.NoSymbol || tree.symbol == null) {
reporter.println("\nNo symbol is associated with tree: "+tree)
}
else {
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index 0722d808bf..4795a47efe 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -2,21 +2,54 @@ This is the documentation for the Scala standard library.
== Package structure ==
-The [[scala]] package contains core types.
-
-[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation.
-
-Other important packages include:
-
- - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang.
- - [[scala.io `scala.io`]] - Input and output.
- - [[scala.math `scala.math`]] - Basic math functions and additional numeric types.
- - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system.
- - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions.
- - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing.
- - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization.
-
-Many other packages exist. See the complete list on the left.
+The [[scala]] package contains core types like [[scala.Int `Int`]], [[scala.Float `Float`]], [[scala.Array `Array`]]
+or [[scala.Option `Option`]] which are accessible in all Scala compilation units without explicit qualification or
+imports.
+
+Notable packages include:
+
+ - [[scala.collection `scala.collection`]] and its sub-packages contain Scala's collections framework
+ - [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as
+ [[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]],
+ [[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or
+ [[scala.collection.immutable.HashSet `HashSet`]]
+ - [[scala.collection.mutable `scala.collection.mutable`]] - Mutable, sequential data-structures such as
+ [[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]],
+ [[scala.collection.mutable.StringBuilder `StringBuilder`]],
+ [[scala.collection.mutable.HashMap `HashMap`]] or [[scala.collection.mutable.HashSet `HashSet`]]
+ - [[scala.collection.concurrent `scala.collection.concurrent`]] - Mutable, concurrent data-structures such as
+ [[scala.collection.concurrent.TrieMap `TrieMap`]]
+ - [[scala.collection.parallel.immutable `scala.collection.parallel.immutable`]] - Immutable, parallel
+ data-structures such as [[scala.collection.parallel.immutable.ParVector `ParVector`]],
+ [[scala.collection.parallel.immutable.ParRange `ParRange`]],
+ [[scala.collection.parallel.immutable.ParHashMap `ParHashMap`]] or
+ [[scala.collection.parallel.immutable.ParHashSet `ParHashSet`]]
+ - [[scala.collection.parallel.mutable `scala.collection.parallel.mutable`]] - Mutable, parallel
+ data-structures such as [[scala.collection.parallel.mutable.ParArray `ParArray`]],
+ [[scala.collection.parallel.mutable.ParHashMap `ParHashMap`]],
+ [[scala.collection.parallel.mutable.ParTrieMap `ParTrieMap`]] or
+ [[scala.collection.parallel.mutable.ParHashSet `ParHashSet`]]
+ - [[scala.concurrent `scala.concurrent`]] - Primitives for concurrent programming such as
+ [[scala.concurrent.Future `Futures`]] and [[scala.concurrent.Promise `Promises`]]
+ - [[scala.io `scala.io`]] - Input and output operations
+ - [[scala.math `scala.math`]] - Basic math functions and additional numeric types like
+ [[scala.math.BigInt `BigInt`]] and [[scala.math.BigDecimal `BigDecimal`]]
+ - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system
+ - [[scala.util.matching `scala.util.matching`]] - [[scala.util.matching.Regex Regular expressions]]
+
+Other packages exist. See the complete list on the left.
+
+Additional parts of the standard library are shipped as separate libraries. These include:
+
+ - [[scala.reflect `scala.reflect`]] - Scala's reflection API (scala-reflect.jar)
+ - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar)
+ - [[scala.swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar)
+ - [[scala.util.continuations `scala.util.continuations`]] - Delimited continuations using continuation-passing-style
+ (scala-continuations-library.jar, scala-continuations-plugin.jar)
+ - [[scala.util.parsing `scala.util.parsing`]] - [[scala.util.parsing.combinator Parser combinators]], including an
+ example implementation of a [[scala.util.parsing.json JSON parser]] (scala-parser-combinators.jar)
+ - [[scala.actors `scala.actors`]] - Actor-based concurrency (deprecated and replaced by Akka actors,
+ scala-actors.jar)
== Automatic imports ==
diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala
index 9def6cb054..ff62948413 100644
--- a/src/library/scala/AnyVal.scala
+++ b/src/library/scala/AnyVal.scala
@@ -33,7 +33,7 @@ package scala
*
* User-defined value classes which avoid object allocation...
*
- * - must have a single, public `val` parameter that is the underlying runtime representation.
+ * - must have a single `val` parameter that is the underlying runtime representation.
* - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s.
* - typically extend no other trait apart from `AnyVal`.
* - cannot be used in type tests or pattern matching.
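A canonical example of such a user-defined value class (sketch, not taken from the patch):

    class Meter(val value: Double) extends AnyVal {
      def +(other: Meter): Meter = new Meter(value + other.value)
    }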
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index 90a8977e81..ef39ee2134 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -28,9 +28,8 @@ import scala.collection.mutable.ListBuffer
* functionality, which means that fields of the object will not have been initialized
* before the main method has been executed.'''''
*
- * It should also be noted that the `main` method will not normally need to be overridden:
- * the purpose is to turn the whole class body into the “main method”. You should only
- * chose to override it if you know what you are doing.
+ * It should also be noted that the `main` method should not be overridden:
+ * the whole class body becomes the “main method”.
*
* @author Martin Odersky
* @version 2.1, 15/02/2011
@@ -61,11 +60,12 @@ trait App extends DelayedInit {
}
/** The main method.
- * This stores all argument so that they can be retrieved with `args`
- * and the executes all initialization code segments in the order they were
- * passed to `delayedInit`
+ * This stores all arguments so that they can be retrieved with `args`
+ * and then executes all initialization code segments in the order in which
+ * they were passed to `delayedInit`.
* @param args the arguments passed to the main method
*/
+ @deprecatedOverriding("main should not be overridden", "2.11.0")
def main(args: Array[String]) = {
this._args = args
for (proc <- initCode) proc()
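For context, the usage pattern that the new @deprecatedOverriding protects (a minimal sketch):

    object EchoArgs extends App {
      // the whole object body is run from main via delayedInit; don't override main
      println(args.mkString(" "))
    }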
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index 27b75c0491..c3bad60072 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -38,8 +38,8 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* Example:
*
* {{{
- * scala> val x = LinkedList(1, 2, 3, 4, 5)
- * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4, 5)
+ * scala> val x = List(1, 2, 3, 4, 5)
+ * x: List[Int] = List(1, 2, 3, 4, 5)
*
* scala> x(3)
* res1: Int = 4
@@ -190,7 +190,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*/
def lastIndexWhere(p: A => Boolean, end: Int): Int
- /** Returns new $coll wih elements in reversed order.
+ /** Returns new $coll with elements in reversed order.
*
* $willNotTerminateInf
*
@@ -302,14 +302,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* Example:
* {{{
- * scala> val x = LinkedList(1)
- * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
*
* scala> val y = 2 +: x
- * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1)
+ * y: List[Int] = List(2, 1)
*
* scala> println(x)
- * LinkedList(1)
+ * List(1)
* }}}
*
* @return a new $coll consisting of `elem` followed
@@ -335,17 +335,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* Example:
* {{{
- * scala> import scala.collection.mutable.LinkedList
- * import scala.collection.mutable.LinkedList
- *
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
- *
+ * scala> val a = List(1)
+ * a: List[Int] = List(1)
+ *
* scala> val b = a :+ 2
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- *
+ * b: List[Int] = List(1, 2)
+ *
* scala> println(a)
- * LinkedList(1)
+ * List(1)
* }}}
*
* @return a new $coll consisting of
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index a0c519884c..ca098e57b9 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -267,20 +267,20 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
*
* Example:
* {{{
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
- *
- * scala> val b = LinkedList(2)
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
- *
+ * scala> val a = List(1)
+ * a: List[Int] = List(1)
+ *
+ * scala> val b = List(2)
+ * b: List[Int] = List(2)
+ *
* scala> val c = a ++ b
- * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- *
- * scala> val d = LinkedList('a')
- * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
- *
+ * c: List[Int] = List(1, 2)
+ *
+ * scala> val d = List('a')
+ * d: List[Char] = List(a)
+ *
* scala> val e = c ++ d
- * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+ * e: List[AnyVal] = List(1, 2, a)
* }}}
*
* @return a new $coll which contains all elements of this $coll
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index a9fe279599..01d179aeb6 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -130,8 +130,8 @@ trait GenTraversableOnce[+A] extends Any {
*
* Note that the folding function used to compute b is equivalent to that used to compute c.
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
*
* scala> val b = (5 /: a)(_+_)
* b: Int = 15
@@ -167,8 +167,8 @@ trait GenTraversableOnce[+A] extends Any {
*
* Note that the folding function used to compute b is equivalent to that used to compute c.
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
*
* scala> val b = (a :\ 5)(_+_)
* b: Int = 15
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 26af32046c..072fd3da44 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -320,14 +320,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
- * b: StringBuilder =
- *
- * scala> a.addString(b, "LinkedList(", ", ", ")")
- * res1: StringBuilder = LinkedList(1, 2, 3, 4)
+ * b: StringBuilder =
+ *
+ * scala> a.addString(b, "List(", ", ", ")")
+ * res5: StringBuilder = List(1, 2, 3, 4)
* }}}
*
* @param b the string builder to which elements are appended.
@@ -362,9 +362,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
@@ -385,14 +385,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* Example:
*
* {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
+ * scala> val a = List(1,2,3,4)
+ * a: List[Int] = List(1, 2, 3, 4)
+ *
* scala> val b = new StringBuilder()
* b: StringBuilder =
*
* scala> val h = a.addString(b)
- * b: StringBuilder = 1234
+ * h: StringBuilder = 1234
* }}}
* @param b the string builder to which elements are appended.
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 56f1802509..14ae57c43a 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -102,8 +102,14 @@ private[collection] trait Wrappers {
override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
+ // Note various overrides to avoid performance gotchas.
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
self =>
+ override def contains(o: Object): Boolean = {
+ try { underlying.contains(o.asInstanceOf[A]) }
+ catch { case cce: ClassCastException => false }
+ }
+ override def isEmpty = underlying.isEmpty
def size = underlying.size
def iterator = new ju.Iterator[A] {
val ui = underlying.iterator
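A small sketch of where these overrides matter, assuming the standard JavaConverters route (which wraps an immutable Scala Set in SetWrapper): without them, AbstractSet answers contains and isEmpty by iterating the whole set.

    import scala.collection.JavaConverters._

    val s: java.util.Set[Int] = Set(1, 2, 3).asJava   // backed by SetWrapper
    s.contains(Int.box(2))   // now delegates to the underlying Scala set's contains
    s.isEmpty                // likewise delegates instead of walking an iterator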
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 486c2b6c8f..249d76584d 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -175,9 +175,36 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
catch { case _: ClassCastException => false }
final override def sum[B >: T](implicit num: Numeric[B]): B = {
- if (isEmpty) this.num fromInt 0
- else if (numRangeElements == 1) head
- else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
+ // arithmetic series formula can be used for regular addition
+ if ((num eq scala.math.Numeric.IntIsIntegral)||
+ (num eq scala.math.Numeric.BigIntIsIntegral)||
+ (num eq scala.math.Numeric.ShortIsIntegral)||
+ (num eq scala.math.Numeric.ByteIsIntegral)||
+ (num eq scala.math.Numeric.CharIsIntegral)||
+ (num eq scala.math.Numeric.LongIsIntegral)||
+ (num eq scala.math.Numeric.FloatAsIfIntegral)||
+ (num eq scala.math.Numeric.BigDecimalIsFractional)||
+ (num eq scala.math.Numeric.DoubleAsIfIntegral)) {
+ val numAsIntegral = num.asInstanceOf[Integral[B]]
+ import numAsIntegral._
+ if (isEmpty) num fromInt 0
+ else if (numRangeElements == 1) head
+ else ((num fromInt numRangeElements) * (head + last) / (num fromInt 2))
+ } else {
+ // user provided custom Numeric, we cannot rely on arithmetic series formula
+ if (isEmpty) num.zero
+ else {
+ var acc = num.zero
+ var i = head
+ var idx = 0
+ while(idx < length) {
+ acc = num.plus(acc, i)
+ i = i + step
+ idx = idx + 1
+ }
+ acc
+ }
+ }
}
override lazy val hashCode = super.hashCode()
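A quick check of the two paths above (sketch): the whitelisted built-in Numeric instances hit the closed-form branch, while anything else falls back to the element-by-element loop.

    (1L to 100L).sum   // NumericRange[Long]: 100 * (1 + 100) / 2 = 5050, via LongIsIntegral
    // a Numeric outside the whitelist (e.g. a user-defined instance) takes the
    // accumulating loop instead, so sum still terminates with the correct result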
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 589661a343..3a64820be6 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -188,7 +188,10 @@ extends scala.collection.AbstractSeq[T]
val s = start + _start
val e = if (_end == UndeterminedEnd) _end else start + _end
var f = first1
- while (f.end <= s && !f.isLast) f = f.next
+ while (f.end <= s && !f.isLast) {
+ if (f.next eq null) f.addMore(more)
+ f = f.next
+ }
new PagedSeq(more, f, s, e)
}
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 00f398a4b0..786b18cd21 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -259,9 +259,24 @@ extends scala.collection.AbstractSeq[Int]
final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0)
final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
- if (isEmpty) 0
- else if (numRangeElements == 1) head
- else (numRangeElements.toLong * (head + last) / 2).toInt
+ if (num eq scala.math.Numeric.IntIsIntegral) {
+ // this is normal integer range with usual addition. arithmetic series formula can be used
+ if (isEmpty) 0
+ else if (numRangeElements == 1) head
+ else (numRangeElements.toLong * (head + last) / 2).toInt
+ } else {
+ // user provided custom Numeric, we cannot rely on arithmetic series formula
+ if (isEmpty) num.toInt(num.zero)
+ else {
+ var acc = num.zero
+ var i = head
+ while(i != terminalElement) {
+ acc = num.plus(acc, i)
+ i = i + step
+ }
+ num.toInt(acc)
+ }
+ }
}
override def toIterable = this
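The same split for Range, with the arithmetic-series branch spelled out (sketch):

    (1 to 100).sum       // numRangeElements = 100, so 100L * (1 + 100) / 2 = 5050
    (1 to 10 by 3).sum   // elements 1, 4, 7, 10: 4L * (1 + 10) / 2 = 22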
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index e6644c0dfc..eafbf96993 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -127,6 +127,8 @@ object Numeric {
def toLong(x: Float): Long = x.toLong
def toFloat(x: Float): Float = x
def toDouble(x: Float): Double = x.toDouble
+ // logic in Numeric base trait mishandles abs(-0.0f)
+ override def abs(x: Float): Float = math.abs(x)
}
trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] {
def div(x: Float, y: Float): Float = x / y
@@ -149,6 +151,8 @@ object Numeric {
def toLong(x: Double): Long = x.toLong
def toFloat(x: Double): Float = x.toFloat
def toDouble(x: Double): Double = x
+ // logic in Numeric base trait mishandles abs(-0.0)
+ override def abs(x: Double): Double = math.abs(x)
}
trait DoubleIsFractional extends DoubleIsConflicted with Fractional[Double] {
def div(x: Double, y: Double): Double = x / y
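The mishandling mentioned in the new comments comes from the generic abs being sign-test based, and IEEE 754 negative zero not being less than zero; a REPL-style illustration:

    -0.0 < 0.0                // false, so a sign-test abs of the form
                              // "if (lt(x, zero)) negate(x) else x" keeps -0.0
    math.abs(-0.0)            // 0.0, which is what the overrides above now delegate to
    1.0 / -0.0 == 1.0 / 0.0   // false (-Infinity vs Infinity): the two zeros are observable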
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 7129f22f60..986cd0390f 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -35,15 +35,3 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala
// let's not make it final so as not to confuse anyone
/*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
}
-
-// Manual stand-ins for formerly specialized variations.
-// Not comprehensive, only sufficent to run scala-check built scala 2.11.0-M5
-// TODO Scala 2.10.0.M6 Remove this once scalacheck is published against M6.
-private[runtime] abstract class AbstractPartialFunction$mcIL$sp extends scala.runtime.AbstractPartialFunction[Any, Int] {
- override def apply(x: Any): Int = apply$mcIL$sp(x)
- def apply$mcIL$sp(x: Any): Int = applyOrElse(x, PartialFunction.empty)
-}
-private[runtime] abstract class AbstractPartialFunction$mcFL$sp extends scala.runtime.AbstractPartialFunction[Any, Float] {
- override def apply(x: Any): Float = apply$mcIL$sp(x)
- def apply$mcIL$sp(x: Any): Float = applyOrElse(x, PartialFunction.empty)
-}
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index bbf80593db..2d5f832e1f 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -22,7 +22,7 @@ import scala.annotation.tailrec
* generated per call point, and will uniquely relate to the method called
* at that point, making the method name and argument types irrelevant. */
/* TODO: if performance is acceptable, PolyMethodCache should be made generic on the method type */
-sealed abstract class MethodCache {
+private[scala] sealed abstract class MethodCache {
/** Searches for a cached method in the `MethodCache` chain that
* is compatible with receiver class `forReceiver`. If none is cached,
* `null` is returned. If `null` is returned, find's caller should look-
@@ -32,7 +32,7 @@ sealed abstract class MethodCache {
def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache
}
-final class EmptyMethodCache extends MethodCache {
+private[scala] final class EmptyMethodCache extends MethodCache {
def find(forReceiver: JClass[_]): JMethod = null
@@ -41,7 +41,7 @@ final class EmptyMethodCache extends MethodCache {
}
-final class MegaMethodCache(
+private[scala] final class MegaMethodCache(
private[this] val forName: String,
private[this] val forParameterTypes: Array[JClass[_]]
) extends MethodCache {
@@ -53,7 +53,7 @@ final class MegaMethodCache(
}
-final class PolyMethodCache(
+private[scala] final class PolyMethodCache(
private[this] val next: MethodCache,
private[this] val receiver: JClass[_],
private[this] val method: JMethod,
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 13f2362d00..d597feb898 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -173,7 +173,7 @@ private[scala] trait PropertiesTrait {
* isJavaAtLeast("1.6") // true
* isJavaAtLeast("1.7") // true
* isJavaAtLeast("1.8") // false
- * }}
+ * }}}
*/
def isJavaAtLeast(version: String): Boolean = {
def parts(x: String) = {
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 276e157f55..2e021ad9d9 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -141,14 +141,14 @@ object Sorting {
var done = false
while (!done) {
while (b <= c && x(b) <= v) {
- if (x(b) == v) {
+ if (x(b) equiv v) {
swap(a, b)
a += 1
}
b += 1
}
while (c >= b && x(c) >= v) {
- if (x(c) == v) {
+ if (x(c) equiv v) {
swap(c, d)
d -= 1
}
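The switch from == to equiv matters because the partitioning should use the caller's Ordering notion of equality rather than universal equality; a small sketch of where the two can differ:

    val ci = Ordering.by[String, String](_.toLowerCase)
    ci.equiv("Sort", "SORT")   // true: equal as far as this Ordering is concerned
    "Sort" == "SORT"           // false: universal equality still distinguishes them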
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index f48b99bd5a..6b3be8b77f 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -39,16 +39,16 @@ object scala extends Command {
CmdOptionBound("howtorun:", Argument("how")),
"How to execute " & Argument("torun") & ", if it is present. " &
"Options for " & Argument("how") & " are " & Mono("guess") &
- " (the default), " & Mono("script") & ", and " & Mono("object") &
+ " (the default), " & Mono("script") & ", " & Mono("jar") & ", and " & Mono("object") &
"."),
Definition(
- CmdOption("i"),
+ CmdOption("i", Argument("file")),
"Requests that a file be pre-loaded. It is only " &
"meaningful for interactive shells."),
Definition(
- CmdOption("e"),
+ CmdOption("e", Argument("string")),
"Requests that its argument be executed as Scala code."),
Definition(
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 1c0c7c4a96..52e918595c 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -109,6 +109,9 @@ object scalac extends Command {
CmdOption("extdirs", Argument("dirs")),
"Override location of installed extensions."),
Definition(
+ CmdOption("feature"),
+ "Emit warning and location for usages of features that should be imported explicitly."),
+ Definition(
CmdOptionBound("g:", "{none,source,line,vars,notailcalls}"),
SeqPara(
Mono("\"none\"") & " generates no debugging info,",
@@ -128,6 +131,9 @@ object scalac extends Command {
CmdOption("javaextdirs", Argument("path")),
"Override Java extdirs classpath."),
Definition(
+ CmdOptionBound("language:", Argument("feature")),
+ "Enable one or more language features."),
+ Definition(
CmdOption("no-specialization"),
"Ignore " & MItalic("@specialize") & " annotations."),
Definition(
@@ -146,6 +152,12 @@ object scalac extends Command {
CmdOption("sourcepath", Argument("path")),
"Specify location(s) of source files."),
Definition(
+ CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7}"),
+ SeqPara(
+ Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),",
+ Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),",
+ Mono("\"jvm-1.7\"") & " target JVM 1.7,")),
+ Definition(
CmdOption("toolcp", Argument("path")),
"Add to the runner classpath."),
Definition(
@@ -159,6 +171,12 @@ object scalac extends Command {
CmdOption("uniqid"),
"Uniquely tag all identifiers in debugging output."),
Definition(
+ CmdOption("usejavacp"),
+ "Utilize the java.class.path in classpath resolution."),
+ Definition(
+ CmdOption("usemanifestcp"),
+ "Utilize the manifest in classpath resolution."),
+ Definition(
CmdOption("verbose"),
"Output messages about what the compiler is doing"),
Definition(
@@ -175,11 +193,11 @@ object scalac extends Command {
Section("Advanced Options",
DefinitionList(
Definition(
- CmdOption("Xcheck-null"),
- "Warn upon selection of nullable reference"),
- Definition(
CmdOption("Xcheckinit"),
"Wrap field accessors to throw an exception on uninitialized access."),
+ Definition(
+ CmdOption("Xdev"),
+ "Enable warnings for developers working on the Scala compiler"),
Definition(
CmdOption("Xdisable-assertions"),
"Generate no assertions and assumptions"),
@@ -193,6 +211,9 @@ object scalac extends Command {
Definition(
CmdOption("Xfatal-warnings"),
"Fail the compilation if there are any warnings."),
+ Definition(
+ CmdOption("Xfull-lubs"),
+ "Retain pre 2.10 behavior of less aggressive truncation of least upper bounds."),
Definition(
CmdOption("Xfuture"),
"Turn on future language features."),
@@ -202,18 +223,39 @@ object scalac extends Command {
Definition(
CmdOption("Xlint"),
"Enable recommended additional warnings."),
+ Definition(
+ CmdOption("Xlog-free-terms"),
+ "Print a message when reification creates a free term."),
+ Definition(
+ CmdOption("Xlog-free-types"),
+ "Print a message when reification resorts to generating a free type."),
+ Definition(
+ CmdOption("Xlog-implicit-conversions"),
+ "Print a message whenever an implicit conversion is inserted."),
Definition(
CmdOption("Xlog-implicits"),
"Show more detail on why some implicits are not applicable."),
+ Definition(
+ CmdOption("Xlog-reflective-calls"),
+ "Print a message when a reflective method call is generated."),
+ Definition(
+ CmdOptionBound("Xmacro-settings:", Argument("option")),
+ "Custom settings for macros."),
+ Definition(
+ CmdOption("Xmain-class", Argument("path")),
+ "Class for manifest's Main-Class entry (only useful with -d <jar>)."),
Definition(
CmdOption("Xmax-classfile-name", Argument("n")),
"Maximum filename length for generated classes."),
Definition(
- CmdOption("Xmigration"),
- "Warn about constructs whose behavior may have changed between 2.7 and 2.8."),
+ CmdOptionBound("Xmigration:", Argument("version")),
+ "Warn about constructs whose behavior may have changed since" & Argument("version") & "."),
Definition(
CmdOption("Xno-forwarders"),
"Do not generate static forwarders in mirror classes."),
+ Definition(
+ CmdOption("Xno-patmat-analysis"),
+ "Don't perform exhaustivity/unreachability analysis. Also, ignore " & MItalic("@switch") & " annotation."),
Definition(
CmdOption("Xno-uescape"),
"Disable handling of " & BSlash & "u unicode escapes"),
@@ -221,26 +263,26 @@ object scalac extends Command {
CmdOption("Xnojline"),
"Do not use JLine for editing."),
Definition(
- CmdOptionBound("Xplugin:", Argument("file")),
- "Load a plugin from a file"),
+ CmdOptionBound("Xplugin:", Argument("paths")),
+ "Load a plugin from each classpath."),
Definition(
CmdOptionBound("Xplugin-disable:", Argument("plugin")),
- "Disable a plugin"),
+ "Disable plugins by name."),
Definition(
CmdOption("Xplugin-list"),
- "Print a synopsis of loaded plugins"),
+ "Print a synopsis of loaded plugins."),
Definition(
CmdOptionBound("Xplugin-require:", Argument("plugin")),
- "Abort unless the given plugin(s) are available"),
+ "Abort if a named plugin is not loaded."),
Definition(
CmdOption("Xpluginsdir", Argument("path")),
- "Path to search compiler plugins."),
+ "Path to search for plugin archives."),
Definition(
CmdOptionBound("Xprint:", Argument("phases")),
"Print out program after " & Argument("phases") & " (see below)."),
Definition(
- CmdOption("Xprint-icode"),
- "Log internal icode to *.icode files."),
+ CmdOptionBound("Xprint-icode", "[:" & Argument("phases") & "]"),
+ "Log internal icode to *.icode files after" & Argument("phases") & " (default: icode)."),
Definition(
CmdOption("Xprint-pos"),
"Print tree positions, as offsets."),
@@ -269,9 +311,12 @@ object scalac extends Command {
Definition(
CmdOption("Xsource-reader", Argument("classname")),
"Specify a custom method for reading source files."),
+ Definition(
+ CmdOption("Xstrict-inference"),
+ "Don't infer known-unsound types."),
Definition(
CmdOption("Xverify"),
- "Verify generic signatures in generated bytecode."),
+ "Verify generic signatures in generated bytecode (asm backend only)."),
Definition(
CmdOption("Y"),
"Print a synopsis of private options.")
@@ -281,65 +326,101 @@ object scalac extends Command {
Section("Compilation Phases",
DefinitionList(
Definition(
- MItalic("initial"),
- "initializing compiler"),
- Definition(
- MItalic("parse"),
- "parse source files"),
+ MItalic("parser"),
+ "parse source into ASTs, perform simple desugaring"),
Definition(
MItalic("namer"),
- "create symbols"),
+ "resolve names, attach symbols to named trees"),
+ Definition(
+ MItalic("packageobjects"),
+ "load package objects"),
+ Definition(
+ MItalic("typer"),
+ "the meat and potatoes: type the trees"),
+ Definition(
+ MItalic("patmat"),
+ "translate match expressions"),
+ Definition(
+ MItalic("superaccessors"),
+ "add super accessors in traits and nested classes"),
+ Definition(
+ MItalic("extmethods"),
+ "add extension methods for inline classes"),
+ Definition(
+ MItalic("pickler"),
+ "serialize symbol tables"),
+ Definition(
+ MItalic("refchecks"),
+ "reference/override checking, translate nested objects"),
+ Definition(
+ MItalic("selectiveanf"),
+ "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"),
+ Definition(
+ MItalic("selectivecps"),
+ MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"),
+ Definition(
+ MItalic("uncurry"),
+ "uncurry, translate function values to anonymous classes"),
Definition(
- MItalic("analyze"),
- "name and type analysis"),
+ MItalic("tailcalls"),
+ "replace tail calls by jumps"),
Definition(
- MItalic("refcheck"),
- "reference checking"),
+ MItalic("specialize"),
+ MItalic("@specialized") & "-driven class and method specialization"),
Definition(
- MItalic("uncurry"),
- "uncurry function types and applications"),
+ MItalic("explicitouter"),
+ "this refs to outer pointers, translate patterns"),
+ Definition(
+ MItalic("erasure"),
+ "erase types, add interfaces for traits"),
+ Definition(
+ MItalic("posterasure"),
+ "clean up erased inline classes"),
+ Definition(
+ MItalic("lazyvals"),
+ "allocate bitmaps, translate lazy vals into lazified defs"),
Definition(
MItalic("lambdalift"),
- "lambda lifter"),
+ "move nested functions to top level"),
Definition(
- MItalic("typesasvalues"),
- "represent types as values"),
+ MItalic("constructors"),
+ "move field definitions into constructors"),
Definition(
- MItalic("addaccessors"),
- "add accessors for constructor arguments"),
+ MItalic("flatten"),
+ "eliminate inner classes"),
Definition(
- MItalic("explicitouterclasses"),
- "make links from inner classes to enclosing one explicit"),
+ MItalic("mixin"),
+ "mixin composition"),
Definition(
- MItalic("addconstructors"),
- "add explicit constructor for each class"),
+ MItalic("cleanup"),
+ "platform-specific cleanups, generate reflective calls"),
Definition(
- MItalic("tailcall"),
- "add tail-calls"),
+ MItalic("delambdafy"),
+ "remove lambdas"),
Definition(
- MItalic("wholeprog"),
- "perform whole program analysis"),
+ MItalic("icode"),
+ "generate portable intermediate code"),
Definition(
- MItalic("addinterfaces"),
- "add one interface per class"),
+ MItalic("inliner"),
+ "optimization: do inlining"),
Definition(
- MItalic("expandmixins"),
- "expand mixins by code copying"),
+ MItalic("inlineHandlers"),
+ "optimization: inline exception handlers"),
Definition(
- MItalic("boxing"),
- "makes boxing explicit"),
+ MItalic("closelim"),
+ "optimization: eliminate uncalled closures"),
Definition(
- MItalic("erasure"),
- "type eraser"),
+ MItalic("constopt"),
+ "optimization: optimize null and other constants"),
Definition(
- MItalic("icode"),
- "generate icode"),
+ MItalic("dce"),
+ "optimization: eliminate dead code"),
Definition(
- MItalic("codegen"),
- "enable code generation"),
+ MItalic("jvm"),
+ "generate JVM bytecode"),
Definition(
MItalic("terminal"),
- "compilation terminated"),
+ "the last phase in the compiler chain"),
Definition(
MItalic("all"),
"matches all phases"))))
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 241747e6d8..83da5141b9 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2058,8 +2058,8 @@ trait Trees { self: Universe =>
* @group Extractors
*/
abstract class ExistentialTypeTreeExtractor {
- def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
- def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
+ def apply(tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree
+ def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[MemberDef])]
}
/** The API that all existential type trees support
@@ -2069,8 +2069,12 @@ trait Trees { self: Universe =>
/** The underlying type of the existential type. */
def tpt: Tree
- /** The clauses of the definition of the existential type. */
- def whereClauses: List[Tree]
+ /** The clauses of the definition of the existential type.
+ * Elements are one of the following:
+ * 1) TypeDef with TypeBoundsTree right-hand side
+ * 2) ValDef with empty right-hand side
+ */
+ def whereClauses: List[MemberDef]
}
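Concretely, the two shapes listed in the new doc comment correspond to the two flavours of existential clauses in source (illustration; exact trees simplified):

    object ExistentialClausesSketch {
      import scala.language.existentials
      // type clause -> TypeDef whose rhs is a TypeBoundsTree
      type Refs = Array[T] forSome { type T <: AnyRef }
      // value clause -> ValDef with an empty rhs
      type Stable = x.type forSome { val x: String }
    }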
/** A synthetic tree holding an arbitrary type. Not to be confused with
@@ -2533,7 +2537,7 @@ trait Trees { self: Universe =>
/** Creates a `ExistentialTypeTree` node from the given components, having a given `tree` as a prototype.
* Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
*/
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree
}
// ---------------------- traversing and transforming ------------------------------
@@ -2654,6 +2658,8 @@ trait Trees { self: Universe =>
def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] =
treess mapConserve (transformValDefs(_))
/** Transforms a list of `CaseDef` nodes. */
+ def transformMemberDefs(trees: List[MemberDef]): List[MemberDef] =
+ trees mapConserve (tree => transform(tree).asInstanceOf[MemberDef])
def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] =
trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef])
/** Transforms a list of `Ident` nodes. */
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 19c67879f5..0ca8611719 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -166,9 +166,10 @@ trait BaseTypeSeqs {
val index = new Array[Int](nparents)
var i = 0
for (p <- parents) {
+ val parentBts = p.dealias.baseTypeSeq // dealias needed for SI-8046.
pbtss(i) =
- if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
- else p.baseTypeSeq
+ if (parentBts eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
+ else parentBts
index(i) = 0
i += 1
}
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index da210af938..dde3f1e9f7 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -1208,15 +1208,21 @@ trait Definitions extends api.StandardDefinitions {
}
def getMemberMethod(owner: Symbol, name: Name): TermSymbol = {
getMember(owner, name.toTermName) match {
- // todo. member symbol becomes a term symbol in cleanup. is this a bug?
- // case x: MethodSymbol => x
case x: TermSymbol => x
case _ => fatalMissingSymbol(owner, name, "method")
}
}
+ private lazy val erasurePhase = findPhaseWithName("erasure")
def getMemberIfDefined(owner: Symbol, name: Name): Symbol =
- owner.info.nonPrivateMember(name)
+ // findMember considered harmful after erasure; e.g.
+ //
+ // scala> exitingErasure(Symbol_apply).isOverloaded
+ // res27: Boolean = true
+ //
+ enteringPhaseNotLaterThan(erasurePhase)(
+ owner.info.nonPrivateMember(name)
+ )
/** Using getDecl rather than getMember may avoid issues with
* OverloadedTypes turning up when you don't want them, if you
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index cc6e55192f..91ba552012 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -409,7 +409,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
case from.TypeBoundsTree(lo, hi) =>
new TypeBoundsTree(importTree(lo), importTree(hi))
case from.ExistentialTypeTree(tpt, whereClauses) =>
- new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
+ new ExistentialTypeTree(importTree(tpt), whereClauses map importMemberDef)
case from.EmptyTree =>
EmptyTree
case null =>
@@ -475,6 +475,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef]
+ def importMemberDef(tree: from.MemberDef): MemberDef = importTree(tree).asInstanceOf[MemberDef]
def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template]
def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree]
def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident]
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 0ce5a0fbea..bed8310767 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -244,6 +244,18 @@ abstract class SymbolTable extends macros.Universe
finally popPhase(saved)
}
+ final def findPhaseWithName(phaseName: String): Phase = {
+ var ph = phase
+ while (ph != NoPhase && ph.name != phaseName) {
+ ph = ph.prev
+ }
+ if (ph eq NoPhase) phase else ph
+ }
+ final def enteringPhaseWithName[T](phaseName: String)(body: => T): T = {
+ val phase = findPhaseWithName(phaseName)
+ enteringPhase(phase)(body)
+ }
+
def slowButSafeEnteringPhase[T](ph: Phase)(op: => T): T = {
if (isCompilerUniverse) enteringPhase(ph)(op)
else op
@@ -344,16 +356,18 @@ abstract class SymbolTable extends macros.Universe
// Weak references so the garbage collector will take care of
// letting us know when a cache is really out of commission.
- private val caches = WeakHashSet[Clearable]()
+ import java.lang.ref.WeakReference
+ private var caches = List[WeakReference[Clearable]]()
def recordCache[T <: Clearable](cache: T): T = {
- caches += cache
+ caches ::= new WeakReference(cache)
cache
}
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
- caches foreach (_.clear)
+ caches foreach (ref => Option(ref.get).foreach(_.clear))
+ caches = caches.filterNot(_.get == null)
}
def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
@@ -364,9 +378,9 @@ abstract class SymbolTable extends macros.Universe
val NoCached: T = null.asInstanceOf[T]
var cached: T = NoCached
var cachedRunId = NoRunId
- caches += new Clearable {
+ recordCache(new Clearable {
def clear(): Unit = cached = NoCached
- }
+ })
() => {
if (currentRunId != cachedRunId || cached == NoCached) {
cached = f
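
The cache bookkeeping above replaces the WeakHashSet of Clearables with a plain list of java.lang.ref.WeakReference wrappers. A self-contained sketch of the same pattern (names are illustrative, not the compiler's):

    import java.lang.ref.WeakReference

    trait Clearable { def clear(): Unit }

    object PerRunCaches {
      // Weak references: a cache that is otherwise unreachable can still be collected,
      // and clearAll() both clears live caches and prunes entries that are already dead.
      private var caches = List.empty[WeakReference[Clearable]]

      def recordCache[T <: Clearable](cache: T): T = {
        caches ::= new WeakReference(cache)
        cache
      }

      def clearAll(): Unit = {
        caches foreach (ref => Option(ref.get).foreach(_.clear()))
        caches = caches.filterNot(_.get == null)
      }
    }
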
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 0dfcf06874..e15b33e5d7 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -801,9 +801,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
isConstructor && !isPrimaryConstructor
/** Is this symbol a synthetic apply or unapply method in a companion object of a case class? */
+ // xeno-by: why this obscure use of the CASE flag? why not simply compare name with nme.apply and nme.unapply?
final def isCaseApplyOrUnapply =
isMethod && isCase && isSynthetic
+ /** Is this symbol a synthetic copy method in a case class? */
+ final def isCaseCopy =
+ isMethod && owner.isCase && isSynthetic && name == nme.copy
+
/** Is this symbol a trait which needs an implementation class? */
final def needsImplClass = (
isTrait
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 6269004298..f6d21ec9bd 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -340,11 +340,13 @@ abstract class TreeGen extends macros.TreeBuilder {
// create parameters for <init> as synthetic trees.
var vparamss1 = mmap(vparamss) { vd =>
- atPos(vd.pos.focus) {
+ val param = atPos(vd.pos.makeTransparent) {
val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
- ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
+ ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, duplicateAndKeepPositions(vd.rhs))
}
+ param
}
+
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
val gvdefs = evdefs map {
@@ -377,15 +379,21 @@ abstract class TreeGen extends macros.TreeBuilder {
// this means that we don't know what will be the arguments of the super call
// therefore here we emit a dummy which gets populated when the template is named and typechecked
Some(
- // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
- // is it going to be a problem that we can no longer include the `argss`?
- atPos(wrappingPos(superPos, lvdefs)) (
+ atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
// Field definitions for the class - remove defaults.
- val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
+
+ val fieldDefs = vparamss.flatten map (vd => {
+ val field = copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree)
+ // Prevent overlapping of `field` end's position with default argument's start position.
+ // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
+ // the `pos` is a point position with all its values equal to `vd.rhs.pos.start`.
+ if (field.pos.isRange && vd.rhs.pos.isRange) field.pos = field.pos.withEnd(vd.rhs.pos.start - 1)
+ field
+ })
global.Template(parents, self, gvdefs ::: fieldDefs ::: constr ++: etdefs ::: rest)
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index d191fbd38f..4a518f6c56 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -541,7 +541,7 @@ trait Trees extends api.Trees {
extends TypTree with TypeBoundsTreeApi
object TypeBoundsTree extends TypeBoundsTreeExtractor
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
+ case class ExistentialTypeTree(tpt: Tree, whereClauses: List[MemberDef])
extends TypTree with ExistentialTypeTreeApi
object ExistentialTypeTree extends ExistentialTypeTreeExtractor
@@ -694,7 +694,7 @@ trait Trees extends api.Trees {
new AppliedTypeTree(tpt, args).copyAttrs(tree)
def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) =
new TypeBoundsTree(lo, hi).copyAttrs(tree)
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) =
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) =
new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
}
@@ -910,7 +910,7 @@ trait Trees extends api.Trees {
if (lo0 == lo) && (hi0 == hi) => t
case _ => treeCopy.TypeBoundsTree(tree, lo, hi)
}
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = tree match {
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) = tree match {
case t @ ExistentialTypeTree(tpt0, whereClauses0)
if (tpt0 == tpt) && (whereClauses0 == whereClauses) => t
case _ => treeCopy.ExistentialTypeTree(tree, tpt, whereClauses)
@@ -1421,7 +1421,7 @@ trait Trees extends api.Trees {
case CompoundTypeTree(templ) =>
treeCopy.CompoundTypeTree(tree, transformTemplate(templ))
case ExistentialTypeTree(tpt, whereClauses) =>
- treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
+ treeCopy.ExistentialTypeTree(tree, transform(tpt), transformMemberDefs(whereClauses))
case Return(expr) =>
treeCopy.Return(tree, transform(expr))
case Alternative(trees) =>
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 99e6ae633f..e9230aceee 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -8,7 +8,6 @@ package reflect
package internal
import scala.collection.{ mutable, immutable, generic }
-import generic.Clearable
import scala.ref.WeakReference
import mutable.ListBuffer
import Flags._
@@ -1999,7 +1998,9 @@ trait Types
if (sym.typeParams.size != args.size)
devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
- asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
+ val GenPolyType(tparams, result) = asSeenFromOwner(tp)
+ assert((tparams eq Nil) || tparams == sym.typeParams, (tparams, sym.typeParams))
+ result.instantiateTypeParams(sym.typeParams, args)
}
// note: does not go through typeRef. There's no need to because
@@ -2309,7 +2310,14 @@ trait Types
}
thisInfo.decls
}
- protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
+ protected[Types] def baseTypeSeqImpl: BaseTypeSeq =
+ if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType))
+ // SI-8046: the base type sequence may have more elements in a subclass, so we can't map it element-wise.
+ transform(sym.info).baseTypeSeq
+ else
+ // Optimization: no abstract types, so we can compute the BTS of this TypeRef as an element-wise map
+ // of the BTS of the referenced symbol.
+ sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
@@ -3660,7 +3668,11 @@ trait Types
if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.WeakHashSet[Type](initialUniquesCapacity)
- perRunCaches.recordCache(uniques)
+ // JZ: We used to register this as a perRunCache so it would be cleared eagerly at
+ // the end of the compilation run. But that facility didn't actually clear this map (SI-8129)!
+ // When I fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
+ // I've removed the registration for now. I don't think it's particularly harmful anymore
+ // as a) this is now a weak set, and b) it is discarded completely before the next run.
uniqueRunId = currentRunId
}
(uniques findEntryOrUpdate tp).asInstanceOf[T]
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index a6c34935ad..3d222fce10 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -487,6 +487,7 @@ abstract class UnPickler {
def nameRef() = readNameRef()
def tparamRef() = readTypeDefRef()
def vparamRef() = readValDefRef()
+ def memberRef() = readMemberDefRef()
def constRef() = readConstantRef()
def idRef() = readIdentRef()
def termNameRef() = readNameRef().toTermName
@@ -520,7 +521,7 @@ abstract class UnPickler {
case CLASStree => ClassDef(modsRef, typeNameRef, rep(tparamRef), implRef)
case COMPOUNDTYPEtree => CompoundTypeTree(implRef)
case DEFDEFtree => DefDef(modsRef, termNameRef, rep(tparamRef), rep(rep(vparamRef)), ref, ref)
- case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(ref))
+ case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(memberRef))
case FUNCTIONtree => Function(rep(vparamRef), ref)
case IMPORTtree => Import(ref, selectorsRef)
case LABELtree => LabelDef(termNameRef, rep(idRef), ref)
@@ -634,6 +635,12 @@ abstract class UnPickler {
case other =>
errorBadSignature("expected an TypeDef (" + other + ")")
}
+ protected def readMemberDefRef(): MemberDef =
+ readTreeRef() match {
+ case tree: MemberDef => tree
+ case other =>
+ errorBadSignature("expected a MemberDef (" + other + ")")
+ }
protected def errorBadSignature(msg: String) =
throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index b60fecd66e..2623a47be6 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -170,11 +170,20 @@ trait TypeComparers {
// corresponds does not check length of two sequences before checking the predicate,
// but SubstMap assumes it has been checked (SI-2956)
( sameLength(tparams1, tparams2)
- && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info))
+ && (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= subst(p2.info))
&& (res1 =:= subst(res2))
)
}
+ // SI-2066: this prevents overrides with incompatible variance in higher-order type parameters.
+ private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = {
+ def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod)
+ ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
+ }
+
+ private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) =
+ methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
+
def isSameType2(tp1: Type, tp2: Type): Boolean = {
def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
@@ -327,7 +336,10 @@ trait TypeComparers {
val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
- def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
+ def cmp(p1: Symbol, p2: Symbol) = (
+ methodHigherOrderTypeParamsSubVariance(p2, p1)
+ && sub2(p2.info) <:< sub1(p1.info)
+ )
(tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
}
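
A sketch of the unsoundness the new variance check closes (adapted reasoning, not part of the patch; the t2066 test files added later in this diff exercise the same cases):

    trait A {
      def f[T[_]](x: T[Int]): T[Any]
    }
    // Previously this override was accepted; with the check above it is rejected
    // ("method f has incompatible type"):
    //
    //   trait B extends A {
    //     override def f[T[+_]](x: T[Int]): T[Any] = x   // typechecks only because T is assumed covariant
    //   }
    //
    // A caller typed against A may instantiate T with an invariant constructor such as Array,
    // so the override would let an Array[Int] escape with static type Array[Any].
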
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index f61c1f3c50..e4a6503184 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -41,7 +41,8 @@ trait TraceSymbolActivity {
}
}
- private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString)
+ private lazy val erasurePhase = findPhaseWithName("erasure")
+ private def signature(id: Int) = enteringPhase(erasurePhase)(allSymbols(id).defString)
private def dashes(s: Any): String = ("" + s) map (_ => '-')
private def show(s1: Any, ss: Any*) {
@@ -87,14 +88,6 @@ trait TraceSymbolActivity {
private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = {
showMapFreq(xs.toList groupBy groupFn)(showFn)
}
- private lazy val findErasurePhase: Phase = {
- var ph = phase
- while (ph != NoPhase && ph.name != "erasure") {
- ph = ph.prev
- }
- if (ph eq NoPhase) phase else ph
- }
- private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
def showAllSymbols() {
if (!enabled) return
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index 31905c4739..f3e934d12b 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -20,19 +20,6 @@ trait Enclosures {
*/
def macroApplication: Tree
- /** The semantic role that `macroApplication` plays in the code.
- */
- type MacroRole
-
- /** The role that represents an application of a term macro,
- * e.g. `M(2)(3)` in `val x = M(2)(3)` or `M(a, b)` in `x match { case x @ M(a, b) => }`.
- */
- def APPLY_ROLE: MacroRole
-
- /** The semantic role that `macroApplication` plays in the code.
- */
- def macroRole: MacroRole
-
/** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
* Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
*
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index ed296a100d..effb97b96d 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -426,6 +426,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.languageFeatureModule
definitions.metaAnnotations
definitions.AnnotationDefaultAttr
+ // inaccessible: definitions.erasurePhase
definitions.isPhantomClass
definitions.syntheticCoreClasses
definitions.syntheticCoreMethods
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
deleted file mode 100644
index 578ef71e09..0000000000
--- a/src/swing/scala/swing/Publisher.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.swing
-
-import scala.collection.mutable
-import mutable.Buffer
-import event.Event
-
-/** <p>
- * Notifies registered reactions when an event is published. Publishers are
- * also reactors and listen to themselves per default as a convenience.
- * </p>
- * <p>
- * In order to reduce memory leaks, reactions are weakly referenced by default,
- * unless they implement <code>Reactions.StronglyReferenced</code>. That way,
- * the lifetime of reactions are more easily bound to the registering object,
- * which are reactors in common client code and hold strong references to their
- * reactions. As a result, reactors can be garbage collected even though they
- * still have reactions registered at some publisher, but not vice versa
- * since reactors (strongly) reference publishers they are interested in.
- * </p>
- */
-trait Publisher extends Reactor {
- import Reactions._
-
- protected val listeners = new RefSet[Reaction] {
- import scala.ref._
- val underlying = new mutable.HashSet[Reference[Reaction]]
- protected def Ref(a: Reaction) = a match {
- case a: StronglyReferenced => new StrongReference[Reaction](a) with super.Ref[Reaction]
- case _ => new WeakReference[Reaction](a, referenceQueue) with super.Ref[Reaction]
- }
- }
-
- private[swing] def subscribe(listener: Reaction) { listeners += listener }
- private[swing] def unsubscribe(listener: Reaction) { listeners -= listener }
-
- /**
- * Notify all registered reactions.
- */
- def publish(e: Event) { for (l <- listeners) if (l.isDefinedAt(e)) l(e) }
-
- listenTo(this)
-}
-
-/**
- * A publisher that subscribes itself to an underlying event source not before the first
- * reaction is installed. Can unsubscribe itself when the last reaction is uninstalled.
- */
-private[swing] trait LazyPublisher extends Publisher {
- import Reactions._
-
- protected def onFirstSubscribe()
- protected def onLastUnsubscribe()
-
- override def subscribe(listener: Reaction) {
- if(listeners.size == 1) onFirstSubscribe()
- super.subscribe(listener)
- }
- override def unsubscribe(listener: Reaction) {
- super.unsubscribe(listener)
- if(listeners.size == 1) onLastUnsubscribe()
- }
-}
-
-
-
-import scala.ref._
-
-private[swing] trait SingleRefCollection[+A <: AnyRef] extends Iterable[A] { self =>
-
- trait Ref[+A <: AnyRef] extends Reference[A] {
- override def hashCode() = get match {
- case Some(x) => x.##
- case _ => 0
- }
- override def equals(that: Any) = that match {
- case that: ReferenceWrapper[_] =>
- val v1 = this.get
- val v2 = that.get
- v1 == v2
- case _ => false
- }
- }
-
- //type Ref <: Reference[A] // TODO: could use higher kinded types, but currently crashes
- protected[this] def Ref(a: A): Ref[A]
- protected[this] val referenceQueue = new ReferenceQueue[A]
-
- protected val underlying: Iterable[Reference[A]]
-
- def purgeReferences() {
- var ref = referenceQueue.poll
- while (ref != None) {
- removeReference(ref.get.asInstanceOf[Reference[A]])
- ref = referenceQueue.poll
- }
- }
-
- protected[this] def removeReference(ref: Reference[A])
-
- def iterator = new Iterator[A] {
- private val elems = self.underlying.iterator
- private var hd: A = _
- private var ahead: Boolean = false
- private def skip(): Unit =
- while (!ahead && elems.hasNext) {
- // make sure we have a reference to the next element,
- // otherwise it might be garbage collected
- val next = elems.next.get
- ahead = next != None
- if (ahead) hd = next.get
- }
- def hasNext: Boolean = { skip; ahead }
- def next(): A =
- if (hasNext) { ahead = false; hd }
- else throw new NoSuchElementException("next on empty iterator")
- }
-}
-
-private[swing] class StrongReference[+T <: AnyRef](value: T) extends Reference[T] {
- private[this] var ref: Option[T] = Some(value)
- def isValid: Boolean = ref != None
- def apply(): T = ref.get
- def get : Option[T] = ref
- override def toString = get.map(_.toString).getOrElse("<deleted>")
- def clear() { ref = None }
- def enqueue(): Boolean = false
- def isEnqueued(): Boolean = false
- }
-
-abstract class RefBuffer[A <: AnyRef] extends Buffer[A] with SingleRefCollection[A] { self =>
- protected val underlying: Buffer[Reference[A]]
-
- def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
- def +=:(el: A) = { purgeReferences(); Ref(el) +=: underlying; this }
- def remove(el: A) { underlying -= Ref(el); purgeReferences(); }
- def remove(n: Int) = { val el = apply(n); remove(el); el }
- def insertAll(n: Int, iter: Iterable[A]) {
- purgeReferences()
- underlying.insertAll(n, iter.view.map(Ref(_)))
- }
- def update(n: Int, el: A) { purgeReferences(); underlying(n) = Ref(el) }
- def apply(n: Int) = {
- purgeReferences()
- var el = underlying(n).get
- while (el == None) {
- purgeReferences(); el = underlying(n).get
- }
- el.get
- }
-
- def length = { purgeReferences(); underlying.length }
- def clear() { underlying.clear(); purgeReferences() }
-
- protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
-
-private[swing] abstract class RefSet[A <: AnyRef] extends mutable.Set[A] with SingleRefCollection[A] { self =>
- protected val underlying: mutable.Set[Reference[A]]
-
- def -=(el: A): this.type = { underlying -= Ref(el); purgeReferences(); this }
- def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
- def contains(el: A): Boolean = { purgeReferences(); underlying.contains(Ref(el)) }
- override def size = { purgeReferences(); underlying.size }
-
- protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
-}
diff --git a/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala
index a00f4ed7db..40ca1d549c 100644
--- a/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala
+++ b/test/files/neg/macro-blackbox-fundep-materialization/Test_2.scala
@@ -1,4 +1,4 @@
-// see the comments for macroExpandApply.onDelayed for an explanation of what's tested here
+// see the comments for macroExpand.onDelayed for an explanation of what's tested here
object Test extends App {
case class Foo(i: Int, s: String, b: Boolean)
def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
diff --git a/test/files/neg/macro-incompatible-macro-engine.check b/test/files/neg/macro-incompatible-macro-engine.check
new file mode 100644
index 0000000000..1d582e5ed6
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine.check
@@ -0,0 +1,7 @@
+Test_3.scala:2: error: macro cannot be expanded, because it was compiled by an incompatible macro engine vxxx (implemented in the incompatibleMacroEngine plugin)
+ Macros.foo
+ ^
+Test_3.scala:3: error: macro cannot be expanded, because it was compiled by an incompatible macro engine vxxx (implemented in the incompatibleMacroEngine plugin)
+ Macros.foo
+ ^
+two errors found
diff --git a/test/files/neg/macro-incompatible-macro-engine/Macros_2.flags b/test/files/neg/macro-incompatible-macro-engine/Macros_2.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine/Macros_2.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine/Macros_2.scala b/test/files/neg/macro-incompatible-macro-engine/Macros_2.scala
new file mode 100644
index 0000000000..ad57a3cb36
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine/Macros_2.scala
@@ -0,0 +1,7 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.BlackboxContext
+
+object Macros {
+ def impl(c: BlackboxContext) = c.universe.Literal(c.universe.Constant(()))
+ def foo: Unit = macro impl
+} \ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine/Plugin_1.scala b/test/files/neg/macro-incompatible-macro-engine/Plugin_1.scala
new file mode 100644
index 0000000000..44ed91d2fb
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine/Plugin_1.scala
@@ -0,0 +1,35 @@
+package incompatibleMacroEngine
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+
+ val name = "incompatibleMacroEngine"
+ val description = "A sample analyzer plugin that crafts a macro impl binding with a non-standard macro engine."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ def fixupBinding(tree: Tree) = new Transformer {
+ override def transform(tree: Tree) = {
+ tree match {
+ case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
+ case _ if tree.tpe == null => tree setType NoType
+ case _ => ;
+ }
+ super.transform(tree)
+ }
+ }.transform(tree)
+
+ override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
+ val result = standardTypedMacroBody(typer, ddef)
+ val List(AnnotationInfo(atp, List(Apply(nucleus, _ :: others)), Nil)) = ddef.symbol.annotations
+ val updatedBinding = Apply(nucleus, Assign(Literal(Constant("macroEngine")), Literal(Constant("vxxx (implemented in the incompatibleMacroEngine plugin)"))) :: others)
+ ddef.symbol.setAnnotations(List(AnnotationInfo(atp, List(fixupBinding(updatedBinding)), Nil)))
+ Some(result)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine/Test_3.scala b/test/files/neg/macro-incompatible-macro-engine/Test_3.scala
new file mode 100644
index 0000000000..7e4fae5236
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo
+ Macros.foo
+} \ No newline at end of file
diff --git a/test/files/neg/macro-incompatible-macro-engine/scalac-plugin.xml b/test/files/neg/macro-incompatible-macro-engine/scalac-plugin.xml
new file mode 100644
index 0000000000..42b9cdd75d
--- /dev/null
+++ b/test/files/neg/macro-incompatible-macro-engine/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>incompatible-macro-engine</name>
+ <classname>incompatibleMacroEngine.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/neg/t2066.check b/test/files/neg/t2066.check
new file mode 100644
index 0000000000..efade87e26
--- /dev/null
+++ b/test/files/neg/t2066.check
@@ -0,0 +1,21 @@
+t2066.scala:6: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+ override def f[T[+_]] = ()
+ ^
+t2066.scala:10: error: overriding method f in trait A1 of type [T[_]]=> Unit;
+ method f has incompatible type
+ override def f[T[-_]] = ()
+ ^
+t2066.scala:23: error: overriding method f in trait A2 of type [T[+_]]=> Unit;
+ method f has incompatible type
+ override def f[T[-_]] = ()
+ ^
+t2066.scala:45: error: overriding method f in trait A4 of type [T[X[+_]]]=> Unit;
+ method f has incompatible type
+ override def f[T[X[_]]] = ()
+ ^
+t2066.scala:53: error: overriding method f in trait A5 of type [T[X[-_]]]=> Unit;
+ method f has incompatible type
+ override def f[T[X[_]]] = ()
+ ^
+5 errors found
diff --git a/test/files/neg/t2066.scala b/test/files/neg/t2066.scala
new file mode 100644
index 0000000000..7f15d39c67
--- /dev/null
+++ b/test/files/neg/t2066.scala
@@ -0,0 +1,70 @@
+trait A1 {
+ def f[T[_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[+_]] = ()
+}
+
+trait C1 extends A1 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A2 {
+ def f[T[+_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[_]] = () // okay
+}
+
+trait C2 extends A2 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+ def f[T[-_]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[_]] = () // okay
+}
+
+trait C3 extends A3 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A4 {
+ def f[T[X[+_]]] = ()
+}
+
+trait B4 extends A4 {
+ override def f[T[X[_]]] = ()
+}
+
+trait A5 {
+ def f[T[X[-_]]] = ()
+}
+
+trait B5 extends A5 {
+ override def f[T[X[_]]] = ()
+}
+
+
+
+trait A6 {
+ def f[T[X[_]]] = ()
+}
+
+trait B6 extends A6 {
+ override def f[T[X[+_]]] = () // okay
+}
+trait C6 extends A6 {
+ override def f[T[X[_]]] = () // okay
+}
+trait D6 extends A6 {
+ override def f[T[X[-_]]] = ()
+}
diff --git a/test/files/neg/t2066b.check b/test/files/neg/t2066b.check
new file mode 100644
index 0000000000..097c44fef3
--- /dev/null
+++ b/test/files/neg/t2066b.check
@@ -0,0 +1,5 @@
+t2066b.scala:7: error: overriding method f in trait A of type [T[_]](x: T[Int])T[Any];
+ method f has incompatible type
+ def f[T[+_]](x : T[Int]) : T[Any] = x
+ ^
+one error found
diff --git a/test/pending/neg/t2066.scala b/test/files/neg/t2066b.scala
index 46177b19f7..46177b19f7 100644
--- a/test/pending/neg/t2066.scala
+++ b/test/files/neg/t2066b.scala
diff --git a/test/files/pos/t2066.scala b/test/files/pos/t2066.scala
new file mode 100644
index 0000000000..30cb99d45c
--- /dev/null
+++ b/test/files/pos/t2066.scala
@@ -0,0 +1,25 @@
+trait A1 {
+ def f[T[+_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[_]] = ()
+}
+
+
+trait A2 {
+ def f[T[-_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[_]] = ()
+}
+
+
+trait A3 {
+ def f[T[X[_]]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[X[+_]]] = ()
+}
diff --git a/test/files/pos/t8046.scala b/test/files/pos/t8046.scala
new file mode 100644
index 0000000000..304d70b6b8
--- /dev/null
+++ b/test/files/pos/t8046.scala
@@ -0,0 +1,20 @@
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Two extends One {
+ trait Op[A] extends (A => A)
+
+ // This compiles
+ class View1 extends Op[Int] { def apply(xs: Int) = xs }
+
+ // ??? base class View2 not found in basetypes of class View2
+ // ./a.scala:9: error: class View2 needs to be abstract, since \
+ // method apply in trait Function1 of type (v1: T1)R is not defined
+ // (Note that T1 does not match Int)
+ // class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+ // ^
+ // one error found
+ class View2 extends Alias[Int] { def apply(xs: Int) = xs }
+}
diff --git a/test/files/pos/t8046b.scala b/test/files/pos/t8046b.scala
new file mode 100644
index 0000000000..45b99fd7e0
--- /dev/null
+++ b/test/files/pos/t8046b.scala
@@ -0,0 +1,16 @@
+trait One {
+ type Op[A]
+ type Alias = Op[Int]
+}
+
+trait Two extends One {
+ trait Op[A] extends M[A]
+ //(a: Alias) => a.value.toChar // okay
+ // (=> A).asSeenFrom(a.type, trait M): => Int
+ class View2 extends Alias { value.toChar } // toChar is not a member of type parameter A
+ // (=> A).asSeenFrom(View2.this.type, trait M): => A
+
+ // override type Alias = Op[Int] // works with this
+}
+
+trait M[A] { def value: A = sys.error("") }
diff --git a/test/files/pos/t8046c.scala b/test/files/pos/t8046c.scala
new file mode 100644
index 0000000000..f05b4c15b5
--- /dev/null
+++ b/test/files/pos/t8046c.scala
@@ -0,0 +1,19 @@
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+ trait Op[A] extends (A => A)
+
+ def f1(f: Op[Int]) = f(5)
+ def f2(f: Alias[Int]) = f(5)
+ def f3[T <: Op[Int]](f: T) = f(5)
+ def f4[T <: Alias[Int]](f: T) = f(5)
+ // ./a.scala:12: error: type mismatch;
+ // found : Int(5)
+ // required: T1
+ // def f4[T <: Alias[Int]](f: T) = f(5)
+ // ^
+}
+
diff --git a/test/files/pos/t8064.flags b/test/files/pos/t8064.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/t8064.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8064/Client_2.scala b/test/files/pos/t8064/Client_2.scala
new file mode 100644
index 0000000000..44106782c7
--- /dev/null
+++ b/test/files/pos/t8064/Client_2.scala
@@ -0,0 +1,8 @@
+object Test {
+ Macro {
+ def s = ""
+ Macro(s): @unchecked
+ ???
+ }
+}
+// Was: a range position validation error (unpositioned tree) \ No newline at end of file
diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala
new file mode 100644
index 0000000000..4fabd54a89
--- /dev/null
+++ b/test/files/pos/t8064/Macro_1.scala
@@ -0,0 +1,10 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macro {
+ def apply(a: Any): Any = macro impl
+
+ def impl(c: Context)(a: c.Tree): c.Tree = {
+ c.resetLocalAttrs(a)
+ }
+}
diff --git a/test/files/pos/t8064b.flags b/test/files/pos/t8064b.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/t8064b.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8064b/Client_2.scala b/test/files/pos/t8064b/Client_2.scala
new file mode 100644
index 0000000000..a7bf2b9fb4
--- /dev/null
+++ b/test/files/pos/t8064b/Client_2.scala
@@ -0,0 +1,6 @@
+object Test {
+ Macro {
+ "".reverse
+ }
+}
+// Was: a range position validation error (tree with offset position enclosing tree with range position) \ No newline at end of file
diff --git a/test/files/pos/t8064b/Macro_1.scala b/test/files/pos/t8064b/Macro_1.scala
new file mode 100644
index 0000000000..82582356c0
--- /dev/null
+++ b/test/files/pos/t8064b/Macro_1.scala
@@ -0,0 +1,11 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macro {
+ def apply(a: Any): Any = macro impl
+ def impl(c: Context)(a: c.Tree): c.Tree = {
+ import c.universe._
+
+ q"{$a; true}"
+ }
+}
diff --git a/test/files/pos/t8120.scala b/test/files/pos/t8120.scala
new file mode 100644
index 0000000000..e06f38d5db
--- /dev/null
+++ b/test/files/pos/t8120.scala
@@ -0,0 +1,9 @@
+object A {
+ class C {
+ def m(a: Nothing): Int = 0
+ }
+ implicit class RichAny(a: Any) {
+ def m(a: Any): Int = 0
+ }
+ (new C).m({ case (x, y) => x } : Any => Any)
+}
diff --git a/test/files/presentation/hyperlinks-macro.check b/test/files/presentation/hyperlinks-macro.check
new file mode 100644
index 0000000000..80d2268fa1
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro.check
@@ -0,0 +1,11 @@
+reload: MacroCall.scala
+
+askHyperlinkPos for `foo` at (5,7) MacroCall.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (2,7) MacroCall.scala
+================================================================================
+
+askHyperlinkPos for `foo` at (9,7) MacroCall.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (2,7) MacroCall.scala
+================================================================================
diff --git a/test/files/presentation/hyperlinks-macro/Runner.scala b/test/files/presentation/hyperlinks-macro/Runner.scala
new file mode 100644
index 0000000000..c2f89bdb17
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro/Runner.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override def runDefaultTests() {
+ sourceFiles foreach (src => askLoadedTyped(src).get)
+ super.runDefaultTests()
+ }
+}
diff --git a/test/files/presentation/hyperlinks-macro/src/MacroCall.scala b/test/files/presentation/hyperlinks-macro/src/MacroCall.scala
new file mode 100644
index 0000000000..d9676b3d2a
--- /dev/null
+++ b/test/files/presentation/hyperlinks-macro/src/MacroCall.scala
@@ -0,0 +1,11 @@
+object Test {
+ def foo = 0
+
+ scala.reflect.runtime.universe.reify {
+ foo/*#*/
+ }
+
+ identity {
+ foo/*#*/
+ }
+}
diff --git a/test/files/presentation/t4287.check b/test/files/presentation/t4287.check
new file mode 100644
index 0000000000..a922421e18
--- /dev/null
+++ b/test/files/presentation/t4287.check
@@ -0,0 +1,11 @@
+reload: Foo.scala
+
+askHyperlinkPos for `B` at (1,24) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `B` at (3,8) Foo.scala
+================================================================================
+
+askHyperlinkPos for `a` at (1,31) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `a` at (4,7) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287/Test.scala b/test/files/presentation/t4287/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/files/presentation/t4287/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t4287/src/Foo.scala b/test/files/presentation/t4287/src/Foo.scala
new file mode 100644
index 0000000000..a744eaabe2
--- /dev/null
+++ b/test/files/presentation/t4287/src/Foo.scala
@@ -0,0 +1,5 @@
+class Baz(val f: Int = B/*#*/.a/*#*/)
+
+object B {
+ val a = 2
+}
diff --git a/test/files/presentation/t4287b.check b/test/files/presentation/t4287b.check
new file mode 100644
index 0000000000..d4b33650fd
--- /dev/null
+++ b/test/files/presentation/t4287b.check
@@ -0,0 +1,6 @@
+reload: Foo.scala
+
+askHyperlinkPos for `i` at (14,11) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `i` at (10,9) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287b/Test.scala b/test/files/presentation/t4287b/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/files/presentation/t4287b/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t4287b/src/Foo.scala b/test/files/presentation/t4287b/src/Foo.scala
new file mode 100644
index 0000000000..47c676e2a2
--- /dev/null
+++ b/test/files/presentation/t4287b/src/Foo.scala
@@ -0,0 +1,15 @@
+trait Greeting {
+ val name: String
+ val msg = "How are you, "+name
+}
+
+object Greeting {
+ val hello = "hello"
+}
+
+class C(i: Int) extends {
+ val nameElse = "Bob"
+} with Greeting {
+ val name = "avc"
+ println(i/*#*/)
+} \ No newline at end of file
diff --git a/test/files/presentation/t4287c.check b/test/files/presentation/t4287c.check
new file mode 100644
index 0000000000..42fc30997d
--- /dev/null
+++ b/test/files/presentation/t4287c.check
@@ -0,0 +1,11 @@
+reload: Foo.scala
+
+askHyperlinkPos for `A` at (1,18) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (3,8) Foo.scala
+================================================================================
+
+askHyperlinkPos for `a` at (1,25) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `a` at (4,7) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t4287c.flags b/test/files/presentation/t4287c.flags
new file mode 100644
index 0000000000..d1a8244169
--- /dev/null
+++ b/test/files/presentation/t4287c.flags
@@ -0,0 +1 @@
+-Yinfer-argument-types \ No newline at end of file
diff --git a/test/files/presentation/t4287c/Test.scala b/test/files/presentation/t4287c/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/files/presentation/t4287c/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t4287c/src/Foo.scala b/test/files/presentation/t4287c/src/Foo.scala
new file mode 100644
index 0000000000..26870b5021
--- /dev/null
+++ b/test/files/presentation/t4287c/src/Foo.scala
@@ -0,0 +1,9 @@
+class A(a: Int = A/*#*/.a/*#*/)
+
+object A {
+ val a = 2
+}
+
+class B extends A {
+ def this(a) = this()
+} \ No newline at end of file
diff --git a/test/files/run/macro-default-params.check b/test/files/run/macro-default-params.check
new file mode 100644
index 0000000000..573541ac97
--- /dev/null
+++ b/test/files/run/macro-default-params.check
@@ -0,0 +1 @@
+0
diff --git a/test/files/run/macro-default-params/Macros_1.scala b/test/files/run/macro-default-params/Macros_1.scala
new file mode 100644
index 0000000000..47780ea4b8
--- /dev/null
+++ b/test/files/run/macro-default-params/Macros_1.scala
@@ -0,0 +1,27 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.WhiteboxContext
+
+object Macros {
+ def id[A]: A = null.asInstanceOf[A]
+
+ def foo: Any = macro impl
+ def impl(c: WhiteboxContext): c.Tree = {
+ import c.universe._
+ import Flag._
+
+ lazy val tpe = TypeTree(typeOf[Int])
+
+ /* If we used this line instead, it would work! */
+ // lazy val tpe = tq"Int"
+
+ lazy val param: ValDef = {
+ val p1 = q"val a: ${tpe.duplicate} = Macros.id[${tpe.duplicate}]"
+ ValDef(Modifiers(DEFAULTPARAM), p1.name, p1.tpt, p1.rhs)
+ }
+
+ q"""
+ class C { def f($param) = a }
+ println(new C().f())
+ """
+ }
+}
diff --git a/test/files/run/macro-default-params/Test_2.scala b/test/files/run/macro-default-params/Test_2.scala
new file mode 100644
index 0000000000..5d19639cdd
--- /dev/null
+++ b/test/files/run/macro-default-params/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
diff --git a/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala
index a00f4ed7db..40ca1d549c 100644
--- a/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala
+++ b/test/files/run/macro-whitebox-fundep-materialization/Test_2.scala
@@ -1,4 +1,4 @@
-// see the comments for macroExpandApply.onDelayed for an explanation of what's tested here
+// see the comments for macroExpand.onDelayed for an explanation of what's tested here
object Test extends App {
case class Foo(i: Int, s: String, b: Boolean)
def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
diff --git a/test/files/run/macroPlugins-macroArgs.check b/test/files/run/macroPlugins-macroArgs.check
new file mode 100644
index 0000000000..a68f8069b6
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs.check
@@ -0,0 +1,2 @@
+hijacked 1
+hijacked 2
diff --git a/test/files/run/macroPlugins-macroArgs/Macros_2.scala b/test/files/run/macroPlugins-macroArgs/Macros_2.scala
new file mode 100644
index 0000000000..d6521dfbcb
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Macros_2.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.BlackboxContext
+
+object Macros {
+ def impl(c: BlackboxContext)(arg: c.Tree) = {
+ import c.universe._
+ q"""println($arg)"""
+ }
+
+ def foo(arg: String): Unit = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Plugin_1.scala b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala
new file mode 100644
index 0000000000..23e80ced3b
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Plugin_1.scala
@@ -0,0 +1,21 @@
+package macroArgs
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+
+ val name = "macroArgs"
+ val description = "A sample analyzer plugin that overrides macroArgs."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ override def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = {
+ val MacroArgs(c, List(Literal(Constant(s: String)))) = standardMacroArgs(typer, expandee)
+ Some(MacroArgs(c, List(Literal(Constant("hijacked " + s)))))
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Test_3.flags b/test/files/run/macroPlugins-macroArgs/Test_3.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/Test_3.scala b/test/files/run/macroPlugins-macroArgs/Test_3.scala
new file mode 100644
index 0000000000..a54d608178
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo("1")
+ Macros.foo("2")
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml b/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml
new file mode 100644
index 0000000000..0849f0f4ea
--- /dev/null
+++ b/test/files/run/macroPlugins-macroArgs/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>macro-args</name>
+ <classname>macroArgs.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand.check b/test/files/run/macroPlugins-macroExpand.check
new file mode 100644
index 0000000000..6f685c2af4
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand.check
@@ -0,0 +1,2 @@
+expanded into println("impl1")
+expanded into println("impl2")
diff --git a/test/files/run/macroPlugins-macroExpand/Macros_2.scala b/test/files/run/macroPlugins-macroExpand/Macros_2.scala
new file mode 100644
index 0000000000..f16503b415
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Macros_2.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.BlackboxContext
+
+object Macros {
+ def impl1(c: BlackboxContext) = {
+ import c.universe._
+ q"""println("impl1")"""
+ }
+
+ def impl2(c: BlackboxContext) = {
+ import c.universe._
+ q"""println("impl2")"""
+ }
+
+ def foo1: Unit = macro impl1
+
+ def foo2: Unit = macro impl2
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Plugin_1.scala b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala
new file mode 100644
index 0000000000..13df85cb23
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Plugin_1.scala
@@ -0,0 +1,27 @@
+package macroExpand
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+ import scala.reflect.internal.Mode
+
+ val name = "macroExpand"
+ val description = "A sample analyzer plugin that overrides macroExpand."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ override def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = {
+ object expander extends DefMacroExpander(typer, expandee, mode, pt) {
+ override def onSuccess(expanded: Tree) = {
+ val message = s"expanded into ${expanded.toString}"
+ typer.typed(q"println($message)")
+ }
+ }
+ Some(expander(expandee))
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Test_3.flags b/test/files/run/macroPlugins-macroExpand/Test_3.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/Test_3.scala b/test/files/run/macroPlugins-macroExpand/Test_3.scala
new file mode 100644
index 0000000000..def9b5608a
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo1
+ Macros.foo2
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml b/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml
new file mode 100644
index 0000000000..860150865c
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>macro-expand</name>
+ <classname>macroExpand.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime.check b/test/files/run/macroPlugins-macroRuntime.check
new file mode 100644
index 0000000000..af16d1ac36
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime.check
@@ -0,0 +1,2 @@
+hijacked
+hijacked
diff --git a/test/files/run/macroPlugins-macroRuntime/Macros_2.scala b/test/files/run/macroPlugins-macroRuntime/Macros_2.scala
new file mode 100644
index 0000000000..d6521dfbcb
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Macros_2.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.BlackboxContext
+
+object Macros {
+ def impl(c: BlackboxContext)(arg: c.Tree) = {
+ import c.universe._
+ q"""println($arg)"""
+ }
+
+ def foo(arg: String): Unit = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala
new file mode 100644
index 0000000000..a55adadb48
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Plugin_1.scala
@@ -0,0 +1,20 @@
+package macroRuntime
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+
+ val name = "macroRuntime"
+ val description = "A sample analyzer plugin that overrides macroRuntime."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ override def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = Some({
+ case MacroArgs(_, List(msg)) => q"""println("hijacked")"""
+ })
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Test_3.flags b/test/files/run/macroPlugins-macroRuntime/Test_3.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/Test_3.scala b/test/files/run/macroPlugins-macroRuntime/Test_3.scala
new file mode 100644
index 0000000000..a54d608178
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo("1")
+ Macros.foo("2")
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml b/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml
new file mode 100644
index 0000000000..8001af1054
--- /dev/null
+++ b/test/files/run/macroPlugins-macroRuntime/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>macro-runtime</name>
+ <classname>macroRuntime.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/run/macroPlugins-namerHooks.check b/test/files/run/macroPlugins-namerHooks.check
new file mode 100644
index 0000000000..c2db5935d4
--- /dev/null
+++ b/test/files/run/macroPlugins-namerHooks.check
@@ -0,0 +1,45 @@
+enterSym(package <empty> { case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } } })
+enterSym(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } })
+ensureCompanionObject(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } }, ...)
+enterSym(<synthetic> object C extends runtime.this.AbstractFunction2[Int, Int, C] { def <init>() = { super.<init>(); () }; final override <synthetic> def toString() = "C" })
+enterStat(case class C extends scala.Product with scala.Serializable { <caseaccessor> <paramaccessor> val x: Int = _; <caseaccessor> <paramaccessor> val y: Int = _; def <init>(x: Int, y: Int) = { super.<init>(); () } })
+enterSym(<caseaccessor> <paramaccessor> val x: Int = _)
+enterSym(<caseaccessor> <paramaccessor> val y: Int = _)
+enterSym(def <init>(x: Int, y: Int) = { super.<init>(); () })
+enterSym(<synthetic> def copy(x = x, y = y) = new C(x, y))
+enterStat(<caseaccessor> <paramaccessor> private[this] val x: Int = _)
+enterStat(<caseaccessor> <paramaccessor> private[this] val y: Int = _)
+enterStat(def <init>(x: Int, y: Int) = { super.<init>(); () })
+enterSym(<caseaccessor> <paramaccessor> private[this] val x: Int = _)
+enterSym(<caseaccessor> <paramaccessor> private[this] val y: Int = _)
+enterSym(def <init>(x: Int, y: Int) = { super.<init>(); () })
+enterSym(super.<init>())
+enterStat(super.<init>())
+enterSym(<synthetic> def copy$default$1 = x)
+enterSym(<synthetic> def copy$default$2 = y)
+enterSym(<synthetic> var acc: Int = -889275714)
+enterSym(acc = Statics.this.mix(acc, x))
+enterSym(acc = Statics.this.mix(acc, y))
+enterStat(<synthetic> var acc: Int = -889275714)
+enterStat(acc = Statics.this.mix(acc, x))
+enterStat(acc = Statics.this.mix(acc, y))
+enterSym(<synthetic> val C$1: C = x$1.asInstanceOf[C])
+enterStat(<synthetic> val C$1: C = x$1.asInstanceOf[C])
+enterSym(def <init>() = { super.<init>(); () })
+enterSym(final override <synthetic> def toString() = "C")
+enterSym(case <synthetic> def apply(x: Int, y: Int): C = new C(x, y))
+enterSym(case <synthetic> def unapply(x$0: C) = if (x$0.==(null)) scala.this.None else Some(scala.Tuple2(x$0.x, x$0.y)))
+enterStat(def <init>() = { super.<init>(); () })
+enterStat(final override <synthetic> def toString() = "C")
+enterSym(def <init>() = { super.<init>(); () })
+enterSym(final override <synthetic> def toString() = "C")
+enterSym(super.<init>())
+enterStat(super.<init>())
+enterSym(case <synthetic> val x1: Int = x$1)
+enterStat(case <synthetic> val x1: Int = x$1)
+enterSym(case <synthetic> val x1: Any = x$1)
+enterSym(case5(){ if (x1.isInstanceOf[C]) matchEnd4(true) else case6() })
+enterSym(case6(){ matchEnd4(false) })
+enterStat(case <synthetic> val x1: Any = x$1)
+enterStat(case5(){ if (x1.isInstanceOf[C]) matchEnd4(true) else case6() })
+enterStat(case6(){ matchEnd4(false) })
diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala
new file mode 100644
index 0000000000..302429b19e
--- /dev/null
+++ b/test/files/run/macroPlugins-namerHooks.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp"
+
+ def code = """
+ case class C(x: Int, y: Int)
+ """.trim
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+
+ val output = collection.mutable.ListBuffer[String]()
+ def log(what: String) = output += what.replace(String.format("%n"), " ")
+
+ object macroPlugin extends MacroPlugin {
+ override def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = {
+ log(s"enterSym($tree)")
+ namer.standardEnterSym(tree)
+ true
+ }
+ override def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = {
+ log(s"ensureCompanionObject($cdef, ...)")
+ Some(namer.standardEnsureCompanionObject(cdef, creator))
+ }
+ override def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+ stats.foreach(stat => log(s"enterStat($stat)"))
+ stats
+ }
+ }
+
+ addMacroPlugin(macroPlugin)
+ compileString(global)(code)
+ println(output.mkString("\n"))
+ }
+}
diff --git a/test/files/run/macroPlugins-typedMacroBody.check b/test/files/run/macroPlugins-typedMacroBody.check
new file mode 100644
index 0000000000..b6f8436189
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody.check
@@ -0,0 +1,2 @@
+impl1
+impl2
diff --git a/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags b/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Macros_2.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala
new file mode 100644
index 0000000000..fa8522e729
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Macros_2.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.BlackboxContext
+
+object Macros {
+ def impl1(c: BlackboxContext) = {
+ import c.universe._
+ q"""println("impl1")"""
+ }
+
+ def impl2(c: BlackboxContext) = {
+ import c.universe._
+ q"""println("impl2")"""
+ }
+
+ def foo1: Unit = macro 1
+
+ def foo2: Unit = macro 2
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala
new file mode 100644
index 0000000000..e99cf7f75d
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Plugin_1.scala
@@ -0,0 +1,21 @@
+package typedMacroBody
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+
+ val name = "typedMacroBody"
+ val description = "A sample analyzer plugin that overrides typedMacroBody."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ override def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = {
+ val DefDef(_, _, _, _, _, Literal(Constant(num: Int))) = ddef
+ Some(standardTypedMacroBody(typer, copyDefDef(ddef)(rhs = Ident(TermName("impl" + num)))))
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/Test_3.scala b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala
new file mode 100644
index 0000000000..def9b5608a
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo1
+ Macros.foo2
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml b/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml
new file mode 100644
index 0000000000..e223fa5dca
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>typed-macro-body</name>
+ <classname>typedMacroBody.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/run/mutable-anyrefmap.scala b/test/files/run/mutable-anyrefmap.scala
deleted file mode 100644
index ff615d0daf..0000000000
--- a/test/files/run/mutable-anyrefmap.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-object Test extends App {
-
- import scala.collection.mutable.HashMap;
- import scala.collection.mutable.AnyRefMap;
-
- val keys = Array(
- null, "perch", "herring", "salmon", "pike", "cod", ""
- )
-
- val rn = new scala.util.Random(42L)
- var arm = AnyRefMap.empty[String, Int]
- val hm = HashMap.empty[String, Int]
-
- def checkConsistent = hm.forall{ case (k,v) => arm.get(k).exists(_ == v) }
-
- assert {
- (0 to 10000).forall{ i =>
- val k = keys(rn.nextInt(keys.length))
- if (rn.nextInt(100) < 2) arm = arm.clone()
- if (rn.nextInt(100) < 5) arm.repack()
- if (rn.nextBoolean) {
- hm += ((k, i))
- rn.nextInt(6) match {
- case 0 => arm += ((k, i))
- case 1 => arm += (k, i)
- case 2 => arm(k) = i
- case 3 => arm.put(k,i)
- case 4 => arm ++= List((k,i))
- case _ => if (!arm.contains(k)) arm.getOrElseUpdate(k,i)
- else arm += (k,i)
- }
- }
- else {
- hm -= k
- rn.nextInt(2) match {
- case 0 => arm -= k
- case _ => arm --= List(k)
- }
- }
- checkConsistent
- }
- }
-
- assert {
- val mapped =
- arm.map{ case (k,v) => (if (k==null) "" else k+k) -> v.toString }
- mapped.getClass == arm.getClass
- }
-
- assert {
- val arm2 = new AnyRefMap[java.lang.Integer,Unit](2000000)
- for (i <- 0 until 1000000) arm2(java.lang.Integer.valueOf(i)) = ()
-
- arm2.size == 1000000 &&
- (0 to 1100000 by 100000).map(java.lang.Integer.valueOf).forall(i => (arm2 contains i) == i < 1000000)
- }
-
- arm = AnyRefMap("heron" -> 22, "dove" -> 5, "budgie" -> 0)
-
- assert{
- var s = ""
- arm.foreachKey(s += _)
-
- s.length == "herondovebudgie".length &&
- s.contains("heron") &&
- s.contains("dove") &&
- s.contains("budgie")
- }
-
- assert{ var s = 0L; arm.foreachValue(s += _); s == 27L }
-
- assert {
- val m2 = arm.mapValuesNow(_+2)
- arm.transformValues(_+2)
- m2 == arm && !(m2 eq arm) && (for ((_,v) <- arm) yield v).sum == 33L
- }
-
- assert {
- val arm2 = new AnyRefMap[String, String](x => if (x==null) "null" else x)
- arm2 += ("cod" -> "fish", "Rarity" -> "unicorn")
- val hm2 = (new HashMap[String,String]) ++= arm2
-
- List(null, "cod", "sparrow", "Rarity").forall(i =>
- arm2.get(i) == hm2.get(i) &&
- arm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
- arm2(i) == hm2.get(i).getOrElse(if (i==null) "null" else i.toString) &&
- arm2.getOrNull(i) == hm2.get(i).orNull
- )
- }
-}
-
diff --git a/test/files/run/mutable-longmap.scala b/test/files/run/mutable-longmap.scala
deleted file mode 100644
index 07fd80f6f0..0000000000
--- a/test/files/run/mutable-longmap.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-object Test extends App {
-
- import scala.collection.mutable.HashMap;
- import scala.collection.mutable.LongMap;
-
- val keys = Array(
- Long.MinValue, Int.MinValue - 1L, Int.MinValue, -9127, -1,
- 0, 1, 9127, Int.MaxValue, Long.MaxValue
- )
-
- val rn = new scala.util.Random(42L)
- var lm = LongMap.empty[Long]
- val hm = HashMap.empty[Long,Long]
-
- def checkConsistent = hm.forall{ case (k,v) => lm.get(k).exists(_ == v) }
-
- assert {
- (0 to 10000).forall{ i =>
- val k = keys(rn.nextInt(keys.length))
- if (rn.nextInt(100) < 2) lm = lm.clone()
- if (rn.nextInt(100) < 5) lm.repack()
- if (rn.nextBoolean) {
- hm += ((k, i))
- rn.nextInt(6) match {
- case 0 => lm += ((k, i))
- case 1 => lm += (k, i)
- case 2 => lm(k) = i
- case 3 => lm.put(k,i)
- case 4 => lm ++= List((k,i))
- case _ => if (!lm.contains(k)) lm.getOrElseUpdate(k,i)
- else lm += (k,i)
- }
- }
- else {
- hm -= k
- rn.nextInt(2) match {
- case 0 => lm -= k
- case _ => lm --= List(k)
- }
- }
- checkConsistent
- }
- }
-
- assert {
- lm.map{ case (k,v) => -k*k -> v.toString }.getClass == lm.getClass
- }
-
- assert {
- val lm2 = new LongMap[Unit](2000000)
- for (i <- 0 until 1000000) lm2(i) = ()
-
- lm2.size == 1000000 &&
- (0 to 1100000 by 100000).forall(i => (lm2 contains i) == i < 1000000)
- }
-
- lm = LongMap(8L -> 22L, -5L -> 5L, Long.MinValue -> 0L)
-
- assert{ var s = 0L; lm.foreachKey(s += _); s == Long.MinValue + 3 }
- assert{ var s = 0L; lm.foreachValue(s += _); s == 27L }
- assert {
- val m2 = lm.mapValuesNow(_+2)
- lm.transformValues(_+2)
- m2 == lm && !(m2 eq lm) && (for ((_,v) <- lm) yield v).sum == 33L
- }
-
- assert {
- val lm2 = new LongMap[String](_.toString)
- lm2 += (5L -> "fish", 0L -> "unicorn")
- val hm2 = (new HashMap[Long,String]) ++= lm2
-
- List(Long.MinValue, 0L, 1L, 5L).forall(i =>
- lm2.get(i) == hm2.get(i) &&
- lm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
- lm2(i) == hm2.get(i).getOrElse(i.toString) &&
- lm2.getOrNull(i) == hm2.get(i).orNull
- )
- }
-}
diff --git a/test/files/run/t4287inferredMethodTypes.check b/test/files/run/t4287inferredMethodTypes.check
new file mode 100644
index 0000000000..56e9c097cc
--- /dev/null
+++ b/test/files/run/t4287inferredMethodTypes.check
@@ -0,0 +1,30 @@
+[[syntax trees at end of typer]] // newSource1.scala
+[0:92]package [0:0]<empty> {
+ [0:21]class A extends [7:21][23]scala.AnyRef {
+ [8:16]<paramaccessor> private[this] val a: [8]Int = _;
+ <8:20>def <init>(<8:20>a: [11]<type: [11]scala.Int> = [17:20]A.a): [7]A = <8:20>{
+ <8:20><8:20><8:20>A.super.<init>();
+ <8:20>()
+ }
+ };
+ [23:47]object A extends [32:47][49]scala.AnyRef {
+ [49]def <init>(): [32]A.type = [49]{
+ [49][49][49]A.super.<init>();
+ [32]()
+ };
+ [36:45]private[this] val a: [40]Int = [44:45]2;
+ [40]<stable> <accessor> def a: [40]Int = [40][40]A.this.a;
+ [8]<synthetic> def <init>$default$1: [8]Int = [19]A.a
+ };
+ [49:92]class B extends [57:92][65:66]A {
+ [65]def <init>(): [57]B = [65]{
+ [65][65][65]B.super.<init>([65]A.<init>$default$1);
+ [57]()
+ };
+ [70:90]def <init>([79:80]a: [79]Int): [74]B = [84:90]{
+ [84:90][84:90][84]B.this.<init>();
+ [84]()
+ }
+ }
+}
+
diff --git a/test/files/run/t4287inferredMethodTypes.scala b/test/files/run/t4287inferredMethodTypes.scala
new file mode 100644
index 0000000000..f14e672da8
--- /dev/null
+++ b/test/files/run/t4287inferredMethodTypes.scala
@@ -0,0 +1,25 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -Yinfer-argument-types -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+ override def code = """
+class A(a: Int = A.a)
+
+object A {
+ val a = 2
+}
+
+class B extends A {
+ def this(a) = this()
+}
+ """.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 188f39ff82..760a92567c 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -10,10 +10,10 @@
[87:209]class C extends [94:209][151:159]Greeting {
[119:139]val nameElse = _;
[95:101]<paramaccessor> private[this] val i: [98:101]Int = _;
- <119:139>def <init>([95]i: [98]Int) = <119:139>{
+ <95:139>def <init>(<95:101>i: [98]Int) = <95:139>{
<119:139>val nameElse = <134:139>"Bob";
[NoPosition][NoPosition][NoPosition]super.<init>();
- [94]()
+ <95:139>()
};
[168:184]val name = [179:184]"avc";
[191:203][191:198]println([199:202]msg)
diff --git a/test/files/run/t7974.check b/test/files/run/t7974.check
new file mode 100644
index 0000000000..0be496d8d0
--- /dev/null
+++ b/test/files/run/t7974.check
@@ -0,0 +1,104 @@
+public class Symbols {
+
+ // compiled from: Symbols.scala
+
+
+
+ // access flags 0x12
+ private final Lscala/Symbol; someSymbol3
+
+ // access flags 0xA
+ private static Lscala/Symbol; symbol$1
+
+ // access flags 0xA
+ private static Lscala/Symbol; symbol$2
+
+ // access flags 0xA
+ private static Lscala/Symbol; symbol$3
+
+ // access flags 0x9
+ public static <clinit>()V
+ L0
+ LINENUMBER 2 L0
+ GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+ LDC "Symbolic1"
+ INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+ PUTSTATIC Symbols.symbol$1 : Lscala/Symbol;
+ L1
+ LINENUMBER 3 L1
+ GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+ LDC "Symbolic2"
+ INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+ PUTSTATIC Symbols.symbol$2 : Lscala/Symbol;
+ L2
+ LINENUMBER 5 L2
+ GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
+ LDC "Symbolic3"
+ INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
+ PUTSTATIC Symbols.symbol$3 : Lscala/Symbol;
+ RETURN
+ MAXSTACK = 2
+ MAXLOCALS = 0
+
+ // access flags 0x1
+ public someSymbol1()Lscala/Symbol;
+ L0
+ LINENUMBER 2 L0
+ GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+ ARETURN
+ L1
+ LOCALVARIABLE this LSymbols; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
+ // access flags 0x1
+ public someSymbol2()Lscala/Symbol;
+ L0
+ LINENUMBER 3 L0
+ GETSTATIC Symbols.symbol$2 : Lscala/Symbol;
+ ARETURN
+ L1
+ LOCALVARIABLE this LSymbols; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
+ // access flags 0x1
+ public sameSymbol1()Lscala/Symbol;
+ L0
+ LINENUMBER 4 L0
+ GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
+ ARETURN
+ L1
+ LOCALVARIABLE this LSymbols; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
+ // access flags 0x1
+ public someSymbol3()Lscala/Symbol;
+ L0
+ LINENUMBER 5 L0
+ ALOAD 0
+ GETFIELD Symbols.someSymbol3 : Lscala/Symbol;
+ ARETURN
+ L1
+ LOCALVARIABLE this LSymbols; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
+ // access flags 0x1
+ public <init>()V
+ L0
+ LINENUMBER 6 L0
+ ALOAD 0
+ INVOKESPECIAL java/lang/Object.<init> ()V
+ L1
+ LINENUMBER 5 L1
+ ALOAD 0
+ GETSTATIC Symbols.symbol$3 : Lscala/Symbol;
+ PUTFIELD Symbols.someSymbol3 : Lscala/Symbol;
+ RETURN
+ L2
+ LOCALVARIABLE this LSymbols; L0 L2 0
+ MAXSTACK = 2
+ MAXLOCALS = 1
+}
diff --git a/test/files/run/t7974/Symbols.scala b/test/files/run/t7974/Symbols.scala
new file mode 100644
index 0000000000..2363b724eb
--- /dev/null
+++ b/test/files/run/t7974/Symbols.scala
@@ -0,0 +1,6 @@
+class Symbols {
+ def someSymbol1 = 'Symbolic1
+ def someSymbol2 = 'Symbolic2
+ def sameSymbol1 = 'Symbolic1
+ val someSymbol3 = 'Symbolic3
+}
diff --git a/test/files/run/t7974/Test.scala b/test/files/run/t7974/Test.scala
new file mode 100644
index 0000000000..9403ea332b
--- /dev/null
+++ b/test/files/run/t7974/Test.scala
@@ -0,0 +1,20 @@
+import java.io.PrintWriter;
+
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm.util._
+import scala.tools.nsc.util.stringFromWriter
+
+object Test extends BytecodeTest {
+ def show {
+ val classNode = loadClassNode("Symbols", skipDebugInfo = false)
+ val textifier = new Textifier
+ classNode.accept(new TraceClassVisitor(null, textifier, null))
+
+ val classString = stringFromWriter(w => textifier.print(w))
+ val result =
+ classString.split('\n')
+ .dropWhile(elem => elem != "public class Symbols {")
+ .filterNot(elem => elem.startsWith(" @Lscala/reflect/ScalaSignature") || elem.startsWith(" ATTRIBUTE ScalaSig"))
+ result foreach println
+ }
+}
diff --git a/test/files/run/t8046.check b/test/files/run/t8046.check
new file mode 100644
index 0000000000..905b0b35ca
--- /dev/null
+++ b/test/files/run/t8046.check
@@ -0,0 +1,2 @@
+List(trait Op, trait Function1, class Object, class Any)
+BTS(T,Three.this.Op[Int],Int => Int,Object,Any)
diff --git a/test/files/run/t8046/Test.scala b/test/files/run/t8046/Test.scala
new file mode 100644
index 0000000000..f6b525d1b5
--- /dev/null
+++ b/test/files/run/t8046/Test.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ override def code = ""
+ override def extraSettings: String = "-usejavacp"
+
+ override def show() {
+ val c = newCompiler()
+ new c.Run
+ import c._
+
+ val f4 = typeOf[Three].member(newTermName("f4"))
+ val f4ParamInfo = f4.paramss.head.head.info
+ println(f4ParamInfo.baseClasses)
+ println(f4ParamInfo.baseTypeSeq)
+ }
+}
+
diff --git a/test/files/run/t8046/t8046c.scala b/test/files/run/t8046/t8046c.scala
new file mode 100644
index 0000000000..0b484da530
--- /dev/null
+++ b/test/files/run/t8046/t8046c.scala
@@ -0,0 +1,13 @@
+import language._
+
+trait One {
+ type Op[A]
+ type Alias[A] = Op[A]
+}
+
+trait Three extends One {
+ trait Op[A] extends (A => A)
+
+ def f4[T <: Alias[Int]](f: T) = 0
+}
+
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
index c5cac3ea45..fe90d7222f 100644
--- a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
+++ b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
@@ -92,7 +92,7 @@ trait ArbitraryTreesAndNames {
yield DefDef(mods, name, tparams, vparamss, tpt, rhs)
def genExistentialTypeTree(size: Int) =
- for(tpt <- genTree(size - 1); where <- smallList(size, genTree(size - 1)))
+ for(tpt <- genTree(size - 1); where <- smallList(size, oneOf(genValDef(size - 1), genTypeDef(size - 1))))
yield ExistentialTypeTree(tpt, where)
def genFunction(size: Int) =
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 6c7c32bfdf..1eb186f303 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -127,6 +127,47 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
(visited == expectedSize(r)) :| str(r)
}
+ property("sum") = forAll(myGen) { r =>
+// println("----------")
+// println("sum "+str(r))
+ val rSum = r.sum
+ val expected = r.length match {
+ case 0 => 0
+ case 1 => r.head
+ case _ => ((r.head + r.last).toLong * r.length / 2).toInt
+ }
+// println("size: " + r.length)
+// println("expected: " + expected)
+// println("obtained: " + rSum)
+
+ (rSum == expected) :| str(r)
+ }
+
+/* checks that sum respects custom Numeric */
+ property("sumCustomNumeric") = forAll(myGen) { r =>
+ val mod = 65536
+ object mynum extends Numeric[Int] {
+ def plus(x: Int, y: Int): Int = (x + y) % mod
+ override def zero = 0
+
+ def fromInt(x: Int): Int = ???
+ def minus(x: Int, y: Int): Int = ???
+ def negate(x: Int): Int = ???
+ def times(x: Int, y: Int): Int = ???
+ def toDouble(x: Int): Double = ???
+ def toFloat(x: Int): Float = ???
+ def toInt(x: Int): Int = ((x % mod) + mod * 2) % mod
+ def toLong(x: Int): Long = ???
+ def compare(x: Int, y: Int): Int = ???
+ }
+
+ val rSum = r.sum(mynum)
+ val expected = mynum.toInt(r.sum)
+
+ (rSum == expected) :| str(r)
+ }
+
+
property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
// println("length "+str(r))
(r.length == expectedSize(r)) :| str(r)
diff --git a/test/junit/scala/collection/ArraySortingTest.scala b/test/junit/scala/collection/ArraySortingTest.scala
new file mode 100644
index 0000000000..4e54b39ce7
--- /dev/null
+++ b/test/junit/scala/collection/ArraySortingTest.scala
@@ -0,0 +1,29 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+/* Tests scala.util.Sorting.quickSort by comparing its result against java.util.Arrays.sort on equivalent data. */
+@RunWith(classOf[JUnit4])
+class ArraySortingTest {
+
+ class CantSortMe(val i: Int) {
+ override def equals(a: Any) = throw new IllegalArgumentException("I cannot be equalled!")
+ }
+
+ object CanOrder extends Ordering[CantSortMe] {
+ def compare(a: CantSortMe, b: CantSortMe) = a.i compare b.i
+ }
+
+ // Tests SI-7837
+ @Test
+ def sortByTest() {
+ val test = Array(1,2,3,4,1,3,5,7,1,4,8,1,1,1,1)
+ val cant = test.map(i => new CantSortMe(i))
+ java.util.Arrays.sort(test)
+ scala.util.Sorting.quickSort(cant)(CanOrder)
+ assert( test(6) == 1 )
+ assert( (test,cant).zipped.forall(_ == _.i) )
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
new file mode 100644
index 0000000000..c62b074483
--- /dev/null
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -0,0 +1,479 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.{mutable => cm, immutable => ci}
+import scala.collection.JavaConverters._
+
+/* Tests various maps by making sure they all agree on the same answers. */
+@RunWith(classOf[JUnit4])
+class SetMapConsistencyTest {
+
+ trait MapBox[A] {
+ protected def oor(s: String, n: Int) = throw new IllegalArgumentException(s"Out of range for $s: $n")
+ def title: String
+ def adders: Int
+ def add(n: Int, a: A, v: Int): Unit
+ def subbers: Int
+ def sub(n: Int, a: A): Unit
+ def getters: Int
+ def get(n: Int, a: A): Int
+ def fiddlers: Int
+ def fiddle(n: Int): Unit
+ def keys: Iterator[A]
+ def has(a: A): Boolean
+ }
+
+
+ // Mutable map wrappers
+
+ class BoxMutableMap[A, M <: cm.Map[A, Int]](m0: M, title0: String) extends MapBox[A] {
+ var m = m0
+ def title = title0
+ def adders = 5
+ def add(n: Int, a: A, v: Int) { n match {
+ case 0 => m += ((a, v))
+ case 1 => m(a) = v
+ case 2 => m.put(a, v)
+ case 3 => m = (m + ((a, v))).asInstanceOf[M]
+ case 4 => m = (m ++ List((a, v))).asInstanceOf[M]
+ case _ => oor("add", n)
+ }}
+ def subbers: Int = 3
+ def sub(n: Int, a: A) { n match {
+ case 0 => m -= a
+ case 1 => m = (m - a).asInstanceOf[M]
+ case 2 => m = m.filter(_._1 != a).asInstanceOf[M]
+ case _ => oor("sub", n)
+ }}
+ def getters: Int = 3
+ def get(n: Int, a: A) = n match {
+ case 0 => m.get(a).getOrElse(-1)
+ case 1 => if (m contains a) m(a) else -1
+ case 2 => m.getOrElse(a, -1)
+ case _ => oor("get", n)
+ }
+ def fiddlers: Int = 0
+ def fiddle(n: Int) { oor("fiddle", n) }
+ def keys = m.keysIterator
+ def has(a: A) = m contains a
+ override def toString = m.toString
+ }
+
+ def boxMlm[A] = new BoxMutableMap[A, cm.ListMap[A, Int]](new cm.ListMap[A, Int], "mutable.ListMap")
+
+ def boxMhm[A] = new BoxMutableMap[A, cm.HashMap[A, Int]](new cm.HashMap[A, Int], "mutable.HashMap")
+
+ def boxMohm[A] = new BoxMutableMap[A, cm.OpenHashMap[A, Int]](new cm.OpenHashMap[A, Int], "mutable.OpenHashMap")
+
+ def boxMarm[A <: AnyRef] = new BoxMutableMap[A, cm.AnyRefMap[A, Int]](new cm.AnyRefMap[A, Int](_ => -1), "mutable.AnyRefMap") {
+ private def arm: cm.AnyRefMap[A, Int] = m.asInstanceOf[cm.AnyRefMap[A, Int]]
+ override def adders = 3
+ override def subbers = 1
+ override def getters: Int = 4
+ override def get(n: Int, a: A) = n match {
+ case 0 => m.get(a).getOrElse(-1)
+ case 1 => m(a)
+ case 2 => m.getOrElse(a, -1)
+ case 3 => val x = arm.getOrNull(a); if (x==0 && !(arm contains a)) -1 else x
+ case _ => oor("get", n)
+ }
+ override def fiddlers = 2
+ override def fiddle(n: Int) { n match {
+ case 0 => m = arm.clone
+ case 1 => arm.repack
+ case _ => oor("fiddle", n)
+ }}
+ }
+
+ def boxMjm = new BoxMutableMap[Long, cm.LongMap[Int]](new cm.LongMap[Int](_ => -1), "mutable.LongMap") {
+ private def lm: cm.LongMap[Int] = m.asInstanceOf[cm.LongMap[Int]]
+ override def adders = 3
+ override def subbers = 1
+ override def getters: Int = 4
+ override def get(n: Int, a: Long) = n match {
+ case 0 => m.get(a).getOrElse(-1)
+ case 1 => m(a)
+ case 2 => m.getOrElse(a, -1)
+ case 3 => val x = lm.getOrNull(a); if (x==0 && !(lm contains a)) -1 else x
+ case _ => oor("get", n)
+ }
+ override def fiddlers = 2
+ override def fiddle(n: Int) { n match {
+ case 0 => m = lm.clone
+ case 1 => lm.repack
+ case _ => oor("fiddle", n)
+ }}
+ }
+
+ def boxJavaM[A] = new BoxMutableMap[A, cm.Map[A, Int]]((new java.util.HashMap[A, Int]).asScala, "java.util.HashMap") {
+ override def adders = 3
+ override def subbers = 1
+ }
+
+
+ // Immutable map wrappers
+
+ class BoxImmutableMap[A, M <: ci.Map[A, Int]](m0: M, title0: String) extends MapBox[A] {
+ var m = m0
+ def title = title0
+ def adders = 2
+ def add(n: Int, a: A, v: Int) { n match {
+ case 0 => m = (m + ((a, v))).asInstanceOf[M]
+ case 1 => m = (m ++ List((a, v))).asInstanceOf[M]
+ case _ => oor("add", n)
+ }}
+ def subbers: Int = 2
+ def sub(n: Int, a: A) { n match {
+ case 0 => m = (m - a).asInstanceOf[M]
+ case 1 => m = m.filter(_._1 != a).asInstanceOf[M]
+ case _ => oor("sub", n)
+ }}
+ def getters: Int = 3
+ def get(n: Int, a: A) = n match {
+ case 0 => m.get(a).getOrElse(-1)
+ case 1 => if (m contains a) m(a) else -1
+ case 2 => m.getOrElse(a, -1)
+ case _ => oor("get", n)
+ }
+ def fiddlers: Int = 0
+ def fiddle(n: Int) { oor("fiddle", n) }
+ def keys = m.keysIterator
+ def has(a: A) = m contains a
+ override def toString = m.toString
+ }
+
+ def boxIhm[A] = new BoxImmutableMap[A, ci.HashMap[A,Int]](new ci.HashMap[A, Int], "immutable.HashMap")
+
+ def boxIim = new BoxImmutableMap[Int, ci.IntMap[Int]](ci.IntMap.empty[Int], "immutable.IntMap")
+
+ def boxIjm = new BoxImmutableMap[Long, ci.LongMap[Int]](ci.LongMap.empty[Int], "immutable.LongMap")
+
+ def boxIlm[A] = new BoxImmutableMap[A, ci.ListMap[A, Int]](new ci.ListMap[A, Int], "immutable.ListMap")
+
+ def boxItm[A: Ordering] = new BoxImmutableMap[A, ci.TreeMap[A, Int]](new ci.TreeMap[A, Int], "immutable.TreeMap")
+
+
+ // Mutable set wrappers placed into the same framework (everything returns 0)
+
+ class BoxMutableSet[A, M <: cm.Set[A]](s0: M, title0: String) extends MapBox[A] {
+ protected var m = s0
+ def title = title0
+ def adders = 5
+ def add(n: Int, a: A, v: Int) { n match {
+ case 0 => m += a
+ case 1 => m(a) = true
+ case 2 => m add a
+ case 3 => m = (m + a).asInstanceOf[M]
+ case 4 => m = (m ++ List(a)).asInstanceOf[M]
+ case _ => oor("add", n)
+ }}
+ def subbers: Int = 3
+ def sub(n: Int, a: A) { n match {
+ case 0 => m -= a
+ case 1 => m = (m - a).asInstanceOf[M]
+ case 2 => m = m.filter(_ != a).asInstanceOf[M]
+ case _ => oor("sub", n)
+ }}
+ def getters: Int = 1
+ def get(n: Int, a: A) = if (m(a)) 0 else -1
+ def fiddlers: Int = 0
+ def fiddle(n: Int) { oor("fiddle", n) }
+ def keys = m.iterator
+ def has(a: A) = m(a)
+ override def toString = m.toString
+ }
+
+ def boxMbs = new BoxMutableSet[Int, cm.BitSet](new cm.BitSet, "mutable.BitSet")
+
+ def boxMhs[A] = new BoxMutableSet[A, cm.HashSet[A]](new cm.HashSet[A], "mutable.HashSet")
+
+ def boxJavaS[A] = new BoxMutableSet[A, cm.Set[A]]((new java.util.HashSet[A]).asScala, "java.util.HashSet") {
+ override def adders = 3
+ override def subbers = 1
+ }
+
+
+ // Immutable set wrappers placed into the same framework (everything returns 0)
+
+ class BoxImmutableSet[A, M <: ci.Set[A]](s0: M, title0: String) extends MapBox[A] {
+ protected var m = s0
+ def title = title0
+ def adders = 2
+ def add(n: Int, a: A, v: Int) { n match {
+ case 0 => m = (m + a).asInstanceOf[M]
+ case 1 => m = (m ++ List(a)).asInstanceOf[M]
+ case _ => oor("add", n)
+ }}
+ def subbers: Int = 2
+ def sub(n: Int, a: A) { n match {
+ case 0 => m = (m - a).asInstanceOf[M]
+ case 1 => m = m.filter(_ != a).asInstanceOf[M]
+ case _ => oor("sub", n)
+ }}
+ def getters: Int = 1
+ def get(n: Int, a: A) = if (m(a)) 0 else -1
+ def fiddlers: Int = 0
+ def fiddle(n: Int) { oor("fiddle", n) }
+ def keys = m.iterator
+ def has(a: A) = m(a)
+ override def toString = m.toString
+ }
+
+ def boxIbs = new BoxImmutableSet[Int, ci.BitSet](ci.BitSet.empty, "immutable.BitSet")
+
+ def boxIhs[A] = new BoxImmutableSet[A, ci.HashSet[A]](ci.HashSet.empty[A], "immutable.HashSet")
+
+ def boxIls[A] = new BoxImmutableSet[A, ci.ListSet[A]](ci.ListSet.empty[A], "immutable.ListSet")
+
+ def boxIts[A: Ordering] = new BoxImmutableSet[A, ci.TreeSet[A]](ci.TreeSet.empty[A], "immutable.TreeSet")
+
+
+ // Random operations on maps
+ def churn[A](map1: MapBox[A], map2: MapBox[A], keys: Array[A], n: Int = 1000, seed: Int = 42, valuer: Int => Int = identity) = {
+ def check = map1.keys.forall(map2 has _) && map2.keys.forall(map1 has _)
+ val rn = new scala.util.Random(seed)
+ var what = new StringBuilder
+ what ++= "creation"
+ for (i <- 0 until n) {
+ if (!check) {
+ val temp = map2 match {
+ case b: BoxImmutableMap[_, _] => b.m match {
+ case hx: ci.HashMap.HashTrieMap[_,_] =>
+ val h = hx.asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+ Some((h.bitmap.toHexString, h.elems.mkString, h.size))
+ case _ => None
+ }
+ case _ => None
+ }
+ throw new Exception(s"Disagreement after ${what.result} between ${map1.title} and ${map2.title} because ${map1.keys.map(map2 has _).mkString(",")} ${map2.keys.map(map1 has _).mkString(",")} at step $i:\n$map1\n$map2\n$temp")
+ }
+ what ++= " (%d) ".format(i)
+ if (rn.nextInt(10)==0) {
+
+ if (map1.fiddlers > 0) map1.fiddle({
+ val n = rn.nextInt(map1.fiddlers)
+ what ++= ("f"+n)
+ n
+ })
+ if (map2.fiddlers > 0) map2.fiddle({
+ val n = rn.nextInt(map2.fiddlers)
+ what ++= ("F"+n)
+ n
+ })
+ }
+ if (rn.nextBoolean) {
+ val idx = rn.nextInt(keys.length)
+ val key = keys(rn.nextInt(keys.length))
+ val n1 = rn.nextInt(map1.adders)
+ val n2 = rn.nextInt(map2.adders)
+ what ++= "+%s(%d,%d)".format(key,n1,n2)
+ map1.add(n1, key, valuer(idx))
+ map2.add(n2, key, valuer(idx))
+ }
+ else {
+ val n = rn.nextInt(keys.length)
+ val key = keys(n)
+ val n1 = rn.nextInt(map1.subbers)
+ val n2 = rn.nextInt(map2.subbers)
+ what ++= "-%s(%d,%d)".format(key, n1, n2)
+ //println(s"- $key")
+ map1.sub(n1, key)
+ map2.sub(n2, key)
+ }
+ val j = rn.nextInt(keys.length)
+ val gn1 = rn.nextInt(map1.getters)
+ val gn2 = rn.nextInt(map2.getters)
+ val g1 = map1.get(gn1, keys(j))
+ val g2 = map2.get(gn2, keys(j))
+ if (g1 != g2) {
+ val temp = map2 match {
+ case b: BoxImmutableMap[_, _] => b.m match {
+ case hx: ci.HashMap.HashTrieMap[_,_] =>
+ val h = hx.asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+ val y = (ci.HashMap.empty[A, Int] ++ h).asInstanceOf[ci.HashMap.HashTrieMap[A, Int]]
+ Some(((h.bitmap.toHexString, h.elems.mkString, h.size),(y.bitmap.toHexString, y.elems.mkString, y.size)))
+ case _ => None
+ }
+ case _ => None
+ }
+ throw new Exception(s"Disagreement after ${what.result} between ${map1.title} and ${map2.title} on get of ${keys(j)} (#$j) on step $i: $g1 != $g2 using methods $gn1 and $gn2 resp.; in full\n$map1\n$map2\n$temp")
+ }
+ }
+ true
+ }
+
+
+ // Actual tests
+ val smallKeys = Array(0, 1, 42, 9127)
+ val intKeys = smallKeys ++ Array(-1, Int.MaxValue, Int.MinValue, -129385)
+ val longKeys = intKeys.map(_.toLong) ++ Array(Long.MaxValue, Long.MinValue, 1397198789151L, -41402148014L)
+ val stringKeys = intKeys.map(_.toString) ++ Array("", null)
+ val anyKeys = stringKeys.filter(_ != null) ++ Array(0L) ++ Array(true) ++ Array(math.Pi)
+
+ @Test
+ def churnIntMaps() {
+ val maps = Array[() => MapBox[Int]](
+ () => boxMlm[Int], () => boxMhm[Int], () => boxMohm[Int], () => boxJavaM[Int],
+ () => boxIim, () => boxIhm[Int], () => boxIlm[Int], () => boxItm[Int]
+ )
+ assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), intKeys, 2000) } )
+ }
+
+ @Test
+ def churnLongMaps() {
+ val maps = Array[() => MapBox[Long]](
+ () => boxMjm, () => boxIjm, () => boxJavaM[Long],
+ () => boxMlm[Long], () => boxMhm[Long], () => boxMohm[Long], () => boxIhm[Long], () => boxIlm[Long]
+ )
+ assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), longKeys, 10000) } )
+ }
+
+ @Test
+ def churnStringMaps() {
+ // Note: OpenHashMap and TreeMap won't store a null key, so they are left out of the String-keyed maps
+ val maps = Array[() => MapBox[String]](
+ () => boxMlm[String], () => boxMhm[String], () => boxMarm[String], () => boxJavaM[String],
+ () => boxIhm[String], () => boxIlm[String]
+ )
+ assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), stringKeys, 5000) } )
+ }
+
+ @Test
+ def churnAnyMaps() {
+ val maps = Array[() => MapBox[Any]](
+ () => boxMlm[Any], () => boxMhm[Any], () => boxMohm[Any], () => boxJavaM[Any], () => boxIhm[Any], () => boxIlm[Any]
+ )
+ assert( maps.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), anyKeys, 10000) } )
+ }
+
+ @Test
+ def churnIntSets() {
+ val sets = Array[() => MapBox[Int]](
+ () => boxMhm[Int], () => boxIhm[Int], () => boxJavaS[Int],
+ () => boxMbs, () => boxMhs[Int], () => boxIbs, () => boxIhs[Int], () => boxIls[Int], () => boxIts[Int]
+ )
+ assert( sets.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), smallKeys, 1000, valuer = _ => 0) } )
+ }
+
+ @Test
+ def churnAnySets() {
+ val sets = Array[() => MapBox[Any]](
+ () => boxMhm[Any], () => boxIhm[Any], () => boxJavaS[Any],
+ () => boxMhs[Any], () => boxIhs[Any], () => boxIls[Any]
+ )
+ assert( sets.sliding(2).forall{ ms => churn(ms(0)(), ms(1)(), anyKeys, 10000, valuer = _ => 0) } )
+ }
+
+ @Test
+ def extraMutableLongMapTests() {
+ import cm.{LongMap, HashMap}
+ var lm = LongMap.empty[Long]
+ longKeys.zipWithIndex.foreach{ case (k,i) => lm(k) = i }
+ assert{ lm.map{ case (k,v) => -k*k -> v.toString }.getClass == lm.getClass }
+
+ assert {
+ val lm2 = new LongMap[Unit](2000000)
+ for (i <- 0 until 1000000) lm2(i) = ()
+
+ lm2.size == 1000000 &&
+ (0 to 1100000 by 100000).forall(i => (lm2 contains i) == i < 1000000)
+ }
+
+ lm = LongMap(8L -> 22L, -5L -> 5L, Long.MinValue -> 0L)
+
+ assert{ var s = 0L; lm.foreachKey(s += _); s == Long.MinValue + 3 }
+ assert{ var s = 0L; lm.foreachValue(s += _); s == 27L }
+ assert {
+ val m2 = lm.mapValuesNow(_+2)
+ lm.transformValues(_+2)
+ m2 == lm && !(m2 eq lm) && (for ((_,v) <- lm) yield v).sum == 33L
+ }
+
+ assert {
+ val lm2 = new LongMap[String](_.toString)
+ lm2 += (5L -> "fish", 0L -> "unicorn")
+ val hm2 = (new HashMap[Long,String]) ++= lm2
+ List(Long.MinValue, 0L, 1L, 5L).forall(i =>
+ lm2.get(i) == hm2.get(i) &&
+ lm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
+ lm2(i) == hm2.get(i).getOrElse(i.toString) &&
+ lm2.getOrNull(i) == hm2.get(i).orNull
+ )
+ }
+ }
+
+ @Test
+ def extraMutableAnyRefMapTests() {
+ import cm.{AnyRefMap, HashMap}
+ var arm = AnyRefMap.empty[String, Int]
+ stringKeys.zipWithIndex.foreach{ case (k,i) => arm(k) = i }
+
+ assert{ arm.map{ case (k,v) => (if (k==null) "" else k+k) -> v.toString }.getClass == arm.getClass }
+
+ assert {
+ val arm2 = new AnyRefMap[java.lang.Integer,Unit](2000000)
+ for (i <- 0 until 1000000) arm2(java.lang.Integer.valueOf(i)) = ()
+ arm2.size == 1000000 &&
+ (0 to 1100000 by 100000).map(java.lang.Integer.valueOf).forall(i => (arm2 contains i) == i < 1000000)
+ }
+
+ arm = AnyRefMap("heron" -> 22, "dove" -> 5, "budgie" -> 0)
+
+ assert{
+ var s = ""
+ arm.foreachKey(s += _)
+ s.length == "herondovebudgie".length &&
+ s.contains("heron") &&
+ s.contains("dove") &&
+ s.contains("budgie")
+ }
+
+ assert{ var s = 0L; arm.foreachValue(s += _); s == 27L }
+
+ assert {
+ val m2 = arm.mapValuesNow(_+2)
+ arm.transformValues(_+2)
+ m2 == arm && !(m2 eq arm) && (for ((_,v) <- arm) yield v).sum == 33L
+ }
+
+ assert {
+ val arm2 = new AnyRefMap[String, String](x => if (x==null) "null" else x)
+ arm2 += ("cod" -> "fish", "Rarity" -> "unicorn")
+ val hm2 = (new HashMap[String,String]) ++= arm2
+ List(null, "cod", "sparrow", "Rarity").forall(i =>
+ arm2.get(i) == hm2.get(i) &&
+ arm2.getOrElse(i, "") == hm2.getOrElse(i, "") &&
+ arm2(i) == hm2.get(i).getOrElse(if (i==null) "null" else i.toString) &&
+ arm2.getOrNull(i) == hm2.get(i).orNull
+ )
+ }
+ }
+
+ @Test
+ def extraFilterTests() {
+ type M = scala.collection.Map[Int, Boolean]
+ val manyKVs = (0 to 1000).map(i => i*i*i).map(x => x -> ((x*x*x) < 0))
+ val rn = new scala.util.Random(42)
+ def mhm: M = { val m = new cm.HashMap[Int, Boolean]; m ++= manyKVs; m }
+ def mohm: M = { val m = new cm.OpenHashMap[Int, Boolean]; m ++= manyKVs; m }
+ def ihm: M = ci.HashMap.empty[Int, Boolean] ++ manyKVs
+ val densities = List(0, 0.05, 0.2, 0.5, 0.8, 0.95, 1)
+ def repeat = rn.nextInt(100) < 33
+ def pick(m: M, density: Double) = m.keys.filter(_ => rn.nextDouble < density).toSet
+ def test: Boolean = {
+ for (i <- 0 to 100) {
+ var ms = List(mhm, mohm, ihm)
+ do {
+ val density = densities(rn.nextInt(densities.length))
+ val keep = pick(ms.head, density)
+ ms = ms.map(_.filter(keep contains _._1))
+ if (!ms.sliding(2).forall(s => s(0) == s(1))) return false
+ } while (repeat)
+ }
+ true
+ }
+ assert(test)
+ }
+}
diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala
new file mode 100644
index 0000000000..4f0657f471
--- /dev/null
+++ b/test/junit/scala/math/NumericTest.scala
@@ -0,0 +1,18 @@
+
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class NumericTest {
+
+ /* Test for SI-8102 */
+ @Test
+ def testAbs {
+ assertTrue(-0.0.abs equals 0.0)
+ assertTrue(-0.0f.abs equals 0.0f)
+ }
+}
+
diff --git a/test/pending/pos/t6161.scala b/test/pending/pos/t6161.scala
new file mode 100644
index 0000000000..5783cc85f2
--- /dev/null
+++ b/test/pending/pos/t6161.scala
@@ -0,0 +1,22 @@
+object t6161 {
+ trait N {
+ type Name
+ }
+
+ trait N1 extends N {
+ class Name {
+ type ThisNameType <: Name
+ def encode: ThisNameType = ???
+ }
+ }
+
+ trait S {
+ self: N => // change to N1 and it compiles
+ type NameType <: Name
+ }
+
+ object g extends S with N1
+
+ val n1: g.NameType = ???
+ val n2: g.Name = n1.encode
+}
diff --git a/test/pending/presentation/context-bounds1.check b/test/pending/presentation/context-bounds1.check
new file mode 100644
index 0000000000..b444de59a4
--- /dev/null
+++ b/test/pending/presentation/context-bounds1.check
@@ -0,0 +1,51 @@
+reload: ContextBounds.scala
+
+askHyperlinkPos for `Blubb` at (2,23) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Foo` at (4,17) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Blubb` at (4,32) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Blubb` at (13,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (4,42) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (4,51) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `blubb` at (4,66) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `blubb` at (2,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `Foo` at (5,18) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `Foo` at (9,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (5,25) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (4,12) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `foo` at (5,36) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (10,7) ContextBounds.scala
+================================================================================
+
+askHyperlinkPos for `A` at (10,14) ContextBounds.scala
+================================================================================
+[response] found askHyperlinkPos for `A` at (9,11) ContextBounds.scala
+================================================================================
diff --git a/test/pending/presentation/context-bounds1/Test.scala b/test/pending/presentation/context-bounds1/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/pending/presentation/context-bounds1/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/pending/presentation/context-bounds1/src/ContextBounds.scala b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
new file mode 100644
index 0000000000..72a8f694a3
--- /dev/null
+++ b/test/pending/presentation/context-bounds1/src/ContextBounds.scala
@@ -0,0 +1,13 @@
+object ContextBound {
+ val blubb = new Blubb/*#*/
+
+ def work[A: Foo/*#*/](f: Blubb/*#*/ => A/*#*/): A/*#*/ = f(blubb/*#*/) ensuring {
+ implicitly[Foo/*#*/[A/*#*/]].foo/*#*/(_) >= 42
+ }
+}
+
+trait Foo[A] {
+ def foo(a: A/*#*/): Int
+}
+
+class Blubb \ No newline at end of file
diff --git a/test/files/run/reflection-sync-potpourri.scala b/test/pending/run/reflection-sync-potpourri.scala
index 0ad5f2ab66..0ad5f2ab66 100644
--- a/test/files/run/reflection-sync-potpourri.scala
+++ b/test/pending/run/reflection-sync-potpourri.scala
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812.check
index 619c56180b..3be8a300e7 100644
--- a/test/scaladoc/run/SI-6812.check
+++ b/test/scaladoc/run/SI-6812.check
@@ -1 +1,2 @@
+warning: -Ymacro-no-expand is deprecated: Use -Ymacro-expand:none
Done.
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
index 059c327e7e..6893e816d0 100644
--- a/test/scaladoc/run/SI-6812.scala
+++ b/test/scaladoc/run/SI-6812.scala
@@ -19,6 +19,6 @@ object Test extends ScaladocModelTest {
"""
def scaladocSettings = ""
- override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
+ override def extraSettings = super.extraSettings + " -Ymacro-no-expand -deprecation"
def testModel(root: Package) = ()
}
diff --git a/test/scaladoc/run/SI-6812b.check b/test/scaladoc/run/SI-6812b.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6812b.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6812b.scala b/test/scaladoc/run/SI-6812b.scala
new file mode 100644
index 0000000000..b8a8140357
--- /dev/null
+++ b/test/scaladoc/run/SI-6812b.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.reflect.macros.BlackboxContext
+ import language.experimental.macros
+
+ object Macros {
+ def impl(c: BlackboxContext) = c.literalUnit
+ def foo: Unit = macro impl
+ }
+
+ class C {
+ def bar = Macros.foo
+ }
+ """
+
+ def scaladocSettings = ""
+ override def extraSettings = super.extraSettings + " -Ymacro-expand:none"
+ def testModel(root: Package) = ()
+}
diff --git a/versions.properties b/versions.properties
index 90448dc49a..62ba7ac379 100644
--- a/versions.properties
+++ b/versions.properties
@@ -12,6 +12,10 @@ scala-continuations-plugin.version.number=1.0.0-RC2
scala-continuations-library.version.number=1.0.0-RC2
scala-swing.version.number=1.0.0-RC2
+# these ship with distribution (and scala-library-all depends on them)
+akka-actor.version.number=2.2.3
+actors-migration.version.number=1.0.0
+
# external modules, used internally (not shipped)
partest.version.number=1.0.0-RC8
scalacheck.version.number=1.11.1