-rw-r--r--  .mailmap | 2
-rw-r--r--  build-ant-macros.xml | 26
-rwxr-xr-x  build.xml | 30
-rw-r--r--  src/build/maven/scala-actors-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-doc-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-interactive-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 13
-rw-r--r--  src/build/maven/scala-dist-pom.xml | 11
-rw-r--r--  src/build/maven/scala-library-all-pom.xml | 11
-rw-r--r--  src/build/maven/scala-library-pom.xml | 11
-rw-r--r--  src/build/maven/scala-reflect-pom.xml | 11
-rw-r--r--  src/build/maven/scalap-pom.xml | 11
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Typers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 14
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 119
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 33
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/PostErasure.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/util/package.scala | 13
-rw-r--r--  src/compiler/scala/tools/reflect/FastTrack.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/FormatInterpolator.scala | 329
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 171
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectGlobal.scala | 7
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBox.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 12
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Holes.scala | 97
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala | 37
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala | 46
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala | 63
-rw-r--r--  src/eclipse/partest/.classpath | 2
-rw-r--r--  src/eclipse/test-junit/.classpath | 1
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ContextTrees.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 20
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 63
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/Set.scala | 46
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 42
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 15
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 40
-rw-r--r--  src/library/scala/concurrent/Future.scala | 8
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 40
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala | 39
-rw-r--r--  src/reflect/scala/reflect/api/Importers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/TypeTags.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 274
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/FreshNames.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 35
-rw-r--r--  src/reflect/scala/reflect/internal/Required.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/StdAttachments.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 111
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolPairs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 157
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 145
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/transform/PostErasure.scala | 19
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Transforms.scala | 11
-rw-r--r--  src/reflect/scala/reflect/internal/util/Collections.scala | 34
-rw-r--r--  src/reflect/scala/reflect/internal/util/SourceFile.scala | 16
-rw-r--r--  src/reflect/scala/reflect/macros/Attachments.scala | 5
-rw-r--r--  src/reflect/scala/reflect/macros/Evals.scala | 6
-rw-r--r--  src/reflect/scala/reflect/macros/Typers.scala | 6
-rw-r--r--  src/reflect/scala/reflect/macros/Universe.scala | 5
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 284
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 4
-rw-r--r--  src/reflect/scala/reflect/runtime/ReflectionUtils.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/Settings.scala | 1
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolLoaders.scala | 5
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolTable.scala | 15
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala | 117
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedTypes.scala | 7
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 27
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Settings.scala | 7
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 4
-rw-r--r--  test/disabled/run/t7843-jsr223-service.scala | 18
-rw-r--r--  test/files/filters | 2
-rw-r--r--  test/files/jvm/t6941/Analyzed_1.scala | 2
-rw-r--r--  test/files/neg/abstract-report2.check | 8
-rw-r--r--  test/files/neg/abstract-report2.scala | 6
-rw-r--r--  test/files/neg/accesses.check | 6
-rw-r--r--  test/files/neg/accesses2.check | 10
-rw-r--r--  test/files/neg/missing-param-type-tuple.check | 6
-rw-r--r--  test/files/neg/names-defaults-neg.check | 2
-rw-r--r--  test/files/neg/no-implicit-to-anyref-any-val.check | 34
-rw-r--r--  test/files/neg/no-implicit-to-anyref-any-val.scala (renamed from test/files/neg/no-implicit-to-anyref.scala) | 4
-rw-r--r--  test/files/neg/no-implicit-to-anyref.check | 22
-rw-r--r--  test/files/neg/quasiquotes-syntax-error-position.check | 5
-rw-r--r--  test/files/neg/quasiquotes-syntax-error-position.scala | 3
-rw-r--r--  test/files/neg/stringinterpolation_macro-neg.check | 134
-rw-r--r--  test/files/neg/stringinterpolation_macro-neg.scala | 45
-rw-r--r--  test/files/neg/t6844.check | 6
-rw-r--r--  test/files/neg/t6844.scala | 5
-rw-r--r--  test/files/neg/t7325.check | 8
-rw-r--r--  test/files/neg/t8143a.check | 5
-rw-r--r--  test/files/neg/t8143a.scala | 15
-rw-r--r--  test/files/neg/t8182.check | 22
-rw-r--r--  test/files/neg/t8182.scala | 18
-rw-r--r--  test/files/neg/t8207.check | 7
-rw-r--r--  test/files/neg/t8207.scala | 3
-rw-r--r--  test/files/neg/t8228.check | 4
-rw-r--r--  test/files/neg/t8228.scala | 7
-rw-r--r--  test/files/neg/t8237-default.check | 13
-rw-r--r--  test/files/neg/t8237-default.scala | 29
-rw-r--r--  test/files/pos/annotated-original/M_1.scala | 2
-rw-r--r--  test/files/pos/annotated-treecopy/Impls_Macros_1.scala | 2
-rw-r--r--  test/files/pos/implicit-anyval-2.10.flags | 1
-rw-r--r--  test/files/pos/implicit-anyval-2.10.scala | 3
-rw-r--r--  test/files/pos/t2066-2.10-compat.flags | 1
-rw-r--r--  test/files/pos/t2066-2.10-compat.scala | 71
-rw-r--r--  test/files/pos/t6169/Exist.java | 4
-rw-r--r--  test/files/pos/t6169/ExistF.java | 4
-rw-r--r--  test/files/pos/t6169/ExistIndir.java | 4
-rw-r--r--  test/files/pos/t6169/OP.java | 1
-rw-r--r--  test/files/pos/t6169/Skin.java | 1
-rw-r--r--  test/files/pos/t6169/Skinnable.java | 3
-rw-r--r--  test/files/pos/t6169/skinnable.scala | 14
-rw-r--r--  test/files/pos/t6169/t6169.scala | 7
-rw-r--r--  test/files/pos/t7322.scala | 11
-rw-r--r--  test/files/pos/t7377/Macro_1.scala | 2
-rw-r--r--  test/files/pos/t7516/A_1.scala | 2
-rw-r--r--  test/files/pos/t7919.scala | 6
-rw-r--r--  test/files/pos/t8064/Macro_1.scala | 2
-rw-r--r--  test/files/pos/t8170.scala | 27
-rw-r--r--  test/files/pos/t8170b.scala | 25
-rw-r--r--  test/files/pos/t8207.scala | 6
-rw-r--r--  test/files/pos/t8237.scala | 29
-rw-r--r--  test/files/pos/t8237b.scala | 10
-rw-r--r--  test/files/run/global-showdef.scala | 2
-rw-r--r--  test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala | 2
-rw-r--r--  test/files/run/private-override.scala | 17
-rw-r--r--  test/files/run/reflection-magicsymbols-invoke.check | 2
-rw-r--r--  test/files/run/reflection-sync-potpourri.scala (renamed from test/pending/run/reflection-sync-potpourri.scala) | 2
-rw-r--r--  test/files/run/stringinterpolation_macro-run.check | 5
-rw-r--r--  test/files/run/stringinterpolation_macro-run.scala | 15
-rw-r--r--  test/files/run/t6261.scala | 7
-rw-r--r--  test/files/run/t6411a.check | 96
-rw-r--r--  test/files/run/t6411a.scala | 81
-rw-r--r--  test/files/run/t6411b.check | 1
-rw-r--r--  test/files/run/t6411b.scala | 12
-rw-r--r--  test/files/run/t7240/Macros_1.scala | 2
-rw-r--r--  test/files/run/t7319.check | 2
-rw-r--r--  test/files/run/t7328.check | 4
-rw-r--r--  test/files/run/t7328.scala | 18
-rw-r--r--  test/files/run/t7445.scala | 6
-rw-r--r--  test/files/run/t7700.check | 2
-rw-r--r--  test/files/run/t7700.scala | 17
-rw-r--r--  test/files/run/t7843-jsr223-service.check (renamed from test/disabled/run/t7843-jsr223-service.check) | 0
-rw-r--r--  test/files/run/t7843-jsr223-service.scala | 8
-rw-r--r--  test/files/run/t7933.check | 2
-rw-r--r--  test/files/run/t7933.scala | 11
-rw-r--r--  test/files/run/t8199.scala | 105
-rw-r--r--  test/files/run/t8233-bcode.flags | 1
-rw-r--r--  test/files/run/t8233-bcode.scala | 18
-rw-r--r--  test/files/run/t8233.scala | 18
-rw-r--r--  test/files/run/t8245.scala | 14
-rw-r--r--  test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala | 67
-rw-r--r--  test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala | 106
-rw-r--r--  test/files/scalacheck/quasiquotes/ErrorProps.scala | 28
-rw-r--r--  test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala | 12
-rw-r--r--  test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala | 5
-rw-r--r--  test/files/scalacheck/quasiquotes/TermConstructionProps.scala | 82
-rw-r--r--  test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala | 53
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeConstructionProps.scala | 8
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala | 38
-rw-r--r--  test/files/scalacheck/quasiquotes/TypecheckedProps.scala | 9
-rw-r--r--  test/junit/scala/collection/SetMapConsistencyTest.scala | 31
-rw-r--r--  test/junit/scala/reflect/internal/util/SourceFileTest.scala | 55
-rw-r--r--  test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala | 1
-rw-r--r--  test/osgi/src/BasicReflection.scala | 2
-rw-r--r--  test/osgi/src/ScalaOsgiHelper.scala | 6
-rw-r--r--  test/pending/run/idempotency-partial-functions.scala | 2
-rw-r--r--  test/scaladoc/resources/SI-4014_0.scala | 4
-rw-r--r--  test/scaladoc/resources/SI-4014_1.scala | 5
-rw-r--r--  test/scaladoc/resources/SI-4014_2.scala | 6
-rw-r--r--  test/scaladoc/run/t7124.check | 3
-rw-r--r--  test/scaladoc/run/t7124.scala | 22
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.scala | 46
-rw-r--r--  versions.properties | 11
215 files changed, 3670 insertions, 1328 deletions
diff --git a/.mailmap b/.mailmap
index 810bda030c..7cab5ed019 100644
--- a/.mailmap
+++ b/.mailmap
@@ -10,6 +10,7 @@ Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@lampmac14.epfl.ch>
Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <axel22@gmail.com>
Alex Cruise <alex@cluonflux.com>
Alex Cruise <alex@cluonflux.com> <alex@metaforsoftware.com>
+A. P. Marki <som.snytt@gmail.com>
Antonio Cunei <antonio.cunei@typesafe.com>
Antonio Cunei <antonio.cunei@typesafe.com> <antonio.cunei@epfl.ch>
Buraq Emir <buraq@epfl.ch>
@@ -71,5 +72,6 @@ Unknown Committer <lost.soul@typesafe.com> <USER@epfl.ch>
Unknown Committer <lost.soul@typesafe.com> <noreply@epfl.ch>
Viktor Klang <viktor.klang@gmail.com>
Vincent Cremet <cremet@epfl.ch>
+Vladimir Nikolaev <vladimir.nikolaev9@gmail.com>
Vojin Jovanovic <vojin.jovanovic@epfl.ch>
Vojin Jovanovic <vojin.jovanovic@epfl.ch> <gvojin@gmail.com>
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
index 0b92f1dab1..458d1014c2 100644
--- a/build-ant-macros.xml
+++ b/build-ant-macros.xml
@@ -23,15 +23,35 @@
</sequential>
</macrodef>
+ <!-- Set a property @{name}.cross to the actual cross suffix that should be
+ used when resolving the module "@{name}". If the (user-supplied)
+ @{name}.cross.suffix property exists then use that value, otherwise use
+ "_${scala.binary.version}". -->
+ <macrodef name="prepareCross">
+ <attribute name="name" />
+ <sequential>
+ <if>
+ <isset property="@{name}.cross.suffix" />
+ <then>
+ <property name="@{name}.cross" value="${@{name}.cross.suffix}" />
+ </then>
+ <else>
+ <property name="@{name}.cross" value="_${scala.binary.version}" />
+ </else>
+ </if>
+ </sequential>
+ </macrodef>
+
<!-- Set property named @{name} to the jar resolved as @{jar}_${scala.binary.version}:jar.
@{jar}_${scala.binary.version} must be a maven dependency. -->
<macrodef name="propertyForCrossedArtifact">
<attribute name="name"/>
<attribute name="jar"/>
+ <attribute name="suffix" default="${@{name}.cross}"/>
<sequential>
- <readProperty name="@{name}" property="@{jar}_${scala.binary.version}:jar"/>
- <readProperty name="@{name}-sources" property="@{jar}_${scala.binary.version}:java-source:sources"/>
- <readProperty name="@{name}-javadoc" property="@{jar}_${scala.binary.version}:java-source:javadoc"/>
+ <readProperty name="@{name}" property="@{jar}@{suffix}:jar"/>
+ <readProperty name="@{name}-sources" property="@{jar}@{suffix}:java-source:sources"/>
+ <readProperty name="@{name}-javadoc" property="@{jar}@{suffix}:java-source:javadoc"/>
</sequential>
</macrodef>
diff --git a/build.xml b/build.xml
index 00ad5f78e9..37c544fcdc 100755
--- a/build.xml
+++ b/build.xml
@@ -276,6 +276,16 @@ TODO:
<artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
+ <!-- prepare, for each of the names below, the property "@{name}.cross", set to the
+ necessary cross suffix (usually something like "_2.11.0-M6". -->
+ <prepareCross name="scala-xml" />
+ <prepareCross name="scala-parser-combinators" />
+ <prepareCross name="scala-continuations-plugin" />
+ <prepareCross name="scala-continuations-library"/>
+ <prepareCross name="scala-swing"/>
+ <prepareCross name="partest"/>
+ <prepareCross name="scalacheck"/>
+
<!-- TODO: delay until absolutely necessary to allow minimal build, also move out partest dependency from scaladoc -->
<artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
<!-- uncomment the following if you're deploying your own partest locally -->
@@ -284,13 +294,13 @@ TODO:
(we don't distribute partest with Scala, so the risk of sonatype and maven being out of synch is irrelevant):
-->
<artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-partest_${scala.binary.version}" version="${partest.version.number}" />
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-partest${partest.cross}" version="${partest.version.number}" />
</artifact:dependencies>
<copy-deps project="partest"/>
<artifact:dependencies pathId="scalacheck.classpath" filesetId="scalacheck.fileset" versionsId="scalacheck.versions">
<artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scalacheck" artifactId="scalacheck_${scala.binary.version}" version="${scalacheck.version.number}" />
+ <dependency groupId="org.scalacheck" artifactId="scalacheck${scalacheck.cross}" version="${scalacheck.version.number}" />
</artifact:dependencies>
<artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.fileset" versionsId="repl.deps.versions">
@@ -302,11 +312,11 @@ TODO:
must specify sourcesFilesetId, javadocFilesetId to download these types of artifacts -->
<artifact:dependencies pathId="external-modules.deps.classpath" sourcesFilesetId="external-modules.sources.fileset" javadocFilesetId="external-modules.javadoc.fileset">
<artifact:remoteRepository refid="extra-repo"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-xml_${scala.binary.version}" version="${scala-xml.version.number}"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-parser-combinators_${scala.binary.version}" version="${scala-parser-combinators.version.number}"/>
- <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-plugin_${scala.binary.version}" version="${scala-continuations-plugin.version.number}"/>
- <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-library_${scala.binary.version}" version="${scala-continuations-library.version.number}"/>
- <dependency groupId="org.scala-lang.modules" artifactId="scala-swing_${scala.binary.version}" version="${scala-swing.version.number}"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-xml${scala-xml.cross}" version="${scala-xml.version.number}"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-parser-combinators${scala-parser-combinators.cross}" version="${scala-parser-combinators.version.number}"/>
+ <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-plugin${scala-continuations-plugin.cross}" version="${scala-continuations-plugin.version.number}"/>
+ <dependency groupId="org.scala-lang.plugins" artifactId="scala-continuations-library${scala-continuations-library.cross}" version="${scala-continuations-library.version.number}"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-swing${scala-swing.cross}" version="${scala-swing.version.number}"/>
</artifact:dependencies>
<!-- External modules, excluding the core -->
@@ -411,8 +421,12 @@ TODO:
<condition property="has.java7">
<equals arg1="${ant.java.version}" arg2="1.7"/>
</condition>
+ <condition property="has.java8">
+ <equals arg1="${ant.java.version}" arg2="1.8"/>
+ </condition>
<condition property="has.unsupported.jdk">
<not><or>
+ <isset property="has.java8" />
<isset property="has.java7" />
<isset property="has.java6" />
</or></not>
@@ -1663,7 +1677,7 @@ TODO:
<fileset dir="${build-docs.dir}/library"/>
</copy>
- <copy toDir="${dist.dir}/doc/api" overwrite="true" flatten="true">
+ <copy toDir="${dist.dir}/api" overwrite="true" flatten="true">
<file file="${scala-xml-javadoc}"/>
<file file="${scala-parser-combinators-javadoc}"/>
<file file="${scala-continuations-plugin-javadoc}"/>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
index 424ac2898c..a0ebcecad1 100644
--- a/src/build/maven/scala-actors-pom.xml
+++ b/src/build/maven/scala-actors-pom.xml
@@ -38,17 +38,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-doc-pom.xml b/src/build/maven/scala-compiler-doc-pom.xml
index 30161d2fea..8572e55b42 100644
--- a/src/build/maven/scala-compiler-doc-pom.xml
+++ b/src/build/maven/scala-compiler-doc-pom.xml
@@ -45,17 +45,6 @@
<version>@PARSER_COMBINATORS_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-interactive-pom.xml b/src/build/maven/scala-compiler-interactive-pom.xml
index d59f305a9f..ad8192b694 100644
--- a/src/build/maven/scala-compiler-interactive-pom.xml
+++ b/src/build/maven/scala-compiler-interactive-pom.xml
@@ -35,17 +35,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index 4a000b27a1..8ca18f6f14 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -39,7 +39,7 @@
<artifactId>scala-reflect</artifactId>
<version>@VERSION@</version>
</dependency>
- <!-- TODO modularize compiler: these dependencies will disappear then the compiler is modularized -->
+ <!-- TODO modularize compiler: these dependencies will disappear when the compiler is modularized -->
<dependency> <!-- for scala-compiler-doc -->
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
@@ -57,17 +57,6 @@
<optional>true</optional>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
index 413da928bb..9a566d231b 100644
--- a/src/build/maven/scala-dist-pom.xml
+++ b/src/build/maven/scala-dist-pom.xml
@@ -51,17 +51,6 @@
<version>@JLINE_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
index f34a28e79a..b649c8c525 100644
--- a/src/build/maven/scala-library-all-pom.xml
+++ b/src/build/maven/scala-library-all-pom.xml
@@ -75,17 +75,6 @@
<version>@ACTORS_MIGRATION_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index d40cee2656..78fc05a7c3 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -33,17 +33,6 @@
</properties>
<dependencies>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
index d0a9c0e274..c21caefcf2 100644
--- a/src/build/maven/scala-reflect-pom.xml
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -38,17 +38,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index 88cfce08d8..236ac999fc 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -35,17 +35,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index cd3db74016..c1ab17027f 100644
--- a/src/compiler/scala/reflect/macros/contexts/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -46,9 +46,7 @@ trait Typers {
universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
- def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(universe.duplicateAndKeepPositions(tree))
+ def resetLocalAttrs(tree: Tree): Tree = universe.resetAttrs(universe.duplicateAndKeepPositions(tree))
- def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree))
-
- def untypecheck(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree))
+ def untypecheck(tree: Tree): Tree = resetLocalAttrs(tree)
}
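
With this change the context's untypecheck simply delegates to resetLocalAttrs applied to a duplicated tree. A minimal sketch of how a macro author would lean on that entry point, assuming the 2.11-style macro API (the Demo/traced names are illustrative, not part of this commit):

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox.Context

    object Demo {
      def traced(expr: Any): Any = macro tracedImpl

      def tracedImpl(c: Context)(expr: c.Tree): c.Tree = {
        import c.universe._
        // Strip the symbols/types assigned at the original call site so the
        // typer can re-assign them when the tree is spliced into new code.
        val cleaned = c.untypecheck(expr)
        q"""{ println("evaluating"); $cleaned }"""
      }
    }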
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index ad0632f93e..6b0a0ee8d7 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -86,7 +86,7 @@ abstract class Reifier extends States
throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString))
}
- // todo. why do we resetAllAttrs?
+ // todo. why do we reset attrs?
//
// typically we do some preprocessing before reification and
// the code emitted/moved around during preprocessing is very hard to typecheck, so we leave it as it is
@@ -109,19 +109,11 @@ abstract class Reifier extends States
//
// todo. this is a common problem with non-trivial macros in our current macro system
// needs to be solved some day
- // maybe try `resetLocalAttrs` once the dust settles
- var importantSymbols = Set[Symbol](
- NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
- ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, runDefinitions.ReflectRuntimeCurrentMirror)
- importantSymbols ++= importantSymbols map (_.companionSymbol)
- importantSymbols ++= importantSymbols map (_.moduleClass)
- importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
- def isImportantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym)
- val untyped = resetAllAttrs(result, leaveAlone = {
+ // upd. a new hope: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ
+ val untyped = resetAttrs(result, leaveAlone = {
case ValDef(_, u, _, _) if u == nme.UNIVERSE_SHORT => true
case ValDef(_, m, _, _) if m == nme.MIRROR_SHORT => true
case tree if symtab.syms contains tree.symbol => true
- case tree if isImportantSymbol(tree.symbol) => true
case _ => false
})
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index f6b3c42ca9..b082796757 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -140,7 +140,7 @@ trait GenTrees {
if (sym == NoSymbol) {
// this sometimes happens, e.g. for binds that don't have a body
// or for untyped code generated during previous phases
- // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?")
+ // (see a comment in Reifiers about the latter, starting with "why do we reset attrs?")
mirrorCall(nme.Ident, reify(name))
}
else if (!sym.isLocalToReifee) {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 5492e563dd..1617db7517 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -81,6 +81,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
+ def erasurePhase: Phase = if (currentRun.isDefined) currentRun.erasurePhase else NoPhase
+
// platform specific elements
protected class GlobalPlatform extends {
@@ -527,7 +529,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} with Erasure
// phaseName = "posterasure"
- object postErasure extends {
+ override object postErasure extends {
val global: Global.this.type = Global.this
val runsAfter = List("erasure")
val runsRightAfter = Some("erasure")
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 24bce0636d..7a7d4ac0b2 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -176,13 +176,24 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
}
- /** resets symbol and tpe fields in a tree, @see ResetAttrs
- */
-// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
-// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
- def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
+ // Finally, noone resetAllAttrs it anymore, so I'm removing it from the compiler.
+ // Even though it's with great pleasure I'm doing that, I'll leave its body here to warn future generations about what happened in the past.
+ //
+ // So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs.
+ // resetAllAttrs destroyed all symbols and types in the tree in order to reset its state to something suitable for retypechecking
+ // and/or embedding into bigger trees / different lexical scopes. (Btw here's some background on why people would want to use
+ // reset attrs in the first place: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ).
+ //
+ // However resetAllAttrs was more of a poison than of a treatment, because along with locally defined symbols that are the cause
+ // for almost every or maybe even every case of tree corruption, it erased external bindings that sometimes could not be restored.
+ // This is how we came up with resetLocalAttrs that left external bindings alone, and that was a big step forward.
+ // Then slowly but steadily we've evicted all usages of resetAllAttrs from our codebase in favor of resetLocalAttrs
+ // and have been living happily ever after.
+ //
+ // def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(localOnly = false, leaveAlone).transform(x)
+
+ /** @see ResetAttrs */
+ def resetAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(leaveAlone).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
@@ -193,7 +204,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*
* (bq:) This transformer has mutable state and should be discarded after use
*/
- private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
+ private class ResetAttrs(leaveAlone: Tree => Boolean = null) {
// this used to be based on -Ydebug, but the need for logging in this code is so situational
// that I've reverted to a hard-coded constant here.
val debug = false
@@ -277,29 +288,18 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
//
// The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
- // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // Here we take care of references to package classes (SI-5705).
// There are other non-idempotencies, but they are not worked around yet.
//
- // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
- //
- // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around.
- // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
- // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
- // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
- // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
- //
- // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
- // in one lexical scope, it can get resolved to something else.
- //
- // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
- // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
- // That's what localOnly and vetoScope are for.
+ // The second reason has to do with the fact that resetAttrs needs to be less destructive.
+ // Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not,
+ // therefore we want to retain those bindings, especially given that restoring them can be impossible
+ // if we move these trees into lexical contexts different from their original locations.
if (dupl.hasSymbol) {
val sym = dupl.symbol
- val vetoScope = localOnly && !(locals contains sym)
- val vetoLabel = keepLabels && sym.isLabel
+ val vetoScope = !(locals contains sym)
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
- if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ if (!(vetoScope || vetoThis)) dupl.symbol = NoSymbol
}
dupl.clearType()
}
@@ -308,10 +308,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
def transform(x: Tree): Tree = {
- if (localOnly)
new MarkLocals().traverse(x)
- if (localOnly && debug) {
+ if (debug) {
assert(locals.size == orderedLocals.size)
val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL
trace("locals (%d total): %n".format(orderedLocals.size))(msg)
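
To make the local-vs-external distinction concrete, here is a small sketch using the public toolbox front-end (an illustration under the assumption that scala-compiler is on the classpath; ResetAttrs itself is compiler-internal). untypecheck clears the symbol and type of the locally defined val and its uses, but leaves the reference to scala.math.Pi bound, so the tree can be re-typechecked in a different lexical context:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox

    object ResetDemo extends App {
      val tb = currentMirror.mkToolBox()
      // `x` is defined locally inside the block; `scala.math.Pi` is an external binding.
      val typed   = tb.typecheck(q"{ val x = 1; x + scala.math.Pi }")
      val untyped = tb.untypecheck(typed)
      println(showRaw(untyped)) // local symbols erased, external reference preserved
    }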
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 0728fff74f..e3d2bf14a0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -33,7 +33,7 @@ trait ParsersCommon extends ScannersCommon { self =>
import global.{currentUnit => _, _}
def newLiteral(const: Any) = Literal(Constant(const))
- def literalUnit = newLiteral(())
+ def literalUnit = gen.mkSyntheticUnit()
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
@@ -837,8 +837,14 @@ self =>
if (samePrecedence)
checkHeadAssoc(leftAssoc)
- def loop(top: Tree): Tree =
- if (canReduce) loop(finishBinaryOp(isExpr, popOpInfo(), top)) else top
+ def loop(top: Tree): Tree = if (canReduce) {
+ val info = popOpInfo()
+ if (!isExpr && info.targs.nonEmpty) {
+ syntaxError(info.offset, "type application is not allowed in pattern")
+ info.targs.foreach(_.setType(ErrorType))
+ }
+ loop(finishBinaryOp(isExpr, info, top))
+ } else top
loop(top)
}
@@ -1219,7 +1225,7 @@ self =>
// Like Swiss cheese, with holes
def stringCheese: Tree = atPos(in.offset) {
val start = in.offset
- val interpolator = in.name
+ val interpolator = in.name.encoded // ident() for INTERPOLATIONID
val partsBuf = new ListBuffer[Tree]
val exprBuf = new ListBuffer[Tree]
@@ -2131,8 +2137,6 @@ self =>
/* -------- PARAMETERS ------------------------------------------- */
- def allowTypelessParams = false
-
/** {{{
* ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
* ParamClause ::= [nl] `(' [Params] `)'
@@ -2147,56 +2151,6 @@ self =>
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
var implicitmod = 0
var caseParam = ofCaseClass
- def param(): ValDef = {
- val start = in.offset
- val annots = annotations(skipNewLines = false)
- var mods = Modifiers(Flags.PARAM)
- if (owner.isTypeName) {
- mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
- in.token match {
- case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token.toLong, tokenRange(in))
- if (v == VAR) mods |= Flags.MUTABLE
- in.nextToken()
- case _ =>
- if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
- if (!caseParam) mods |= Flags.PrivateLocal
- }
- if (caseParam) mods |= Flags.CASEACCESSOR
- }
- val nameOffset = in.offset
- val name = ident()
- var bynamemod = 0
- val tpt =
- if (((settings.YmethodInfer && !owner.isTypeName) || allowTypelessParams) && in.token != COLON) {
- TypeTree()
- } else { // XX-METHOD-INFER
- accept(COLON)
- if (in.token == ARROW) {
- if (owner.isTypeName && !mods.hasLocalFlag)
- syntaxError(
- in.offset,
- (if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", skipIt = false)
- else if (implicitmod != 0)
- syntaxError(
- in.offset,
- "implicit parameters may not be call-by-name", skipIt = false)
- else bynamemod = Flags.BYNAMEPARAM
- }
- paramType()
- }
- val default =
- if (in.token == EQUALS) {
- in.nextToken()
- mods |= Flags.DEFAULTPARAM
- expr()
- } else EmptyTree
- atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
- }
- }
def paramClause(): List[ValDef] = {
if (in.token == RPAREN)
return Nil
@@ -2205,7 +2159,7 @@ self =>
in.nextToken()
implicitmod = Flags.IMPLICIT
}
- commaSeparated(param())
+ commaSeparated(param(owner, implicitmod, caseParam ))
}
val vds = new ListBuffer[List[ValDef]]
val start = in.offset
@@ -2253,6 +2207,57 @@ self =>
}
}
+ def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = {
+ val start = in.offset
+ val annots = annotations(skipNewLines = false)
+ var mods = Modifiers(Flags.PARAM)
+ if (owner.isTypeName) {
+ mods = modifiers() | Flags.PARAMACCESSOR
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
+ in.token match {
+ case v @ (VAL | VAR) =>
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
+ if (v == VAR) mods |= Flags.MUTABLE
+ in.nextToken()
+ case _ =>
+ if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+ if (!caseParam) mods |= Flags.PrivateLocal
+ }
+ if (caseParam) mods |= Flags.CASEACCESSOR
+ }
+ val nameOffset = in.offset
+ val name = ident()
+ var bynamemod = 0
+ val tpt =
+ if ((settings.YmethodInfer && !owner.isTypeName) && in.token != COLON) {
+ TypeTree()
+ } else { // XX-METHOD-INFER
+ accept(COLON)
+ if (in.token == ARROW) {
+ if (owner.isTypeName && !mods.hasLocalFlag)
+ syntaxError(
+ in.offset,
+ (if (mods.isMutable) "`var'" else "`val'") +
+ " parameters may not be call-by-name", skipIt = false)
+ else if (implicitmod != 0)
+ syntaxError(
+ in.offset,
+ "implicit parameters may not be call-by-name", skipIt = false)
+ else bynamemod = Flags.BYNAMEPARAM
+ }
+ paramType()
+ }
+ val default =
+ if (in.token == EQUALS) {
+ in.nextToken()
+ mods |= Flags.DEFAULTPARAM
+ expr()
+ } else EmptyTree
+ atPos(start, if (name == nme.ERROR) start else nameOffset) {
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
+ }
+ }
+
/** {{{
* TypeParamClauseOpt ::= [TypeParamClause]
* TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
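
The new syntaxError above reports type arguments applied to an infix operator in pattern position up front, instead of tripping makeBinop's require later. A hedged sketch of the shape that is now rejected (cf. test/files/neg/t8182; the offending line is left commented out so the snippet compiles):

    object PatternTargsDemo {
      def headOrZero(xs: List[Int]): Int = xs match {
        // case x ::[Int] rest => x   // error: type application is not allowed in pattern
        case x :: rest => x
        case Nil       => 0
      }
    }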
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 8011abc1ed..e8d46704c3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -475,14 +475,17 @@ trait Scanners extends ScannersCommon {
if (token == INTERPOLATIONID) {
nextRawChar()
if (ch == '\"') {
- nextRawChar()
- if (ch == '\"') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '\"') {
+ nextRawChar() // now eat it
offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
+ nextChar()
token = STRINGLIT
strVal = ""
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index cfee988efc..525dcffb0c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -56,7 +56,7 @@ abstract class TreeBuilder {
/** Create tree representing (unencoded) binary operation expression or pattern. */
def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
- require(isExpr || targs.isEmpty, s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+ require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
def mkSelection(t: Tree) = {
def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 1332d01dbd..b650cdfa09 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -46,8 +46,10 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = NoCompilationUnit
override def run() {
- scalaPrimitives.init()
- classes.clear()
+ if (!settings.isBCodeActive) {
+ scalaPrimitives.init()
+ classes.clear()
+ }
super.run()
}
@@ -1007,8 +1009,15 @@ abstract class GenICode extends SubComponent {
}
// emit conversion
- if (generatedType != expectedType)
- adapt(generatedType, expectedType, resCtx, tree.pos)
+ if (generatedType != expectedType) {
+ tree match {
+ case Literal(Constant(null)) if generatedType == NullReference =>
+ // literal null on the stack (as opposed to a boxed null, see SI-8233),
+ // we can bypass `adapt` which would otherwise emitt a redundant [DROP, CONSTANT(null)]
+ case _ =>
+ adapt(generatedType, expectedType, resCtx, tree.pos)
+ }
+ }
resCtx
}
@@ -1058,6 +1067,9 @@ abstract class GenICode extends SubComponent {
case (NothingReference, _) =>
ctx.bb.emit(THROW(ThrowableClass))
ctx.bb.enterIgnoreMode()
+ case (NullReference, REFERENCE(_)) =>
+ // SI-8223 we can't assume that the stack contains a `null`, it might contain a Null$
+ ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
case _ if from isAssignabledTo to =>
()
case (_, UNIT) =>
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 267fa15312..64146585e5 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -21,7 +21,7 @@ trait Members {
import global._
- object NoCode extends Code(null, "NoCode") {
+ object NoCode extends Code(null, TermName("NoCode")) {
override def blocksList: List[BasicBlock] = Nil
}
@@ -29,8 +29,8 @@ trait Members {
* This class represents the intermediate code of a method or
* other multi-block piece of code, like exception handlers.
*/
- class Code(method: IMethod, name: String) {
- def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
+ class Code(method: IMethod, name: Name) {
+ def this(method: IMethod) = this(method, method.symbol.name)
/** The set of all blocks */
val blocks = mutable.ListBuffer[BasicBlock]()
@@ -82,7 +82,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'"
+ override def toString = "ICode '" + name.decoded + "'"
/* Compute a unique new label */
def nextLabel: Int = {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 90c15bca61..55f45257dc 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -55,23 +55,26 @@ abstract class Pickler extends SubComponent {
case _ =>
}
}
- // If there are any erroneous types in the tree, then we will crash
- // when we pickle it: so let's report an error instead. We know next
- // to nothing about what happened, but our supposition is a lot better
- // than "bad type: <error>" in terms of explanatory power.
- for (t <- unit.body) {
- if (t.isErroneous) {
- unit.error(t.pos, "erroneous or inaccessible type")
- return
- }
- if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
- return
- }
+ try {
+ pickle(unit.body)
+ } catch {
+ case e: FatalError =>
+ for (t <- unit.body) {
+ // If there are any erroneous types in the tree, then we will crash
+ // when we pickle it: so let's report an error instead. We know next
+ // to nothing about what happened, but our supposition is a lot better
+ // than "bad type: <error>" in terms of explanatory power.
+ //
+ // OPT: do this only as a recovery after fatal error. Checking in advance was expensive.
+ if (t.isErroneous) {
+ if (settings.debug) e.printStackTrace()
+ unit.error(t.pos, "erroneous or inaccessible type")
+ return
+ }
+ }
+ throw e
}
-
- pickle(unit.body)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 4bbfc945f6..ccfddab94a 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -92,11 +92,22 @@ abstract class Erasure extends AddInterfaces
// more rigorous way up front rather than catching it after the fact,
// but that will be more involved.
private def dotCleanup(sig: String): String = {
+ // OPT 50% of time in generic signatures (~1% of compile time) was in this method, hence the imperative rewrite.
var last: Char = '\u0000'
- sig map {
- case '.' if last != '>' => last = '.' ; '$'
- case ch => last = ch ; ch
+ var i = 0
+ val len = sig.length
+ val copy: Array[Char] = sig.toCharArray
+ var changed = false
+ while (i < sig.length) {
+ val ch = copy(i)
+ if (ch == '.' && last != '>') {
+ copy(i) = '$'
+ changed = true
+ }
+ last = ch
+ i += 1
}
+ if (changed) new String(copy) else sig
}
/** This object is only used for sanity testing when -check:genjvm is set.
@@ -522,6 +533,8 @@ abstract class Erasure extends AddInterfaces
class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
val typer = this.asInstanceOf[analyzer.Typer]
+ override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
+
/** Replace member references as follows:
*
* - `x == y` for == in class Any becomes `x equals y` with equals in class Object.
@@ -760,7 +773,7 @@ abstract class Erasure extends AddInterfaces
|| super.exclude(sym)
|| !sym.hasTypeAt(currentRun.refchecksPhase.id)
)
- override def matches(sym1: Symbol, sym2: Symbol) = true
+ override def matches(lo: Symbol, high: Symbol) = true
}
def isErasureDoubleDef(pair: SymbolPair) = {
import pair._
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 4222c4d8c8..870eafbf20 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -24,15 +24,16 @@ abstract class OverridingPairs extends SymbolPairs {
/** Symbols to exclude: Here these are constructors and private/artifact symbols,
* including bridges. But it may be refined in subclasses.
*/
- override protected def exclude(sym: Symbol) = (sym hasFlag PRIVATE | ARTIFACT) || sym.isConstructor
+ override protected def exclude(sym: Symbol) = sym.isPrivateLocal || sym.isArtifact || sym.isConstructor
/** Types always match. Term symbols match if their member types
* relative to `self` match.
*/
- override protected def matches(sym1: Symbol, sym2: Symbol) = sym1.isType || (
- (sym1.owner != sym2.owner)
- && !exclude(sym2)
- && relatively.matches(sym1, sym2)
- )
+ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
+ (lo.owner != high.owner) // don't try to form pairs from overloaded members
+ && !high.isPrivate // private or private[this] members never are overriden
+ && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
+ && relatively.matches(lo, high)
+ ) // TODO we don't call exclude(high), should we?
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index cc78e27282..32987fed8c 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -8,7 +8,7 @@ package transform
/** This phase maps ErasedValueTypes to the underlying unboxed representation and
* performs peephole optimizations.
*/
-trait PostErasure extends InfoTransform with TypingTransformers {
+trait PostErasure extends InfoTransform with TypingTransformers with scala.reflect.internal.transform.PostErasure {
val global: Global
import global._
@@ -19,16 +19,6 @@ trait PostErasure extends InfoTransform with TypingTransformers {
def newTransformer(unit: CompilationUnit): Transformer = new PostErasureTransformer(unit)
override def changesBaseClasses = false
- object elimErasedValueType extends TypeMap {
- def apply(tp: Type) = tp match {
- case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
- case ErasedValueType(_, underlying) => underlying
- case _ => mapOver(tp)
- }
- }
-
- def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
-
class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
override def transform(tree: Tree) = {
def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 3791af1629..c59b726076 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -306,6 +306,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
+ *
+ * !!! Is this safe in the face of the following?
+ * scala> trait T { def foo[A] = 0}; object O extends T { override def foo[B] = 0 }
*/
private def specializedName(sym: Symbol, env: TypeEnv): TermName = {
val tvars = (
@@ -391,13 +394,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* enclosing member with the annotation.
*/
private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
- !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+ !hasUnspecializableAnnotation(sym) && (
specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
|| sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
|| isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
)
)
+ private def hasUnspecializableAnnotation(sym: Symbol): Boolean =
+ sym.ownerChain.exists(_ hasAnnotation UnspecializedClass)
+
def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
case NormalizedMember(_) => true
case _ => false
@@ -433,10 +439,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else
specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if tp.isSpecialized => arg })
- case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: tparams.map(_.info))
+ case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: mapList(tparams)(symInfo)) // OPT
// since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
case NullaryMethodType(resTpe) => specializedTypeVars(resTpe)
- case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
+ case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: mapList(argSyms)(symTpe)) // OPT
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
@@ -907,16 +913,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (sym.isMethod) {
- val stvars = specializedTypeVars(sym)
- if (stvars.nonEmpty)
- debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
+ if (hasUnspecializableAnnotation(sym)) {
+ List()
+ } else {
+ val stvars = specializedTypeVars(sym)
+ if (stvars.nonEmpty)
+ debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
- val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
- val tps2 = tps1 filter stvars
- if (!sym.isDeferred)
- addConcreteSpecMethod(sym)
+ val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
+ val tps2 = tps1 filter stvars
+ if (!sym.isDeferred)
+ addConcreteSpecMethod(sym)
- specializeOn(tps2)
+ specializeOn(tps2)
+ }
}
else Nil
}
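
The new hasUnspecializableAnnotation helper walks the owner chain looking for @unspecialized, and needsSpecialization/specializeOn now short-circuit on it. A small illustration of the annotation it keys on (the Box class is an example of mine, not from this commit):

    import scala.annotation.unspecialized

    class Box[@specialized(Int) A](val a: A) {
      // Specialized variants are generated for this member...
      def get: A = a

      // ...but not for this one: the specializer bails out early on @unspecialized.
      @unspecialized
      def describe(f: A => String): String = f(a)
    }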
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b471d16ddd..5973c70583 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -128,6 +128,7 @@ abstract class TailCalls extends Transform {
logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+
}
object EmptyTailContext extends TailContext {
@@ -185,6 +186,18 @@ abstract class TailCalls extends Transform {
private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
+ override def transformUnit(unit: CompilationUnit): Unit = {
+ try {
+ super.transformUnit(unit)
+ } finally {
+ // OPT clear these after each compilation unit
+ failPositions.clear()
+ failReasons.clear()
+ accessed.clear()
+ }
+ }
+
/** Rewrite this tree to contain no tail recursive calls */
def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
@@ -218,12 +231,12 @@ abstract class TailCalls extends Transform {
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
- failReasons(ctx) = reason
+ if (ctx.isMandatory) failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
/* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- failPositions(ctx) = fun.pos
+ if (ctx.isMandatory) failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
@@ -237,7 +250,8 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
+ if (ctx.isMandatory && receiverIsSuper) // OPT expensive check, avoid unless we will actually report the error
+ failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
@@ -245,6 +259,11 @@ abstract class TailCalls extends Transform {
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
+
+ def isEligible(tree: DefDef) = {
+ val sym = tree.symbol
+ !(sym.hasAccessorFlag || sym.isConstructor)
+ }
tree match {
case ValDef(_, _, _, _) =>
@@ -253,7 +272,7 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
val newCtx = new DefDefTailContext(dd)
if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
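As a reminder of the user-facing contract touched here, a small self-contained example (TailDemo is an invented name): a @tailrec method whose recursive call is in tail position is rewritten into a loop, while a @tailrec method with no recursive call at all hits the error quoted just above.

import scala.annotation.tailrec

object TailDemo {
  @tailrec def sum(xs: List[Int], acc: Int = 0): Int = xs match {
    case Nil    => acc
    case h :: t => sum(t, acc + h) // tail call, rewritten into a jump
  }
  // Uncommenting the next definition fails with
  // "@tailrec annotated method contains no recursive calls":
  // @tailrec def notRecursive(x: Int): Int = x + 1
}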
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e193cf3de2..e7ea686bc8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -105,12 +105,11 @@ abstract class UnCurry extends InfoTransform
*/
def isByNameRef(tree: Tree) = (
tree.isTerm
- && !byNameArgs(tree)
&& (tree.symbol ne null)
&& (isByName(tree.symbol))
+ && !byNameArgs(tree)
)
-
// ------- Handling non-local returns -------------------------------------------------
/** The type of a non-local return expression with given argument type */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 94f8f509fc..b899cd8994 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -186,7 +186,7 @@ trait Checkable {
* additional conditions holds:
* - either A or B is effectively final
* - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin)
- * - both A and B are sealed, and every possible pairing of their children is irreconcilable
+ * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable
*
* TODO: the last two conditions of the last possibility (that the symbols are not of
* classes being compiled in the current run) are because this currently runs too early,
@@ -198,8 +198,9 @@ trait Checkable {
isEffectivelyFinal(sym1) // initialization important
|| isEffectivelyFinal(sym2)
|| !sym1.isTrait && !sym2.isTrait
- || sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+ || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
)
+ private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal
private def isEffectivelyFinal(sym: Symbol): Boolean = (
// initialization important
sym.initialize.isEffectivelyFinal || (
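A rough sketch of the sealed/final case documented above, with an invented hierarchy; note that the condition only fires for symbols outside the current run, so in practice Fruit and Veg would come from a previously compiled library.

sealed trait Fruit
final case class Apple(weight: Int) extends Fruit

sealed trait Veg
final case class Carrot(length: Int) extends Veg

object IrreconcilableDemo {
  // Every pairing of children (Apple vs Carrot) is irreconcilable, so a value
  // of type Fruit with Veg cannot exist and the test below is a candidate for
  // a "fruitless type test" warning.
  def check(f: Fruit): Boolean = f.isInstanceOf[Veg]
}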
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4d0eda2377..2043eb5d5d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -418,7 +418,7 @@ trait ContextErrors {
case TypeRef(_, _, arg :: _) if arg.typeSymbol == TupleClass(funArity) && funArity > 1 =>
sm"""|
|Note: The expected type requires a one-argument function accepting a $funArity-Tuple.
- | Consider a pattern matching anoynmous function, `{ case $example => ... }`"""
+ | Consider a pattern matching anonymous function, `{ case $example => ... }`"""
case _ => ""
}
case _ => ""
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 598b12b00d..5b7956a757 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -457,7 +457,9 @@ trait Contexts { self: Analyzer =>
c.prefix = prefixInChild
c.enclClass = if (isTemplateOrPackage) c else enclClass
c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
- c.enclMethod = if (isDefDef) c else enclMethod
+
+ // SI-8245 skip lazy getters (checked via `owner.isLazy`) so that `return` binds to the right place
+ c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
@@ -1303,7 +1305,7 @@ trait Contexts { self: Analyzer =>
var renamed = false
var selectors = tree.selectors
def current = selectors.head
- while (selectors.nonEmpty && result == NoSymbol) {
+ while ((selectors ne Nil) && result == NoSymbol) {
if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
if (name.isTypeName) current.name.toTypeName else current.name)
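A rough reconstruction of the SI-8245 shape this guards (not the actual test file): a return inside a lazy val initializer must bind to the enclosing user-written method, not to the synthetic lazy getter.

object Si8245Sketch {
  def lookup(xs: List[Int]): Int = {
    // The `return` must type-check against lookup's result type (Int), which is
    // why enclMethod has to skip the synthetic accessor owning this initializer.
    lazy val cached: Int = xs match {
      case Nil    => return -1
      case h :: _ => h
    }
    cached + 1
  }
}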
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 91321d4700..776920ed42 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -137,10 +137,6 @@ trait Implicits {
private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 }
- private def isInvalidConversionTarget(tpe: Type): Boolean = tpe match {
- case Function1(_, out) => AnyRefClass.tpe <:< out
- case _ => false
- }
private def isInvalidConversionSource(tpe: Type): Boolean = tpe match {
case Function1(in, _) => in <:< NullClass.tpe
case _ => false
@@ -629,7 +625,8 @@ trait Implicits {
// for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???`
// then Scaladoc will give us something of type `C[T]`, and it would like to know
// that `conv` is potentially available under such and such conditions
- case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc => applyImplicitArgs(tree)
+ case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc =>
+ applyImplicitArgs(tree)
case tree => tree
}
case _ => fallback
@@ -791,7 +788,7 @@ trait Implicits {
final class LocalShadower extends Shadower {
val shadowed = util.HashSet[Name](512)
def addInfos(infos: Infos) {
- shadowed addEntries infos.map(_.name)
+ infos.foreach(i => shadowed.addEntry(i.name))
}
def isShadowed(name: Name) = shadowed(name)
}
@@ -808,7 +805,6 @@ trait Implicits {
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && (info.sym eq Predef_conforms)
- || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -817,6 +813,7 @@ trait Implicits {
def survives(info: ImplicitInfo) = (
!isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded
&& isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt
+ && !shadower.isShadowed(info.name) // OPT rare, only check for plausible candidates
&& matchesPt(info) // stable and matches expected type
)
/** The implicits that are not valid because they come later in the source and
@@ -1361,11 +1358,17 @@ trait Implicits {
if (context.ambiguousErrors)
context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
}
- if (isInvalidConversionTarget(pt)) {
- maybeInvalidConversionError("the result type of an implicit conversion must be more specific than AnyRef")
- result = SearchFailure
+ pt match {
+ case Function1(_, out) =>
+ def prohibit(sym: Symbol) = if (sym.tpe <:< out) {
+ maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}")
+ result = SearchFailure
+ }
+ prohibit(AnyRefClass)
+ if (settings.isScala211) prohibit(AnyValClass)
+ case _ => false
}
- else if (settings.isScala211 && isInvalidConversionSource(pt)) {
+ if (settings.isScala211 && isInvalidConversionSource(pt)) {
maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
result = SearchFailure
}
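In user terms, the reworked check cuts off implicit-view searches whose target is as general as AnyRef, and under -Xsource:2.11 also AnyVal. A hypothetical trigger, not taken from the test suite:

object ViewDemo {
  def takeAnyVal(x: AnyVal): AnyVal = x

  // Adapting the String argument would require an implicit view String => AnyVal;
  // with -Xsource:2.11 that search now aborts with
  //   "the result type of an implicit conversion must be more specific than AnyVal"
  // rather than considering such overly general conversions.
  // takeAnyVal("not a value class")
}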
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index dd0923a696..997fd6fc65 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -595,6 +595,7 @@ trait Infer extends Checkable {
}
private[typechecker] def followApply(tp: Type): Type = tp match {
+ case _ if tp.isError => tp // SI-8228, `ErrorType nonPrivateMember nme.apply` returns a member with an erroneous type!
case NullaryMethodType(restp) =>
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 27e8698676..886dbed6d6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1069,8 +1069,9 @@ trait Namers extends MethodSynthesis {
}
def overriddenSymbol(resTp: Type) = {
+ lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala
intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
+ sym != NoSymbol && (site.memberType(sym) matches schema)
}
}
// TODO: see whether this or something similar would work instead:
@@ -1204,7 +1205,7 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetLocalAttrs(ddef.duplicate)
+ val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate)
// having defs here is important to make sure that there's no sneaky tree sharing
// in methods with multiple default parameters
def rtparams = rtparams0.map(_.duplicate)
@@ -1291,7 +1292,7 @@ trait Namers extends MethodSynthesis {
return // fix #3649 (prevent crash in erroneous source code)
}
}
- val ClassDef(_, _, rtparams, _) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, rtparams, _) = resetAttrs(cdef.duplicate)
defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT)))
nmr
}
@@ -1407,8 +1408,14 @@ trait Namers extends MethodSynthesis {
if (expr1.isErrorTyped)
ErrorType
else {
- if (!treeInfo.isStableIdentifierPattern(expr1))
- typer.TyperErrorGen.UnstableTreeError(expr1)
+ expr1 match {
+ case This(_) =>
+ // SI-8207 okay, typedIdent expands Ident(self) to C.this which doesn't satisfy the next case
+ // TODO should we change `typedIdent` not to expand to the `Ident` to a `This`?
+ case _ if treeInfo.isStableIdentifierPattern(expr1) =>
+ case _ =>
+ typer.TyperErrorGen.UnstableTreeError(expr1)
+ }
val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
checkSelectors(newImport)
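The SI-8207 shape admitted by the new first case, reconstructed as a rough sketch rather than the actual test file: an import through the self-reference alias, which typedIdent rewrites to C.this before the stability check runs.

class Greeter { self =>
  val greeting = "hello"

  // `self` is typed as `Greeter.this`, a This(_) tree, which the match above now
  // accepts instead of flagging the import as unstable.
  import self.greeting

  def greet(): Unit = println(greeting)
}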
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 46ff98875f..6a4df415ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -173,7 +173,7 @@ trait NamesDefaults { self: Analyzer =>
// setSymbol below is important because the 'selected' function might be overloaded. by
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
- val selectPos =
+ val selectPos =
if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end))
else baseFun.pos
val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos))
@@ -287,7 +287,7 @@ trait NamesDefaults { self: Analyzer =>
}
else {
// TODO In 83c9c764b, we tried to a stable type here to fix SI-7234. But the resulting TypeTree over a
- // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (SI-7516),
// which is important for (at least) macros.
arg.tpe
}
@@ -310,7 +310,7 @@ trait NamesDefaults { self: Analyzer =>
new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
if (repeated) arg match {
case WildcardStarArg(expr) => expr
- case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg))
+ case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg))
} else arg
}
Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 68d724b6fc..2125e281f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1414,7 +1414,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeRef(pre, sym, args) =>
tree match {
case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
- // FIXME: reconcile this check with one in resetAllAttrs
+ // FIXME: reconcile this check with one in resetAttrs
case _ => checkUndesiredProperties(sym, tree.pos)
}
if(sym.isJavaDefined)
@@ -1596,6 +1596,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkUnexpandedMacro(t: Tree) =
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro)
+ unit.error(t.pos, "macro has not been expanded")
+
override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
@@ -1755,6 +1759,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
varianceValidator.traverse(tt.original) // See SI-7872
case _ =>
}
+
+ checkUnexpandedMacro(result)
+
result
} catch {
case ex: TypeError =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 995f98cc2c..57f27a05fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -46,7 +46,7 @@ trait StdAttachments {
* The parameter is of type `Any`, because macros can expand both into trees and into annotations.
*/
def hasMacroExpansionAttachment(any: Any): Boolean = any match {
- case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case tree: Tree => tree.hasAttachment[MacroExpansionAttachment]
case _ => false
}
@@ -96,7 +96,7 @@ trait StdAttachments {
*/
def isMacroExpansionSuppressed(tree: Tree): Boolean =
( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
- || tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined
+ || tree.hasAttachment[SuppressMacroExpansionAttachment.type]
|| (tree match {
// we have to account for the fact that during typechecking an expandee might become wrapped,
// i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
@@ -150,7 +150,7 @@ trait StdAttachments {
/** Determines whether a tree should or should not be adapted,
* because someone has put MacroImplRefAttachment on it.
*/
- def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
+ def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type]
/** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected
* from typedNamedApply, the applyDynamicNamed argument rewriter, the latter
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 9776b1e80e..10fe530445 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -579,7 +579,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+
// Side effect time! Don't be an idiot like me and think you
// can move "val sym = tree.symbol" before this line, because
// inferExprAlternative side-effects the tree's symbol.
@@ -831,7 +832,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else tpr.typed(withImplicitArgs, mode, pt)
}
orElse { _ =>
- val resetTree = resetLocalAttrs(original)
+ val resetTree = resetAttrs(original)
debuglog(s"fallback on implicits: ${tree}/$resetTree")
val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
@@ -992,7 +993,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptMismatchedSkolems() = {
def canIgnoreMismatch = (
!context.reportErrors && isPastTyper
- || tree.attachments.get[MacroExpansionAttachment].isDefined
+ || tree.hasAttachment[MacroExpansionAttachment]
)
def bound = pt match {
case ExistentialType(qs, _) => qs
@@ -2232,14 +2233,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
DeprecatedParamNameError(p, n)
}
}
- }
- if (meth.isStructuralRefinementMember)
- checkMethodStructuralCompatible(ddef)
+ if (meth.isStructuralRefinementMember)
+ checkMethodStructuralCompatible(ddef)
- if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
- case List(param) :: _ if !param.isImplicit =>
- checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
- case _ =>
+ if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+ case List(param) :: _ if !param.isImplicit =>
+ checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+ case _ =>
+ }
}
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
@@ -2306,7 +2307,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
- val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
+ val LabelDef(_, _, rhs1) = resetAttrs(ldef)
+ val rhs2 = typed(rhs1, restpe)
ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
@@ -2588,7 +2590,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
//
// Well behaved trees satisfy the property:
//
- // typed(tree) == typed(resetLocalAttrs(typed(tree))
+ // typed(tree) == typed(resetAttrs(typed(tree))
//
// Trees constructed without low-level symbol manipulation get this for free;
// references to local symbols are cleared by `ResetAttrs`, but bind to the
@@ -3411,7 +3413,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
// precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen stableTypeFor arg orElse arg.tpe))
+ val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
@@ -3831,7 +3833,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// lifted out of typed1 because it's needed in typedImplicit0
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
- inferPolyAlternatives(fun, args map (_.tpe))
+ inferPolyAlternatives(fun, mapList(args)(treeTpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
@@ -3851,7 +3853,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
- val targs = args map (_.tpe)
+ val targs = mapList(args)(treeTpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (isPredefClassOf(fun.symbol))
typedClassOf(tree, args.head, noGen = true)
@@ -4871,7 +4873,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedHigherKindedType(arg, mode, pt)
}
- val argtypes = args1 map (_.tpe)
+ val argtypes = mapList(args1)(treeTpe)
foreach2(args, tparams) { (arg, tparam) =>
// note: can't use args1 in selector, because Binds got replaced
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index ffac29b4b8..cc2d9141ce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -52,13 +52,13 @@ trait Unapplies extends ast.TreeDSL {
}
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
- val ClassDef(_, _, _, Template(_, _, body)) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, _, Template(_, _, body)) = resetAttrs(cdef.duplicate)
val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body
vparamss
}
private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = {
- val ClassDef(_, _, tparams, _) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, tparams, _) = resetAttrs(cdef.duplicate)
val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT)))
tparamsInvariant
}
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 4237f36ade..bd95fdbb50 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -7,7 +7,7 @@ package scala
package tools
package nsc
-import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter, Reader }
package object util {
// forwarder for old code that builds against 2.9 and 2.10
@@ -46,6 +46,17 @@ package object util {
(result, ts2 filterNot (ts1 contains _))
}
+ def stringFromReader(reader: Reader) = {
+ val writer = new StringWriter()
+ var c = reader.read()
+ while(c != -1) {
+ writer.write(c)
+ c = reader.read()
+ }
+ reader.close()
+ writer.toString()
+ }
+
/** Generate a string using a routine that wants to write on a stream. */
def stringFromWriter(writer: PrintWriter => Unit): String = {
val stringWriter = new StringWriter()
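A minimal use of the new helper (the enclosing object is invented for the example):

import java.io.StringReader
import scala.tools.nsc.util.stringFromReader

object ReaderDemo extends App {
  // Drains the reader character by character and closes it, as defined above.
  println(stringFromReader(new StringReader("drained by stringFromReader")))
}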
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index bb0bbd79a3..8630ecf69e 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -20,8 +20,8 @@ trait FastTrack {
private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
- new { val c: c0.type = c0 } with MacroImplementations
+ private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } =
+ new { val c: c0.type = c0 } with FormatInterpolator
private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
new { val c: c0.type = c0 } with QuasiquoteImpls
private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
@@ -48,7 +48,7 @@ trait FastTrack {
makeBlackbox( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
makeBlackbox( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
makeBlackbox( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
- makeBlackbox( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ makeBlackbox( StringContext_f) { case _ => _.interpolate },
makeBlackbox(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree },
makeWhitebox( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote },
makeWhitebox(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote }
diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
new file mode 100644
index 0000000000..d5e674ebae
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -0,0 +1,329 @@
+package scala.tools.reflect
+
+import scala.reflect.macros.runtime.Context
+import scala.collection.mutable.{ ListBuffer, Stack }
+import scala.reflect.internal.util.Position
+import scala.PartialFunction.cond
+import scala.util.matching.Regex.Match
+
+import java.util.{ Formatter, Formattable, IllegalFormatException }
+
+abstract class FormatInterpolator {
+ val c: Context
+ val global: c.universe.type = c.universe
+
+ import c.universe.{ Match => _, _ }
+ import definitions._
+ import treeInfo.Applied
+
+ @inline private def truly(body: => Unit): Boolean = { body ; true }
+ @inline private def falsely(body: => Unit): Boolean = { body ; false }
+
+ private def fail(msg: String) = c.abort(c.enclosingPosition, msg)
+ private def bail(msg: String) = global.abort(msg)
+
+ def interpolate: Tree = c.macroApplication match {
+ //case q"$_(..$parts).f(..$args)" =>
+ case Applied(Select(Apply(_, parts), _), _, argss) =>
+ val args = argss.flatten
+ def badlyInvoked = (parts.length != args.length + 1) && truly {
+ def because(s: String) = s"too $s arguments for interpolated string"
+ val (p, msg) =
+ if (parts.length == 0) (c.prefix.tree.pos, "there are no parts")
+ else if (args.length + 1 < parts.length)
+ (if (args.isEmpty) c.enclosingPosition else args.last.pos, because("few"))
+ else (args(parts.length-1).pos, because("many"))
+ c.abort(p, msg)
+ }
+ if (badlyInvoked) c.macroApplication else interpolated(parts, args)
+ case other =>
+ bail(s"Unexpected application ${showRaw(other)}")
+ other
+ }
+
+ /** Every part except the first must begin with a conversion for
+ * the arg that preceded it. If the conversion is missing, "%s"
+ * is inserted.
+ *
+ * In any other position, the only permissible conversions are
+ * the literals (%% and %n) or an index reference (%1$ or %<).
+ *
+ * A conversion specifier has the form:
+ *
+ * [index$][flags][width][.precision]conversion
+ *
+ * 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ * 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ * 3) "...${smth}%" => error
+ * 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ * 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ * 6) "...${smth}[%legalJavaConversion]" => okay*
+ * 7) "...${smth}[%illegalJavaConversion]" => error
+ * *Legal according to [[http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html]]
+ */
+ def interpolated(parts: List[Tree], args: List[Tree]) = {
+ val fstring = new StringBuilder
+ val evals = ListBuffer[ValDef]()
+ val ids = ListBuffer[Ident]()
+ val argStack = Stack(args: _*)
+
+ // create a tmp val and add it to the ids passed to format
+ def defval(value: Tree, tpe: Type): Unit = {
+ val freshName = TermName(c.freshName("arg$"))
+ evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
+ ids += Ident(freshName)
+ }
+ // Append the nth part to the string builder, possibly prepending an omitted %s first.
+ // Sanity-check the % fields in this part.
+ def copyPart(part: Tree, n: Int): Unit = {
+ import SpecifierGroups.{ Spec, Index }
+ val s0 = part match {
+ case Literal(Constant(x: String)) => x
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val s = StringContext.treatEscapes(s0)
+ val ms = fpat findAllMatchIn s
+
+ def errorLeading(op: Conversion) = op.errorAt(Spec, s"conversions must follow a splice; ${Conversion.literalHelp}")
+
+ def first = n == 0
+ // a conversion for the arg is required
+ if (!first) {
+ val arg = argStack.pop()
+ def s_%() = {
+ fstring append "%s"
+ defval(arg, AnyTpe)
+ }
+ def accept(op: Conversion) = {
+ if (!op.isLeading) errorLeading(op)
+ op.accepts(arg) match {
+ case Some(tpe) => defval(arg, tpe)
+ case None =>
+ }
+ }
+ if (ms.hasNext) {
+ Conversion(ms.next, part.pos, args.size) match {
+ case Some(op) if op.isLiteral => s_%()
+ case Some(op) if op.indexed =>
+ if (op.index map (_ == n) getOrElse true) accept(op)
+ else {
+ // either some other arg num, or '<'
+ c.warning(op.groupPos(Index), "Index is not this arg")
+ s_%()
+ }
+ case Some(op) => accept(op)
+ case None =>
+ }
+ } else s_%()
+ }
+ // any remaining conversions must be either literals or indexed
+ while (ms.hasNext) {
+ Conversion(ms.next, part.pos, args.size) match {
+ case Some(op) if first && op.hasFlag('<') => op.badFlag('<', "No last arg")
+ case Some(op) if op.isLiteral || op.indexed => // OK
+ case Some(op) => errorLeading(op)
+ case None =>
+ }
+ }
+ fstring append s
+ }
+
+ parts.zipWithIndex foreach {
+ case (part, n) => copyPart(part, n)
+ }
+
+ //q"{..$evals; ${fstring.toString}.format(..$ids)}"
+ locally {
+ val expr =
+ Apply(
+ Select(
+ Literal(Constant(fstring.toString)),
+ newTermName("format")),
+ ids.toList
+ )
+ val p = c.macroApplication.pos
+ Block(evals.toList, atPos(p.focus)(expr)) setPos p.makeTransparent
+ }
+ }
+
+ val fpat = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r
+ object SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, Precision, CC = Value }
+
+ val stdContextTags = new { val tc: c.type = c } with StdContextTags
+ import stdContextTags._
+ val tagOfFormattable = typeTag[Formattable]
+
+ /** A conversion specifier matched by `m` in the string part at `pos`,
+ * with `argc` arguments to interpolate.
+ */
+ sealed trait Conversion {
+ def m: Match
+ def pos: Position
+ def argc: Int
+
+ import SpecifierGroups.{ Value => SpecGroup, _ }
+ private def maybeStr(g: SpecGroup) = Option(m group g.id)
+ private def maybeInt(g: SpecGroup) = maybeStr(g) map (_.toInt)
+ val index: Option[Int] = maybeInt(Index)
+ val flags: Option[String] = maybeStr(Flags)
+ val width: Option[Int] = maybeInt(Width)
+ val precision: Option[Int] = maybeStr(Precision) map (_.drop(1).toInt)
+ val op: String = maybeStr(CC) getOrElse ""
+
+ def cc: Char = if ("tT" contains op(0)) op(1) else op(0)
+
+ def indexed: Boolean = index.nonEmpty || hasFlag('<')
+ def isLiteral: Boolean = false
+ def isLeading: Boolean = m.start(0) == 0
+ def verify: Boolean = goodFlags && goodIndex
+ def accepts(arg: Tree): Option[Type]
+
+ val allFlags = "-#+ 0,(<"
+ def hasFlag(f: Char) = (flags getOrElse "") contains f
+ def hasAnyFlag(fs: String) = fs exists (hasFlag)
+
+ def badFlag(f: Char, msg: String) = {
+ val i = flags map (_.indexOf(f)) filter (_ >= 0) getOrElse 0
+ errorAtOffset(Flags, i, msg)
+ }
+ def groupPos(g: SpecGroup) = groupPosAt(g, 0)
+ def groupPosAt(g: SpecGroup, i: Int) = pos withPoint (pos.point + m.start(g.id) + i)
+ def errorAt(g: SpecGroup, msg: String) = c.error(groupPos(g), msg)
+ def errorAtOffset(g: SpecGroup, i: Int, msg: String) = c.error(groupPosAt(g, i), msg)
+
+ def noFlags = flags.isEmpty || falsely { errorAt(Flags, "flags not allowed") }
+ def noWidth = width.isEmpty || falsely { errorAt(Width, "width not allowed") }
+ def noPrecision = precision.isEmpty || falsely { errorAt(Precision, "precision not allowed") }
+ def only_-(msg: String) = {
+ val badFlags = (flags getOrElse "") filterNot { case '-' | '<' => true case _ => false }
+ badFlags.isEmpty || falsely { badFlag(badFlags(0), s"Only '-' allowed for $msg") }
+ }
+ protected def okFlags: String = allFlags
+ def goodFlags = {
+ val badFlags = flags map (_ filterNot (okFlags contains _))
+ for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'")
+ badFlags.getOrElse("").isEmpty
+ }
+ def goodIndex = {
+ if (index.nonEmpty && hasFlag('<'))
+ c.warning(groupPos(Index), "Argument index ignored if '<' flag is present")
+ val okRange = index map (i => i > 0 && i <= argc) getOrElse true
+ okRange || hasFlag('<') || falsely { errorAt(Index, "Argument index out of range") }
+ }
+ /** Pick the type of an arg to format from among the variants
+ * supported by a conversion. This is the type of the temporary,
+ * so failure results in an erroneous assignment to the first variant.
+ * A more complete message would be nice.
+ */
+ def pickAcceptable(arg: Tree, variants: Type*): Option[Type] =
+ variants find (arg.tpe <:< _) orElse (
+ variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree)
+ ) orElse Some(variants(0))
+ }
+ object Conversion {
+ import SpecifierGroups.{ Spec, CC, Width }
+ def apply(m: Match, p: Position, n: Int): Option[Conversion] = {
+ def badCC(msg: String) = {
+ val dk = new ErrorXn(m, p)
+ val at = if (dk.op.isEmpty) Spec else CC
+ dk.errorAt(at, msg)
+ }
+ def cv(cc: Char) = cc match {
+ case 'b' | 'B' | 'h' | 'H' | 's' | 'S' =>
+ new GeneralXn(m, p, n)
+ case 'c' | 'C' =>
+ new CharacterXn(m, p, n)
+ case 'd' | 'o' | 'x' | 'X' =>
+ new IntegralXn(m, p, n)
+ case 'e' | 'E' | 'f' | 'g' | 'G' | 'a' | 'A' =>
+ new FloatingPointXn(m, p, n)
+ case 't' | 'T' =>
+ new DateTimeXn(m, p, n)
+ case '%' | 'n' =>
+ new LiteralXn(m, p, n)
+ case _ =>
+ badCC(s"illegal conversion character '$cc'")
+ null
+ }
+ Option(m group CC.id) map (cc => cv(cc(0))) match {
+ case Some(x) => Option(x) filter (_.verify)
+ case None =>
+ badCC(s"Missing conversion operator in '${m.matched}'; $literalHelp")
+ None
+ }
+ }
+ val literalHelp = "use %% for literal %, %n for newline"
+ }
+ class GeneralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ def accepts(arg: Tree) = cc match {
+ case 's' | 'S' if hasFlag('#') => pickAcceptable(arg, tagOfFormattable.tpe)
+ case 'b' | 'B' => if (arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
+ case _ => Some(AnyTpe)
+ }
+ override protected def okFlags = cc match {
+ case 's' | 'S' => "-#<"
+ case _ => "-<"
+ }
+ }
+ class LiteralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ import SpecifierGroups.Width
+ override val isLiteral = true
+ override def verify = op match {
+ case "%" => super.verify && noPrecision && truly(width foreach (_ => c.warning(groupPos(Width), "width ignored on literal")))
+ case "n" => noFlags && noWidth && noPrecision
+ }
+ override protected val okFlags = "-"
+ def accepts(arg: Tree) = None
+ }
+ class CharacterXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = super.verify && noPrecision && only_-("c conversion")
+ def accepts(arg: Tree) = pickAcceptable(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
+ }
+ class IntegralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = {
+ def d_# = (cc == 'd' && hasFlag('#') &&
+ truly { badFlag('#', "# not allowed for d conversion") }
+ )
+ def x_comma = (cc != 'd' && hasFlag(',') &&
+ truly { badFlag(',', "',' only allowed for d conversion of integral types") }
+ )
+ super.verify && noPrecision && !d_# && !x_comma
+ }
+ override def accepts(arg: Tree) = {
+ def isBigInt = arg.tpe <:< tagOfBigInt.tpe
+ val maybeOK = "+ ("
+ def bad_+ = cond(cc) {
+ case 'o' | 'x' | 'X' if hasAnyFlag(maybeOK) && !isBigInt =>
+ maybeOK filter hasFlag foreach (badf =>
+ badFlag(badf, s"only use '$badf' for BigInt conversions to o, x, X"))
+ true
+ }
+ if (bad_+) None else pickAcceptable(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
+ }
+ }
+ class FloatingPointXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = super.verify && (cc match {
+ case 'a' | 'A' =>
+ val badFlags = ",(" filter hasFlag
+ noPrecision && badFlags.isEmpty || falsely {
+ badFlags foreach (badf => badFlag(badf, s"'$badf' not allowed for a, A"))
+ }
+ case _ => true
+ })
+ def accepts(arg: Tree) = pickAcceptable(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
+ }
+ class DateTimeXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ import SpecifierGroups.CC
+ def hasCC = (op.length == 2 ||
+ falsely { errorAt(CC, "Date/time conversion must have two characters") })
+ def goodCC = ("HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc" contains cc) ||
+ falsely { errorAtOffset(CC, 1, s"'$cc' doesn't seem to be a date or time conversion") }
+ override def verify = super.verify && hasCC && goodCC && noPrecision && only_-("date/time conversions")
+ def accepts(arg: Tree) = pickAcceptable(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
+ }
+ class ErrorXn(val m: Match, val pos: Position) extends Conversion {
+ val argc = 0
+ override def verify = false
+ def accepts(arg: Tree) = None
+ }
+}
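The numbered cases in the doc comment above correspond to ordinary uses of the f interpolator; as a reminder of that surface behaviour:

object FDemo extends App {
  val n = 42
  println(f"$n")       // case 1: treated as f"$n%s"
  println(f"$n items") // case 2: treated as f"$n%s items"
  println(f"$n%n")     // case 4: treated as f"$n%s%n"
  println(f"$n%%")     // case 5: treated as f"$n%s%%"
  println(f"$n%08d")   // case 6: a legal java.util.Formatter conversion
  // f"$n%" (case 3) and f"$n%q" (case 7) are rejected at compile time
}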
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
deleted file mode 100644
index a9ed419b1e..0000000000
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ /dev/null
@@ -1,171 +0,0 @@
-package scala.tools.reflect
-
-import scala.reflect.macros.contexts.Context
-import scala.collection.mutable.ListBuffer
-import scala.collection.mutable.Stack
-import scala.reflect.internal.util.Position
-
-abstract class MacroImplementations {
- val c: Context
-
- import c.universe._
- import definitions._
-
- def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = {
- // the parts all have the same position information (as the expression is generated by the compiler)
- // the args have correct position information
-
- // the following conditions can only be violated if invoked directly
- if (parts.length != args.length + 1) {
- if(parts.length == 0)
- c.abort(c.prefix.tree.pos, "too few parts")
- else if(args.length + 1 < parts.length)
- c.abort(if(args.length==0) c.enclosingPosition else args.last.pos,
- "too few arguments for interpolated string")
- else
- c.abort(args(parts.length-1).pos,
- "too many arguments for interpolated string")
- }
-
- val pi = parts.iterator
- val bldr = new java.lang.StringBuilder
- val evals = ListBuffer[ValDef]()
- val ids = ListBuffer[Ident]()
- val argStack = Stack(args : _*)
-
- def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.freshName("arg$"))
- evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
- ids += Ident(freshName)
- }
-
- def isFlag(ch: Char): Boolean = {
- ch match {
- case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
- case _ => false
- }
- }
-
- def checkType(arg: Tree, variants: Type*): Option[Type] = {
- variants.find(arg.tpe <:< _).orElse(
- variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse(
- Some(variants(0))
- )
- )
- }
-
- val stdContextTags = new { val tc: c.type = c } with StdContextTags
- import stdContextTags._
-
- def conversionType(ch: Char, arg: Tree): Option[Type] = {
- ch match {
- case 'b' | 'B' =>
- if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
- case 'h' | 'H' =>
- Some(AnyTpe)
- case 's' | 'S' =>
- Some(AnyTpe)
- case 'c' | 'C' =>
- checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
- case 'd' | 'o' | 'x' | 'X' =>
- checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
- case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' =>
- checkType(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
- case 't' | 'T' =>
- checkType(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
- case _ => None
- }
- }
-
- def copyString(first: Boolean): Unit = {
- val strTree = pi.next()
- val rawStr = strTree match {
- case Literal(Constant(str: String)) => str
- case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
- }
- val str = StringContext.treatEscapes(rawStr)
- val strLen = str.length
- val strIsEmpty = strLen == 0
- def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
- def isPercent(idx: Int) = charAtIndexIs(idx, '%')
- def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
- var idx = 0
-
- def errorAtIndex(idx: Int, msg: String) = c.error(Position.offset(strTree.pos.source, strTree.pos.point + idx), msg)
- def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
- def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
- def nonEscapedPercent(idx: Int) = errorAtIndex(idx,
- "conversions must follow a splice; use %% for literal %, %n for newline")
-
- // STEP 1: handle argument conversion
- // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
- // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
- // 3) "...${smth}%" => error
- // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
- // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
- // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
- // 7) "...${smth}[%illegalJavaConversion]" => error
- if (!first) {
- val arg = argStack.pop()
- if (isConversion(0)) {
- // PRE str is not empty and str(0) == '%'
- // argument index parameter is not allowed, thus parse
- // [flags][width][.precision]conversion
- var pos = 1
- while (pos < strLen && isFlag(str charAt pos)) pos += 1
- while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- if (pos < strLen && str.charAt(pos) == '.') {
- pos += 1
- while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- }
- if (pos < strLen) {
- conversionType(str charAt pos, arg) match {
- case Some(tpe) => defval(arg, tpe)
- case None => illegalConversionCharacter(pos)
- }
- } else {
- wrongConversionString(pos - 1)
- }
- idx = 1
- } else {
- bldr append "%s"
- defval(arg, AnyTpe)
- }
- }
-
- // STEP 2: handle the rest of the text
- // 1) %n tokens are left as is
- // 2) %% tokens are left as is
- // 3) other usages of percents are reported as errors
- if (!strIsEmpty) {
- while (idx < strLen) {
- if (isPercent(idx)) {
- if (isConversion(idx)) nonEscapedPercent(idx)
- else idx += 1 // skip n and % in %n and %%
- }
- idx += 1
- }
- bldr append (str take idx)
- }
- }
-
- copyString(first = true)
- while (pi.hasNext) {
- copyString(first = false)
- }
-
- val fstring = bldr.toString
-// val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*))
-// https://issues.scala-lang.org/browse/SI-5824, therefore
- val expr =
- Apply(
- Select(
- Literal(Constant(fstring)),
- newTermName("format")),
- List(ids: _* )
- )
-
- Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
- }
-
-}
diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
index f8ded56ec6..6f369212ad 100644
--- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -12,9 +12,10 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val
extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
override def transformedType(sym: Symbol) =
- erasure.transformInfo(sym,
- uncurry.transformInfo(sym,
- refChecks.transformInfo(sym, sym.info)))
+ postErasure.transformInfo(sym,
+ erasure.transformInfo(sym,
+ uncurry.transformInfo(sym,
+ refChecks.transformInfo(sym, sym.info))))
override def isCompilerUniverse = true
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index 2a3851ba85..4a3db09909 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -71,12 +71,6 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
*/
def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
- /** Recursively resets symbols and types in a given tree.
- * WARNING: Don't use this API, go for [[untypecheck]] instead.
- */
- @deprecated("Use `tb.untypecheck` instead", "2.11.0")
- def resetAllAttrs(tree: u.Tree): u.Tree
-
/** Recursively resets locally defined symbols and types in a given tree.
* WARNING: Don't use this API, go for [[untypecheck]] instead.
*/
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index c54f2d06ba..7bae3203c2 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -249,7 +249,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
- val cleanedUp = resetLocalAttrs(moduledef)
+ val cleanedUp = resetAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp.asInstanceOf[ModuleDef]
}
@@ -398,18 +398,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
uitree
}
- def resetAllAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
- import compilerApi._
- val ctree: compiler.Tree = importer.importTree(tree)
- val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
- val uttree = exporter.importTree(ttree)
- uttree
- }
-
def resetLocalAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
import compilerApi._
val ctree: compiler.Tree = importer.importTree(tree)
- val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
+ val ttree: compiler.Tree = compiler.resetAttrs(ctree)
val uttree = exporter.importTree(ttree)
uttree
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
index 8a54519401..2027d43264 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -31,26 +31,28 @@ trait Holes { self: Quasiquotes =>
import definitions._
import universeTypes._
- protected lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
- protected def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
- protected def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
- protected def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
- protected def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
- protected def isNativeType(tpe: Type) =
+ private lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
+ private def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
+ private def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
+ private def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
+ private def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
+ private def isNativeType(tpe: Type) =
(tpe <:< treeType) || (tpe <:< nameType) || (tpe <:< modsType) ||
(tpe <:< flagsType) || (tpe <:< symbolType)
- protected def isBottomType(tpe: Type) =
+ private def isBottomType(tpe: Type) =
tpe <:< NothingClass.tpe || tpe <:< NullClass.tpe
- protected def stripIterable(tpe: Type, limit: Option[Cardinality] = None): (Cardinality, Type) =
+ private def extractIterableTParam(tpe: Type) =
+ IterableTParam.asSeenFrom(tpe, IterableClass)
+ private def stripIterable(tpe: Type, limit: Option[Cardinality] = None): (Cardinality, Type) =
if (limit.map { _ == NoDot }.getOrElse { false }) (NoDot, tpe)
else if (tpe != null && !isIterableType(tpe)) (NoDot, tpe)
else if (isBottomType(tpe)) (NoDot, tpe)
else {
- val targ = IterableTParam.asSeenFrom(tpe, IterableClass)
+ val targ = extractIterableTParam(tpe)
val (card, innerTpe) = stripIterable(targ, limit.map { _.pred })
(card.succ, innerTpe)
}
- protected def iterableTypeFromCard(n: Cardinality, tpe: Type): Type = {
+ private def iterableTypeFromCard(n: Cardinality, tpe: Type): Type = {
if (n == NoDot) tpe
else appliedType(IterableClass.toType, List(iterableTypeFromCard(n.pred, tpe)))
}
@@ -74,8 +76,7 @@ trait Holes { self: Quasiquotes =>
class ApplyHole(card: Cardinality, splicee: Tree) extends Hole {
val (strippedTpe, tpe): (Type, Type) = {
- if (stripIterable(splicee.tpe)._1.value < card.value) cantSplice()
- val (_, strippedTpe) = stripIterable(splicee.tpe, limit = Some(card))
+ val (strippedCard, strippedTpe) = stripIterable(splicee.tpe, limit = Some(card))
if (isBottomType(strippedTpe)) cantSplice()
else if (isNativeType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, strippedTpe))
else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, treeType))
@@ -88,14 +89,14 @@ trait Holes { self: Quasiquotes =>
else if (isLiftableType(itpe)) lifted(itpe)(tree)
else global.abort("unreachable")
if (card == NoDot) inner(strippedTpe)(splicee)
- else iterated(card, strippedTpe, inner(strippedTpe))(splicee)
+ else iterated(card, splicee, splicee.tpe)
}
val pos = splicee.pos
val cardinality = stripIterable(tpe)._1
- protected def cantSplice(): Nothing = {
+ private def cantSplice(): Nothing = {
val (iterableCard, iterableType) = stripIterable(splicee.tpe)
val holeCardMsg = if (card != NoDot) s" with $card" else ""
val action = "splice " + splicee.tpe + holeCardMsg
@@ -111,28 +112,66 @@ trait Holes { self: Quasiquotes =>
c.abort(splicee.pos, s"Can't $action, $advice")
}
- protected def lifted(tpe: Type)(tree: Tree): Tree = {
+ private def lifted(tpe: Type)(tree: Tree): Tree = {
val lifter = inferLiftable(tpe)
assert(lifter != EmptyTree, s"couldnt find a liftable for $tpe")
val lifted = Apply(lifter, List(tree))
atPos(tree.pos)(lifted)
}
- protected def iterated(card: Cardinality, tpe: Type, elementTransform: Tree => Tree = identity)(tree: Tree): Tree = {
- assert(card != NoDot)
- def reifyIterable(tree: Tree, n: Cardinality): Tree = {
- def loop(tree: Tree, n: Cardinality): Tree =
- if (n == NoDot) elementTransform(tree)
- else {
- val x: TermName = c.freshName()
- val wrapped = reifyIterable(Ident(x), n.pred)
- val xToWrapped = Function(List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)), wrapped)
- Select(Apply(Select(tree, nme.map), List(xToWrapped)), nme.toList)
- }
- if (tree.tpe != null && (tree.tpe <:< listTreeType || tree.tpe <:< listListTreeType)) tree
- else atPos(tree.pos)(loop(tree, n))
+ private def toStats(tree: Tree): Tree =
+ // q"$u.build.toStats($tree)"
+ Apply(Select(Select(u, nme.build), nme.toStats), tree :: Nil)
+
+ private def toList(tree: Tree, tpe: Type): Tree =
+ if (isListType(tpe)) tree
+ else Select(tree, nme.toList)
+
+ private def mapF(tree: Tree, f: Tree => Tree): Tree =
+ if (f(Ident(TermName("x"))) equalsStructure Ident(TermName("x"))) tree
+ else {
+ val x: TermName = c.freshName()
+ // q"$tree.map { $x => ${f(Ident(x))} }"
+ Apply(Select(tree, nme.map),
+ Function(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree) :: Nil,
+ f(Ident(x))) :: Nil)
}
- reifyIterable(tree, card)
+
+ private object IterableType {
+ def unapply(tpe: Type): Option[Type] =
+ if (isIterableType(tpe)) Some(extractIterableTParam(tpe)) else None
+ }
+
+ private object LiftedType {
+ def unapply(tpe: Type): Option[Tree => Tree] =
+ if (tpe <:< treeType) Some(t => t)
+ else if (isLiftableType(tpe)) Some(lifted(tpe)(_))
+ else None
+ }
+
+ /** Map a high-cardinality splice onto an expression that evaluates to a list of the given cardinality.
+ *
+ * All possible combinations of representations are given in the table below:
+ *
+ * input output for T <: Tree output for T: Liftable
+ *
+ * ..${x: Iterable[T]} x.toList x.toList.map(lift)
+ * ..${x: T} toStats(x) toStats(lift(x))
+ *
+ * ...${x: Iterable[Iterable[T]]} x.toList { _.toList } x.toList.map { _.toList.map(lift) }
+ * ...${x: Iterable[T]} x.toList.map { toStats(_) } x.toList.map { toStats(lift(_)) }
+ * ...${x: T} toStats(x).map { toStats(_) } toStats(lift(x)).map { toStats(_) }
+ *
+ * For optimization purposes `x.toList` is represented as just `x` if it is statically known that
+ * x is not just an Iterable[T] but a List[T]. Similarly, no mapping is performed if the mapping
+ * function is known to be the identity.
+ */
+ private def iterated(card: Cardinality, tree: Tree, tpe: Type): Tree = (card, tpe) match {
+ case (DotDot, tpe @ IterableType(LiftedType(lift))) => mapF(toList(tree, tpe), lift)
+ case (DotDot, LiftedType(lift)) => toStats(lift(tree))
+ case (DotDotDot, tpe @ IterableType(inner)) => mapF(toList(tree, tpe), t => iterated(DotDot, t, inner))
+ case (DotDotDot, LiftedType(lift)) => mapF(toStats(lift(tree)), toStats)
+ case _ => global.abort("unreachable")
}
}
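The rows of the table above correspond to ordinary ..$ and ...$ splices; for example, using the runtime universe:

import scala.reflect.runtime.universe._

object SpliceCardDemo extends App {
  val args: List[Tree] = List(q"1", q"2")              // Iterable[T] with T <: Tree
  val argss: List[List[Tree]] = List(args, List(q"3"))
  println(show(q"f(..$args)"))   // f(1, 2)
  println(show(q"f(...$argss)")) // f(1, 2)(3)

  val xs: List[Int] = List(4, 5)                       // Iterable[T] with a Liftable[T]
  println(show(q"g(..$xs)"))     // g(4, 5), each element lifted via Liftable[Int]
}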
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 1bd9323752..fcb8734644 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -80,8 +80,20 @@ trait Parsers { self: Quasiquotes =>
}
import treeBuilder.{global => _, unit => _, _}
+ def quasiquoteParam(name: Name, flags: FlagSet = NoFlags) =
+ ValDef(Modifiers(flags), name.toTermName, Ident(tpnme.QUASIQUOTE_PARAM), EmptyTree)
+
// q"def foo($x)"
- override def allowTypelessParams = true
+ override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
+ if (isHole && lookingAhead { in.token == COMMA || in.token == RPAREN }) {
+ quasiquoteParam(ident(), implicitmod)
+ } else super.param(owner, implicitmod, caseParam)
+
+ // q"($x) => ..." && q"class X { selfie => }
+ override def convertToParam(tree: Tree): ValDef = tree match {
+ case Ident(name) if isHole(name) => quasiquoteParam(name)
+ case _ => super.convertToParam(tree)
+ }
// q"foo match { case $x }"
override def caseClause(): CaseDef =
@@ -160,17 +172,26 @@ trait Parsers { self: Quasiquotes =>
}
}
- object TermParser extends Parser {
- def entryPoint = { parser =>
- parser.templateOrTopStatSeq() match {
- case head :: Nil => Block(Nil, head)
- case lst => gen.mkTreeOrBlock(lst)
- }
+ /** Wrapper around a tree parsed in a q"..." quote. Needed to support ..$ splicing at the top level. */
+ object Q {
+ def apply(tree: Tree): Block = Block(Nil, tree).updateAttachment(Q)
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Block(Nil, contents) if tree.hasAttachment[Q.type] => Some(contents)
+ case _ => None
}
}
+ object TermParser extends Parser {
+ def entryPoint = parser => Q(gen.mkTreeOrBlock(parser.templateOrTopStatSeq()))
+ }
+
object TypeParser extends Parser {
- def entryPoint = _.typ()
+ def entryPoint = { parser =>
+ if (parser.in.token == EOF)
+ TypeTree()
+ else
+ parser.typ()
+ }
}
object CaseParser extends Parser {
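
A sketch of what the new parameter holes allow at the user level (illustrative names; the deconstruction line relies on the documented `(...$paramss)` pattern form):

    import scala.reflect.runtime.universe._

    object ParamHoleDemo extends App {
      val param  = q"val x: Int"                     // a ValDef used as a parameter
      val method = q"def foo($param): Int = x"       // hole in parameter position
      val anon   = q"($param) => x"                  // hole in function-parameter position

      val q"def $name(...$paramss): $tpt = $body" = method
      println(paramss)                               // roughly: List(List(val x: Int))
    }
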
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index bdb44ad9a2..825d0c04f3 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -17,7 +17,6 @@ trait Placeholders { self: Quasiquotes =>
// Step 1: Transform Scala source with holes into vanilla Scala source
- lazy val holeMap = new HoleMap()
lazy val posMap = mutable.ListMap[Position, (Int, Int)]()
lazy val code = {
val sb = new StringBuilder()
@@ -58,25 +57,27 @@ trait Placeholders { self: Quasiquotes =>
sb.toString
}
- class HoleMap {
- private var underlying = immutable.SortedMap[String, Hole]()
- private val accessed = mutable.Set[String]()
+ object holeMap {
+ private val underlying = mutable.LinkedHashMap.empty[String, Hole]
+ private val accessed = mutable.Set.empty[String]
def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
- def contains(key: Name) = underlying.contains(key.toString)
- def apply(key: Name) = {
- val s = key.toString
- accessed += s
- underlying(s)
- }
- def update(key: Name, hole: Hole) = {
+ def contains(key: Name): Boolean = underlying.contains(key.toString)
+ def apply(key: Name): Hole = {
+ val skey = key.toString
+ val value = underlying(skey)
+ accessed += skey
+ value
+ }
+ def update(key: Name, hole: Hole) =
underlying += key.toString -> hole
- }
- def get(key: Name) = {
- val s = key.toString
- accessed += s
- underlying.get(s)
- }
- def toList = underlying.toList
+ def get(key: Name): Option[Hole] = {
+ val skey = key.toString
+ underlying.get(skey).map { v =>
+ accessed += skey
+ v
+ }
+ }
+ def keysIterator: Iterator[TermName] = underlying.keysIterator.map(TermName(_))
}
// Step 2: Transform vanilla Scala AST into an AST with holes
@@ -95,7 +96,6 @@ trait Placeholders { self: Quasiquotes =>
case Ident(name) => name
case Bind(name, Ident(nme.WILDCARD)) => name
case TypeDef(_, name, List(), TypeBoundsTree(EmptyTree, EmptyTree)) => name
- case ValDef(_, name, TypeTree(), EmptyTree) => name
}
}
@@ -111,6 +111,12 @@ trait Placeholders { self: Quasiquotes =>
}
}
+ object ParamPlaceholder extends HolePlaceholder {
+ def matching = {
+ case ValDef(_, name, Ident(tpnme.QUASIQUOTE_PARAM), EmptyTree) => name
+ }
+ }
+
object TuplePlaceholder {
def unapply(tree: Tree): Option[List[Tree]] = tree match {
case Apply(Ident(nme.QUASIQUOTE_TUPLE), args) => Some(args)
@@ -174,4 +180,4 @@ trait Placeholders { self: Quasiquotes =>
case _ => None
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index 3e703924e8..396688c437 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -16,7 +16,7 @@ abstract class Quasiquotes extends Parsers
lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
case Apply(build.SyntacticTypeApplied(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), _), args0) =>
- debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
+ debug(s"parse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
val parts1 = parts0.map {
case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
@@ -43,8 +43,8 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
- debug(s"\nmacro application:\n${c.macroApplication}\n")
- debug(s"\ncode to parse:\n$code\n")
+ debug(s"macro application:\n${c.macroApplication}\n")
+ debug(s"code to parse:\n$code\n")
val tree = parse(code)
debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 87ab52414c..017e966f63 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -29,7 +29,7 @@ trait Reifiers { self: Quasiquotes =>
/** Map that stores freshly generated names linked to the corresponding names in the reified tree.
* This information is used to reify names created by calls to freshTermName and freshTypeName.
*/
- var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+ val nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
/** Wraps expressions into:
* a block which starts with a sequence of vals that correspond
@@ -71,7 +71,7 @@ trait Reifiers { self: Quasiquotes =>
// q"..$freshdefs; $tree"
SyntacticBlock(freshdefs :+ tree)
} else {
- val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val freevars = holeMap.keysIterator.map(Ident(_)).toList
val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
val cases =
if(isVarPattern) {
@@ -137,6 +137,7 @@ trait Reifiers { self: Quasiquotes =>
case RefineStatPlaceholder(hole) => reifyRefineStat(hole)
case EarlyDefPlaceholder(hole) => reifyEarlyDef(hole)
case PackageStatPlaceholder(hole) => reifyPackageStat(hole)
+ case ParamPlaceholder(hole) => hole.tree
// for enumerators are checked not during splicing but during
// desugaring of the for loop in SyntacticFor & SyntacticForYield
case ForEnumPlaceholder(hole) => hole.tree
@@ -159,8 +160,12 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
case SyntacticNew(earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
- case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
+ case SyntacticDefDef(mods, name, tparams, build.ImplicitParams(vparamss, implparams), tpt, rhs) =>
+ if (implparams.nonEmpty)
+ mirrorBuildCall(nme.SyntacticDefDef, reify(mods), reify(name), reify(tparams),
+ reifyBuildCall(nme.ImplicitParams, vparamss, implparams), reify(tpt), reify(rhs))
+ else
+ reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
case SyntacticVarDef(mods, name, tpt, rhs) =>
@@ -185,12 +190,18 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticFunction, args, body)
case SyntacticIdent(name, isBackquoted) =>
reifyBuildCall(nme.SyntacticIdent, name, isBackquoted)
- case Block(Nil, Placeholder(Hole(tree, DotDot))) =>
+ case SyntacticEmptyTypeTree() =>
+ reifyBuildCall(nme.SyntacticEmptyTypeTree)
+ case SyntacticImport(expr, selectors) =>
+ reifyBuildCall(nme.SyntacticImport, expr, selectors)
+ case Q(Placeholder(Hole(tree, DotDot))) =>
mirrorBuildCall(nme.SyntacticBlock, tree)
- case Block(Nil, other) =>
+ case Q(other) =>
reifyTree(other)
- case Block(stats, last) =>
- reifyBuildCall(nme.SyntacticBlock, stats :+ last)
+ // SyntacticBlock always matches, so we have to be careful
+ // not to cause infinite recursion.
+ case block @ SyntacticBlock(stats) if block.isInstanceOf[Block] =>
+ reifyBuildCall(nme.SyntacticBlock, stats)
case Try(block, catches, finalizer) =>
reifyBuildCall(nme.SyntacticTry, block, catches, finalizer)
case Match(selector, cases) =>
@@ -298,7 +309,7 @@ trait Reifiers { self: Quasiquotes =>
* > reifyMultiCardinalityList(lst) { ... } { ... }
* q"List($foo, $bar) ++ ${holeMap(qq$f3948f9s$1).tree}"
*/
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree
/** Reifies arbitrary list filling ..$x and ...$y holeMap when they are put
* in the correct position. Fallbacks to regular reification for non-high cardinality
@@ -311,6 +322,8 @@ trait Reifiers { self: Quasiquotes =>
case EarlyDefPlaceholder(h @ Hole(_, DotDot)) => reifyEarlyDef(h)
case PackageStatPlaceholder(h @ Hole(_, DotDot)) => reifyPackageStat(h)
case ForEnumPlaceholder(Hole(tree, DotDot)) => tree
+ case ParamPlaceholder(Hole(tree, DotDot)) => tree
+ case List(ParamPlaceholder(Hole(tree, DotDotDot))) => tree
case List(Placeholder(Hole(tree, DotDotDot))) => tree
} {
reify(_)
@@ -352,10 +365,10 @@ trait Reifiers { self: Quasiquotes =>
}
class ApplyReifier extends Reifier(isReifyingExpressions = true) {
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree =
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree =
if (xs.isEmpty) mkList(Nil)
else {
- def reifyGroup(group: List[T]): Tree = group match {
+ def reifyGroup(group: List[Any]): Tree = group match {
case List(elem) if fill.isDefinedAt(elem) => fill(elem)
case elems => mkList(elems.map(fallback))
}
@@ -394,14 +407,26 @@ trait Reifiers { self: Quasiquotes =>
}
class UnapplyReifier extends Reifier(isReifyingExpressions = false) {
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree = xs match {
- case init :+ last if fill.isDefinedAt(last) =>
- init.foldRight[Tree](fill(last)) { (el, rest) =>
- val cons = Select(Select(Select(Ident(nme.scala_), nme.collection), nme.immutable), nme.CONS)
- Apply(cons, List(fallback(el), rest))
- }
- case _ =>
- mkList(xs.map(fallback))
+ private def collection = ScalaDot(nme.collection)
+ private def collectionColonPlus = Select(collection, nme.COLONPLUS)
+ private def collectionCons = Select(Select(collection, nme.immutable), nme.CONS)
+ private def collectionNil = Select(Select(collection, nme.immutable), nme.Nil)
+ // pq"$lhs :+ $rhs"
+ private def append(lhs: Tree, rhs: Tree) = Apply(collectionColonPlus, lhs :: rhs :: Nil)
+ // pq"$lhs :: $rhs"
+ private def cons(lhs: Tree, rhs: Tree) = Apply(collectionCons, lhs :: rhs :: Nil)
+
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree = {
+ val grouped = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+ def appended(lst: List[Any], init: Tree) = lst.foldLeft(init) { (l, r) => append(l, fallback(r)) }
+ def prepended(lst: List[Any], init: Tree) = lst.foldRight(init) { (l, r) => cons(fallback(l), r) }
+ grouped match {
+ case init :: List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, prepended(init, fill(hole)))
+ case init :: List(hole) :: Nil if fill.isDefinedAt(hole) => prepended(init, fill(hole))
+ case List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, fill(hole))
+ case List(hole) :: Nil if fill.isDefinedAt(hole) => fill(hole)
+ case _ => prepended(xs, collectionNil)
+ }
}
override def reifyModifiers(m: Modifiers) =
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 35528a276d..c2aab19f18 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -4,7 +4,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index d028dcc21e..8a599bd8c7 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -7,5 +7,6 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index 4f67a22b8f..bf718c27cc 100644
--- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -64,9 +64,12 @@ trait ContextTrees { self: Global =>
def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
if (contexts.isEmpty) None
else {
+ // binary search on contexts, loop invar: lo <= hi, recursion metric: `hi - lo`
@tailrec
def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
- if (pos properlyPrecedes contexts(lo).pos)
+ // [SI-8239] enforce loop invariant & ensure recursion metric decreases monotonically on every recursion
+ if (lo > hi) previousSibling
+ else if (pos properlyPrecedes contexts(lo).pos)
previousSibling
else if (contexts(hi).pos properlyPrecedes pos)
Some(contexts(hi))
@@ -76,9 +79,18 @@ trait ContextTrees { self: Global =>
if (midpos includes pos)
Some(contexts(mid))
else if (midpos properlyPrecedes pos)
+ // recursion metric: (hi - ((lo + hi)/2 + 1)) < (hi - lo)
+ // since (hi - ((lo + hi)/2 + 1)) - (hi - lo) = lo - ((lo + hi)/2 + 1) < 0
+ // since 2*lo - lo - hi - 2 = lo - hi - 2 < 0
+ // since lo < hi + 2
+ // can violate lo <= hi, hence the lo > hi check at the top [SI-8239]
loop(mid + 1, hi, Some(contexts(mid)))
- else
+ else if (lo != hi) // avoid looping forever (lo == hi violates the recursion metric) [SI-8239]
+ // recursion metric: ((lo + hi)/2) - lo < (hi - lo)
+ // since ((lo + hi)/2) - lo - (hi - lo) = ((lo + hi)/2) - hi < 0
+ // since 2 * (((lo + hi)/2) - hi) = lo - hi < 0 since lo < hi
loop(lo, mid, previousSibling)
+ else previousSibling
}
}
loop(0, contexts.length - 1, None)
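
The invariant and metric comments above translate into a small standalone sketch over plain `(start, end)` intervals (simplified, not the compiler code; all names are illustrative):

    import scala.annotation.tailrec

    object GuardedSearch {
      /** Index of the interval containing `pos`, or -1 if there is none. */
      def find(intervals: IndexedSeq[(Int, Int)], pos: Int): Int = {
        @tailrec
        def loop(lo: Int, hi: Int): Int =
          if (lo > hi) -1                              // invariant guard: lo <= hi
          else {
            val mid = (lo + hi) / 2
            val (start, end) = intervals(mid)
            if (start <= pos && pos < end) mid
            else if (end <= pos) loop(mid + 1, hi)     // metric hi - lo strictly shrinks
            else if (lo != hi) loop(lo, mid)           // lo == hi here would loop forever
            else -1
          }
        if (intervals.isEmpty) -1 else loop(0, intervals.length - 1)
      }
    }
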
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index e7d87dd3fe..8a24f721d7 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -247,15 +247,17 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (kvs1 eq kvs)
- this
- else if (kvs1.isEmpty)
- HashMap.empty[A,B]
- else if(kvs1.tail.isEmpty) {
- val kv = kvs1.head
- new HashMap1[A,B](kv._1,hash,kv._2,kv)
- } else
- new HashMapCollision1(hash, kvs1)
+ kvs1.size match {
+ case 0 =>
+ HashMap.empty[A,B]
+ case 1 =>
+ val kv = kvs1.head
+ new HashMap1(kv._1,hash,kv._2,kv)
+ case x if x == kvs.size =>
+ this
+ case _ =>
+ new HashMapCollision1(hash, kvs1)
+ }
} else this
override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = {
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 90aabc5a9a..c3728fa02a 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -325,36 +325,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
// Create a proxy for Java serialization that allows us to avoid mutation
// during de-serialization. This is the Serialization Proxy Pattern.
- protected final def writeReplace(): AnyRef = new SerializationProxy(this)
-}
-
-@SerialVersionUID(1L)
-private class SerializationProxy[B](@transient private var orig: List[B]) extends Serializable {
-
- private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = orig
- while (!xs.isEmpty) {
- out.writeObject(xs.head)
- xs = xs.tail
- }
- out.writeObject(ListSerializeEnd)
- }
-
- // Java serialization calls this before readResolve during de-serialization.
- // Read the whole list and store it in `orig`.
- private def readObject(in: ObjectInputStream) {
- val builder = List.newBuilder[B]
- while (true) in.readObject match {
- case ListSerializeEnd =>
- orig = builder.result()
- return
- case a =>
- builder += a.asInstanceOf[B]
- }
- }
-
- // Provide the result stored in `orig` for Java serialization
- private def readResolve(): AnyRef = orig
+ protected final def writeReplace(): AnyRef = new List.SerializationProxy(this)
}
/** The empty list.
@@ -385,8 +356,7 @@ case object Nil extends List[Nothing] {
* @version 1.0, 15/07/2003
* @since 2.8
*/
-final case class ::[B](private val hd: B, private[scala] var tl: List[B]) extends List[B] {
- override def head : B = hd
+final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
override def tail : List[B] = tl
override def isEmpty: Boolean = false
}
@@ -405,6 +375,35 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
+
+ @SerialVersionUID(1L)
+ private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
+
+ private def writeObject(out: ObjectOutputStream) {
+ var xs: List[A] = orig
+ while (!xs.isEmpty) {
+ out.writeObject(xs.head)
+ xs = xs.tail
+ }
+ out.writeObject(ListSerializeEnd)
+ }
+
+ // Java serialization calls this before readResolve during de-serialization.
+ // Read the whole list and store it in `orig`.
+ private def readObject(in: ObjectInputStream) {
+ val builder = List.newBuilder[A]
+ while (true) in.readObject match {
+ case ListSerializeEnd =>
+ orig = builder.result()
+ return
+ case a =>
+ builder += a.asInstanceOf[A]
+ }
+ }
+
+ // Provide the result stored in `orig` for Java serialization
+ private def readResolve(): AnyRef = orig
+ }
}
/** Only used for list serialization */
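
For reference, a minimal sketch of the Serialization Proxy Pattern mentioned in the comment above, applied to a toy immutable wrapper rather than List (Box and its proxy are illustrative only, not library code):

    import java.io.{ObjectInputStream, ObjectOutputStream}

    final class Box[A](val value: A) extends Serializable {
      // serialize a lightweight proxy instead of Box itself
      protected def writeReplace(): AnyRef = new Box.SerializationProxy(this)
    }

    object Box {
      @SerialVersionUID(1L)
      private class SerializationProxy[A](@transient private var orig: Box[A]) extends Serializable {
        private def writeObject(out: ObjectOutputStream): Unit =
          out.writeObject(orig.value)
        // called during de-serialization, before readResolve
        private def readObject(in: ObjectInputStream): Unit =
          orig = new Box(in.readObject().asInstanceOf[A])
        // Java serialization substitutes the rebuilt Box for the proxy
        private def readResolve(): AnyRef = orig
      }
    }
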
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index b2a1b1ce29..7c40e84280 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -123,12 +123,12 @@ extends AbstractMap[A, B]
def hasNext = !self.isEmpty
def next(): (A,B) =
if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.tail; res }
+ else { val res = (self.key, self.value); self = self.next; res }
}.toList.reverseIterator
protected def key: A = throw new NoSuchElementException("empty map")
protected def value: B = throw new NoSuchElementException("empty map")
- override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
/** This class represents an entry in the `ListMap`.
*/
@@ -142,7 +142,7 @@ extends AbstractMap[A, B]
override def size: Int = size0(this, 0)
// to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
+ @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
/** Is this an empty map?
*
@@ -163,7 +163,7 @@ extends AbstractMap[A, B]
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
- else apply0(cur.tail, k)
+ else apply0(cur.next, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
@@ -175,7 +175,7 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
- else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
+ else if (cur.next.nonEmpty) get0(cur.next, k) else None
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
@@ -196,12 +196,12 @@ extends AbstractMap[A, B]
if (cur.isEmpty)
acc.last
else if (k == cur.key)
- (cur.tail /: acc) {
+ (cur.next /: acc) {
case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
}
else
- remove0(k, cur.tail, cur::acc)
+ remove0(k, cur.next, cur::acc)
- override def tail: ListMap[A, B1] = ListMap.this
+ override protected def next: ListMap[A, B1] = ListMap.this
}
}
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index e21a8dfa8a..0fbf7942d4 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -82,6 +82,16 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 2 */
@@ -102,6 +112,17 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 3 */
@@ -123,6 +144,18 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 4 */
@@ -145,6 +178,19 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3); f(elem4)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3) || f(elem4)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3) && f(elem4)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else if (f(elem4)) Some(elem4)
+ else None
+ }
}
}
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 49c3b4c3cd..60de147477 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1108,11 +1108,15 @@ object Stream extends SeqFactory[Stream] {
override def isEmpty = false
override def head = hd
@volatile private[this] var tlVal: Stream[A] = _
- def tailDefined: Boolean = tlVal ne null
+ @volatile private[this] var tlGen = tl _
+ def tailDefined: Boolean = tlGen eq null
override def tail: Stream[A] = {
if (!tailDefined)
synchronized {
- if (!tailDefined) tlVal = tl
+ if (!tailDefined) {
+ tlVal = tlGen()
+ tlGen = null
+ }
}
tlVal
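
A standalone sketch of the memoization trick above, outside of Stream (Memo is illustrative, not library code): keep the by-name thunk in a second field and drop it once the value is forced, so "is the tail already computed?" can be answered without evaluating it.

    final class Memo[A](body: => A) {
      @volatile private[this] var value: A = _
      @volatile private[this] var thunk: () => A = () => body
      def isForced: Boolean = thunk eq null
      def force: A = {
        if (!isForced) synchronized {
          if (!isForced) {
            value = thunk()   // publish the value first...
            thunk = null      // ...then drop the thunk (and anything it captured)
          }
        }
        value
      }
    }
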
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 43d46cf4d0..8e1d950d00 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -131,6 +131,7 @@ self =>
* end characters, i.e. apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
+ @deprecated("Use `lines` instead.","2.11.0")
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index df74bb5187..47fb66744e 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -22,9 +22,9 @@ import generic.CanBuildFrom
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
*
- * This map is not indended to contain more than 2^29 entries (approximately
- * 500 million). The maximum capacity is 2^30, but performance will degrade
- * rapidly as 2^30 is approached.
+ * This map is not intended to contain more than 2^29^ entries (approximately
+ * 500 million). The maximum capacity is 2^30^, but performance will degrade
+ * rapidly as 2^30^ is approached.
*
*/
final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean)
@@ -129,9 +129,20 @@ extends AbstractMap[K, V]
override def getOrElseUpdate(key: K, defaultValue: => V): V = {
val h = hashOf(key)
- val i = seekEntryOrOpen(h, key)
+ var i = seekEntryOrOpen(h, key)
if (i < 0) {
- val value = defaultValue
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val oh = _hashes
+ val ans = defaultValue
+ if (oh ne _hashes) {
+ i = seekEntryOrOpen(h, key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
_size += 1
val j = i & IndexMask
_hashes(j) = h
@@ -280,24 +291,21 @@ extends AbstractMap[K, V]
private[this] val vz = _values
private[this] var index = 0
- private[this] var found = false
- def hasNext = found || (index<hz.length && {
+ def hasNext: Boolean = index<hz.length && {
var h = hz(index)
- if (h+h != 0) found = true
- else {
+ while (h+h == 0) {
index += 1
- while (index < hz.length && { h = hz(index); h+h == 0 }) index += 1
- found = (index < hz.length)
+ if (index >= hz.length) return false
+ h = hz(index)
}
- found
- })
+ true
+ }
- def next = {
- if (found || hasNext) {
- val ans = (_keys(index).asInstanceOf[K], _values(index).asInstanceOf[V])
+ def next: (K, V) = {
+ if (hasNext) {
+ val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
index += 1
- found = false
ans
}
else throw new NoSuchElementException("next")
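
A minimal illustration of the hazard the re-seek above guards against, using only the public AnyRefMap API (LongMap below receives the same treatment): the by-name default both reads and writes the same map, so the hash table may resize while getOrElseUpdate is running.

    import scala.collection.mutable.AnyRefMap

    object GetOrElseUpdateDemo extends App {
      val cache = AnyRefMap.empty[String, Int]
      def sideEffectingDefault(): Int = {
        (1 to 100).foreach(i => cache(i.toString) = i)  // may resize the table mid-call
        -1
      }
      // with the fix above, the key is still stored correctly even after a resize
      cache.getOrElseUpdate("missing", sideEffectingDefault())
      assert(cache("missing") == -1)
    }
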
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 81c381279f..984ae6f7cc 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -160,9 +160,20 @@ extends AbstractMap[Long, V]
else minValue.asInstanceOf[V]
}
else {
- val i = seekEntryOrOpen(key)
+ var i = seekEntryOrOpen(key)
if (i < 0) {
- val value = defaultValue
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val ok = _keys
+ val ans = defaultValue
+ if (ok ne _keys) {
+ i = seekEntryOrOpen(key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
_size += 1
val j = i & IndexMask
_keys(j) = key
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index d3c5a6b019..a55432fd71 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -14,9 +14,45 @@ import scala.annotation.implicitNotFound
import scala.util.Try
/**
- * An `ExecutionContext` is an abstraction over an entity that can execute program logic.
+ * An `ExecutionContext` can execute program logic, typically but not
+ * necessarily on a thread pool.
+ *
+ * APIs such as `Future.onComplete` require you to provide a callback
+ * and an implicit `ExecutionContext`. The implicit `ExecutionContext`
+ * will be used to execute the callback.
+ *
+ * It is possible to simply import
+ * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an
+ * implicit `ExecutionContext`. This global context is a reasonable
+ * default thread pool.
+ *
+ * However, application developers should carefully consider where they
+ * want to set policy; ideally, one place per application (or per
+ * logically-related section of code) will make a decision about
+ * which `ExecutionContext` to use. That is, you might want to avoid
+ * hardcoding `scala.concurrent.ExecutionContext.Implicits.global` all
+ * over the place in your code.
+ * One approach is to add `(implicit ec: ExecutionContext)`
+ * to methods which need an `ExecutionContext`. Then import a specific
+ * context in one place for the entire application or module,
+ * passing it implicitly to individual methods.
+ *
+ * A custom `ExecutionContext` may be appropriate to execute code
+ * which blocks on IO or performs long-running computations.
+ * `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor`
+ * are good ways to create a custom `ExecutionContext`.
+ *
+ * The intent of `ExecutionContext` is to lexically scope code execution.
+ * That is, each method, class, file, package, or application determines
+ * how to run its own code. This avoids issues such as running
+ * application callbacks on a thread pool belonging to a networking library.
+ * The size of a networking library's thread pool can be safely configured,
+ * knowing that only that library's network operations will be affected.
+ * Application callback execution can be configured separately.
*/
-@implicitNotFound("Cannot find an implicit ExecutionContext, either import scala.concurrent.ExecutionContext.Implicits.global or use a custom one")
+@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass
+an (implicit ec: ExecutionContext) parameter to your method
+or import scala.concurrent.ExecutionContext.Implicits.global.""")
trait ExecutionContext {
/** Runs a block of code on this execution context.
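
A sketch of the style the new documentation recommends, using only the public API (EcStyle and fetchAndParse are illustrative names):

    import scala.concurrent.{ExecutionContext, Future}

    object EcStyle {
      // take the ExecutionContext implicitly rather than hardcoding the global one
      def fetchAndParse(url: String)(implicit ec: ExecutionContext): Future[Int] =
        Future(url.length).map(_ * 2)
    }

    object EcStyleUsage extends App {
      import scala.concurrent.ExecutionContext.Implicits.global  // chosen once, here
      EcStyle.fetchAndParse("http://example.org")
    }
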
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index d271c4cdeb..4ed0687334 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -494,9 +494,9 @@ object Future {
/** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
* Useful for reducing many `Future`s into a single `Future`.
*/
- def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
+ def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(successful(cbf(in))) {
- (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
+ (fr, fa) => for (r <- fr; a <- fa) yield (r += a)
} map (_.result())
}
@@ -569,9 +569,9 @@ object Future {
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
*/
- def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
+ def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
in.foldLeft(successful(cbf(in))) { (fr, a) =>
- val fb = fn(a.asInstanceOf[A])
+ val fb = fn(a)
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result())
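
Usage sketch of sequence and traverse with the tightened bound; with M[X] <: TraversableOnce[X] the element types line up and no casts are needed inside the combinators (FutureCombinators is an illustrative name):

    import scala.concurrent.{ExecutionContext, Future}

    object FutureCombinators {
      def sum(futures: List[Future[Int]])(implicit ec: ExecutionContext): Future[Int] =
        Future.sequence(futures).map(_.sum)

      def lengths(xs: List[String])(implicit ec: ExecutionContext): Future[List[Int]] =
        Future.traverse(xs)(s => Future(s.length))
    }
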
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 86132bb876..6743b9e42a 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -67,7 +67,21 @@ import java.util.regex.{ Pattern, Matcher }
* Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a
* pattern match.
*
- * Note, however, that when Regex is used as an extractor in a pattern match, it
+ * Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]]
+ * needs to be initialized (by calling `hasNext` or `next()`, or causing these to be
+ * called) before information about a match can be retrieved:
+ *
+ * {{{
+ * val msg = "I love Scala"
+ *
+ * // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException
+ *
+ * val matches = " ".r.findAllIn(msg)
+ * matches.hasNext // initializes the matcher
+ * val start = matches.start
+ * }}}
+ *
+ * When Regex is used as an extractor in a pattern match, note that it
* only succeeds if the whole text can be matched. For this reason, one usually
* calls a method to find the matching substrings, and then use it as an extractor
* to break match into subgroups.
@@ -267,6 +281,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* that can be queried for data such as the text that precedes the
* match, subgroups, etc.
*
+ * Attempting to retrieve information about a match before initializing
+ * the iterator can result in [[java.lang.IllegalStateException]]s. See
+ * [[scala.util.matching.Regex.MatchIterator]] for details.
+ *
* @param source The text to match against.
* @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
@@ -476,15 +494,7 @@ trait UnanchoredRegex extends Regex {
}
/** This object defines inner classes that describe
- * regex matches and helper objects. The class hierarchy
- * is as follows:
- *
- * {{{
- * MatchData
- * / \
- * MatchIterator Match
- * }}}
- *
+ * regex matches and helper objects.
*/
object Regex {
@@ -634,7 +644,15 @@ object Regex {
def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
}
- /** A class to step through a sequence of regex matches
+ /** A class to step through a sequence of regex matches.
+ *
+ * All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw
+ * a [[java.lang.IllegalStateException]] until the matcher is initialized. The
+ * matcher can be initialized by calling `hasNext` or `next()` or causing these
+ * methods to be called, such as by invoking `toString` or iterating through
+ * the iterator's elements.
+ *
+ * @see [[java.util.regex.Matcher]]
*/
class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 10c2def72a..ec20a89a10 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -72,6 +72,8 @@ private[reflect] trait BuildUtils { self: Universe =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+ def toStats(tree: Tree): List[Tree]
+
def mkAnnotation(tree: Tree): Tree
def mkAnnotation(trees: List[Tree]): List[Tree]
@@ -94,6 +96,13 @@ private[reflect] trait BuildUtils { self: Universe =>
def freshTypeName(prefix: String): TypeName
+ val ImplicitParams: ImplicitParamsExtractor
+
+ trait ImplicitParamsExtractor {
+ def apply(paramss: List[List[ValDef]], implparams: List[ValDef]): List[List[ValDef]]
+ def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])]
+ }
+
val ScalaDot: ScalaDotExtractor
trait ScalaDotExtractor {
@@ -126,8 +135,8 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
- constrMods: Modifiers, vparamss: List[List[Tree]], earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
+ constrMods: Modifiers, vparamss: List[List[Tree]],
+ earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -145,7 +154,7 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree
+ parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -153,7 +162,7 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticPackageObjectDefExtractor {
def apply(name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree
+ parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef
def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -189,17 +198,18 @@ private[reflect] trait BuildUtils { self: Universe =>
val SyntacticFunction: SyntacticFunctionExtractor
trait SyntacticFunctionExtractor {
- def apply(params: List[Tree], body: Tree): Tree
+ def apply(params: List[Tree], body: Tree): Function
- def unapply(tree: Tree): Option[(List[ValDef], Tree)]
+ def unapply(tree: Function): Option[(List[ValDef], Tree)]
}
val SyntacticDefDef: SyntacticDefDefExtractor
trait SyntacticDefDefExtractor {
- def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+ vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef
- def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)]
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
}
val SyntacticValDef: SyntacticValDefExtractor
@@ -238,6 +248,13 @@ private[reflect] trait BuildUtils { self: Universe =>
def unapply(tree: Tree): Option[(Tree)]
}
+ val SyntacticEmptyTypeTree: SyntacticEmptyTypeTreeExtractor
+
+ trait SyntacticEmptyTypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(tt: TypeTree): Boolean
+ }
+
val SyntacticFor: SyntacticForExtractor
val SyntacticForYield: SyntacticForExtractor
@@ -273,5 +290,11 @@ private[reflect] trait BuildUtils { self: Universe =>
def apply(name: Name, isBackquoted: Boolean = false): Ident
def unapply(tree: Ident): Option[(Name, Boolean)]
}
+
+ val SyntacticImport: SyntacticImportExtractor
+ trait SyntacticImportExtractor {
+ def apply(expr: Tree, selectors: List[Tree]): Import
+ def unapply(imp: Import): Some[(Tree, List[Tree])]
+ }
}
}
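
A sketch of what the ImplicitParams support means at the quasiquote level, assuming the standard 2.11 runtime universe (names are illustrative; the flag check is an assumption about how the implicit parameter list is represented on the resulting ValDefs):

    import scala.reflect.runtime.universe._

    object ImplicitParamsDemo extends App {
      val tree = q"def f(x: Int)(implicit s: String): String = s * x"
      val q"def $name(...$paramss): $tpt = $body" = tree
      println(paramss.last.head.mods.hasFlag(Flag.IMPLICIT))   // expected: true
    }
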
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
index 5667d93e29..6539137cee 100644
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -52,7 +52,7 @@ package api
* val imported = importer.importTree(tree)
*
* // after the tree is imported, it can be evaluated as usual
- * val tree = toolBox.resetAllAttrs(imported.duplicate)
+ * val tree = toolBox.untypecheck(imported.duplicate)
* val valueOfX = toolBox.eval(imported).asInstanceOf[T]
* ...
* }
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 83da5141b9..60e00ca5fd 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -1066,7 +1066,7 @@ trait Trees { self: Universe =>
* UnApply(
* // a dummy node that carries the type of unapplication to patmat
* // the <unapply-selector> here doesn't have an underlying symbol
- * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
* Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
* // arguments of the unapply => nothing synthetic here
* List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index 7457910226..be76758224 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -134,7 +134,7 @@ import scala.language.implicitConversions
* reflection APIs provided by Java (for classes) and Scala (for types).</li>
*
* <li>'''Certain manifest operations(i.e., <:<, >:> and typeArguments) are not
- * supported.''' <br/>Instead, one culd use the reflection APIs provided by Java (for
+ * supported.''' <br/>Instead, one could use the reflection APIs provided by Java (for
* classes) and Scala (for types).</li>
*</ul>
*
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 1c9b77581a..85a8ee0185 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -81,14 +81,14 @@ abstract class Universe extends Symbols
with Liftables
with Quasiquotes
{
- /** Use `refiy` to produce the abstract syntax tree representing a given Scala expression.
+ /** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
*
* For example:
*
* {{{
- * val five = reify{ 5 } // Literal(Constant(5))
- * reify{ 2 + 4 } // Apply( Select( Literal(Constant(2)), newTermName("\$plus")), List( Literal(Constant(4)) ) )
- * reify{ five.splice + 4 } // Apply( Select( Literal(Constant(5)), newTermName("\$plus")), List( Literal(Constant(4)) ) )
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 5.toString } // Apply(Select(Literal(Constant(5)), TermName("toString")), List())
+ * reify{ five.splice.toString } // Apply(Select(five, TermName("toString")), List())
* }}}
*
* The produced tree is path dependent on the Universe `reify` was called from.
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 4fde57ed02..d634034fe9 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -48,7 +48,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
- dropOtherAnnotations(annotations, cls).nonEmpty
+ dropOtherAnnotations(annotations, cls) ne Nil
def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
//OPT inlined from exists to save on #closures; was: annotations find (_ matches cls)
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 9b19dc11cb..c5581601de 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -33,19 +33,19 @@ trait BuildUtils { self: SymbolTable =>
}
def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- newFreeTermSymbol(newTermName(name), value, flags, origin)
+ newFreeTermSymbol(newTermName(name), value, flags, origin).markFlagsCompleted(mask = AllFlags)
def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), flags, origin)
+ newFreeTypeSymbol(newTypeName(name), flags, origin).markFlagsCompleted(mask = AllFlags)
def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
- owner.newNestedSymbol(name, pos, flags, isClass)
+ owner.newNestedSymbol(name, pos, flags, isClass).markFlagsCompleted(mask = AllFlags)
def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
sym.setAnnotations(annots)
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S =
- sym.setTypeSignature(tpe)
+ sym.setTypeSignature(tpe).markAllCompleted()
def This(sym: Symbol): Tree = self.This(sym)
@@ -61,6 +61,8 @@ trait BuildUtils { self: SymbolTable =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
+ def toStats(tree: Tree): List[Tree] = SyntacticBlock.unapply(tree).get
+
def mkAnnotation(tree: Tree): Tree = tree match {
case SyntacticNew(Nil, SyntacticApplied(SyntacticTypeApplied(_, _), _) :: Nil, noSelfType, Nil) =>
tree
@@ -71,18 +73,25 @@ trait BuildUtils { self: SymbolTable =>
def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation)
- def mkVparamss(argss: List[List[Tree]]): List[List[ValDef]] = argss.map(_.map(mkParam))
+ def mkParam(argss: List[List[Tree]], extraFlags: FlagSet = NoFlags): List[List[ValDef]] =
+ argss.map { args => args.map { mkParam(_, extraFlags) } }
- def mkParam(tree: Tree): ValDef = tree match {
+ def mkParam(tree: Tree, extraFlags: FlagSet): ValDef = tree match {
+ case Typed(Ident(name: TermName), tpt) =>
+ mkParam(ValDef(NoMods, name, tpt, EmptyTree), extraFlags)
case vd: ValDef =>
- var newmods = (vd.mods | PARAM) & (~DEFERRED)
+ var newmods = vd.mods & (~DEFERRED)
if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
- copyValDef(vd)(mods = newmods)
+ copyValDef(vd)(mods = newmods | extraFlags)
case _ =>
- throw new IllegalArgumentException(s"$tree is not valid represenation of function parameter, " +
+ throw new IllegalArgumentException(s"$tree is not valid representation of a parameter, " +
"""consider reformatting it into q"val $name: $T = $default" shape""")
}
+ def mkImplicitParam(args: List[Tree]): List[ValDef] = args.map(mkImplicitParam)
+
+ def mkImplicitParam(tree: Tree): ValDef = mkParam(tree, IMPLICIT | PARAM)
+
def mkTparams(tparams: List[Tree]): List[TypeDef] =
tparams.map {
case td: TypeDef => copyTypeDef(td)(mods = (td.mods | PARAM) & (~DEFERRED))
@@ -134,12 +143,22 @@ trait BuildUtils { self: SymbolTable =>
def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
- def freshTermName(prefix: String): TermName = self.freshTermName(prefix)
+ def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX): TermName = self.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = self.freshTypeName(prefix)
protected implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
+ object ImplicitParams extends ImplicitParamsExtractor {
+ def apply(paramss: List[List[ValDef]], implparams: List[ValDef]): List[List[ValDef]] =
+ if (implparams.nonEmpty) paramss :+ mkImplicitParam(implparams) else paramss
+
+ def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])] = vparamss match {
+ case init :+ (last @ (initlast :: _)) if initlast.mods.isImplicit => Some((init, last))
+ case _ => Some((vparamss, Nil))
+ }
+ }
+
object FlagsRepr extends FlagsReprExtractor {
def apply(bits: Long): FlagSet = bits
def unapply(flags: Long): Some[Long] = Some(flags)
@@ -239,14 +258,10 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticClassDef extends SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
- constrMods: Modifiers, vparamss: List[List[Tree]], earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
+ constrMods: Modifiers, vparamss: List[List[Tree]],
+ earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
- val vparamss0 = vparamss.map { _.map {
- case vd: ValDef => copyValDef(vd)(mods = (vd.mods | extraFlags) & (~DEFERRED))
- case tree => throw new IllegalArgumentException(s"$tree is not valid representation of class parameter, " +
- """consider reformatting it into q"val $name: $T = $default" shape""")
- } }
+ val vparamss0 = mkParam(vparamss, extraFlags)
val tparams0 = mkTparams(tparams)
val parents0 = gen.mkParents(mods,
if (mods.isCase) parents.filter {
@@ -289,7 +304,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticObjectDef extends SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]) =
+ parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef =
ModuleDef(mods, name, gen.mkTemplate(parents, mkSelfType(selfType), NoMods, Nil, earlyDefs ::: body))
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
@@ -302,7 +317,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
def apply(name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree =
+ parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef =
gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
@@ -381,27 +396,52 @@ trait BuildUtils { self: SymbolTable =>
}
}
+ object SyntheticUnit {
+ def unapply(tree: Tree): Boolean = tree match {
+ case Literal(Constant(())) if tree.hasAttachment[SyntheticUnitAttachment.type] => true
+ case _ => false
+ }
+ }
+
+ /** Syntactic combinator that abstracts over Block tree.
+ *
+ * Apart from providing a more straightforward API that exposes
+ * a block as a list of elements rather than a (stats, expr) pair,
+ * it also:
+ *
+ * 1. Treats q"" (the empty tree) as a zero-element block.
+ *
+ * 2. Strips trailing synthetic units which are inserted by the
+ * compiler if the block ends with a definition rather
+ * than an expression.
+ *
+ * 3. Matches non-block term trees and recognizes them as
+ * single-element blocks, for the sake of consistency with
+ * the compiler's default of treating single-element blocks
+ * with expressions as just expressions.
+ */
object SyntacticBlock extends SyntacticBlockExtractor {
- def apply(stats: List[Tree]): Tree = gen.mkBlock(stats)
+ def apply(stats: List[Tree]): Tree =
+ if (stats.isEmpty) EmptyTree
+ else gen.mkBlock(stats)
def unapply(tree: Tree): Option[List[Tree]] = tree match {
- case self.Block(stats, expr) => Some(stats :+ expr)
- case _ if tree.isTerm => Some(tree :: Nil)
- case _ => None
+ case self.Block(stats, SyntheticUnit()) => Some(stats)
+ case self.Block(stats, expr) => Some(stats :+ expr)
+ case EmptyTree => Some(Nil)
+ case _ if tree.isTerm => Some(tree :: Nil)
+ case _ => None
}
}
object SyntacticFunction extends SyntacticFunctionExtractor {
- def apply(params: List[Tree], body: Tree): Tree = {
- val params0 :: Nil = mkVparamss(params :: Nil)
+ def apply(params: List[Tree], body: Tree): Function = {
+ val params0 :: Nil = mkParam(params :: Nil, PARAM)
require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values")
Function(params0, body)
}
- def unapply(tree: Tree): Option[(List[ValDef], Tree)] = tree match {
- case Function(params, body) => Some((params, body))
- case _ => None
- }
+ def unapply(tree: Function): Option[(List[ValDef], Tree)] = Function.unapply(tree)
}
object SyntacticNew extends SyntacticNewExtractor {
@@ -420,11 +460,15 @@ trait BuildUtils { self: SymbolTable =>
}
object SyntacticDefDef extends SyntacticDefDefExtractor {
- def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef =
- DefDef(mods, name, mkTparams(tparams), mkVparamss(vparamss), tpt, rhs)
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+ vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef = {
+ val vparamss0 = mkParam(vparamss, PARAM)
+ DefDef(mods, name, mkTparams(tparams), vparamss0, tpt, rhs)
+ }
- def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)] = tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) => Some((mods, name, tparams, vparamss, tpt, rhs))
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] = tree match {
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ Some((mods, name, tparams, vparamss, tpt, rhs))
case _ => None
}
}
@@ -490,13 +534,15 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[Tree] = gen.Filter.unapply(tree)
}
- // abstract over possible alternative representations of no type in valdef
- protected object EmptyTypTree {
- def unapply(tree: Tree): Boolean = tree match {
- case EmptyTree => true
- case tt: TypeTree if (tt.original == null || tt.original.isEmpty) => true
- case _ => false
- }
+ // If a tree in type position isn't provided by the user (e.g. `tpt` fields of
+ // `ValDef` and `DefDef`, function params, etc.), then it's going to be parsed as a
+ // TypeTree with an empty original and an empty tpe. This extractor matches such trees
+ // so that one can write q"val x = 2" to match typecheck(q"val x = 2"). Note that
+ // TypeTree() is the only possible representation for empty trees in type positions.
+ // We used to sometimes receive EmptyTree in such cases, but not anymore.
+ object SyntacticEmptyTypeTree extends SyntacticEmptyTypeTreeExtractor {
+ def apply(): TypeTree = self.TypeTree()
+ def unapply(tt: TypeTree): Boolean = tt.original == null || tt.original.isEmpty
}
// match a sequence of desugared `val $pat = $value`
@@ -514,8 +560,8 @@ trait BuildUtils { self: SymbolTable =>
case ValDef(_, name1, _, Match(MaybeUnchecked(value), CaseDef(pat, EmptyTree, Ident(name2)) :: Nil)) :: UnPatSeq(rest)
if name1 == name2 =>
Some((pat, value) :: rest)
- // case q"${_} val $name: ${EmptyTypTree()} = $value" :: UnPatSeq(rest) =>
- case ValDef(_, name, EmptyTypTree(), value) :: UnPatSeq(rest) =>
+ // case q"${_} val $name: ${SyntacticEmptyTypeTree()} = $value" :: UnPatSeq(rest) =>
+ case ValDef(_, name, SyntacticEmptyTypeTree(), value) :: UnPatSeq(rest) =>
Some((Bind(name, self.Ident(nme.WILDCARD)), value) :: rest)
// case q"${_} val $name: $tpt = $value" :: UnPatSeq(rest) =>
case ValDef(_, name, tpt, value) :: UnPatSeq(rest) =>
@@ -557,8 +603,8 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
case Function(ValDef(Modifiers(PARAM, _, _), name, tpt, EmptyTree) :: Nil, body) =>
tpt match {
- case EmptyTypTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
- case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
+ case SyntacticEmptyTypeTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
+ case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
}
case UnVisitor(_, CaseDef(pat, EmptyTree, body) :: Nil) =>
Some((pat, body))
@@ -723,6 +769,146 @@ trait BuildUtils { self: SymbolTable =>
}
def unapply(tree: Ident): Some[(Name, Boolean)] = Some((tree.name, tree.hasAttachment[BackquotedIdentifierAttachment.type]))
}
+
+ /** Facade over Imports and ImportSelectors that lets one structurally
+ * deconstruct/reconstruct them.
+ *
+ * Selectors are represented in the following way:
+ * 1. q"import foo._" <==> q"import foo.${pq"_"}"
+ * 2. q"import foo.bar" <==> q"import foo.${pq"bar"}"
+ * 3. q"import foo.{bar => baz}" <==> q"import foo.${pq"bar -> baz"}"
+ * 4. q"import foo.{bar => _}" <==> q"import foo.${pq"bar -> _"}"
+ *
+ * All names in selectors are TermNames despite the fact that ImportSelector
+ * can theoretically contain TypeNames too (but they never do in practice).
+ */
+ object SyntacticImport extends SyntacticImportExtractor {
+ // construct/deconstruct {_} import selector
+ private object WildcardSelector {
+ def apply(offset: Int): ImportSelector = ImportSelector(nme.WILDCARD, offset, null, -1)
+ def unapply(sel: ImportSelector): Option[Int] = sel match {
+ case ImportSelector(nme.WILDCARD, offset, null, -1) => Some(offset)
+ case _ => None
+ }
+ }
+
+ // construct/deconstruct {foo} import selector
+ private object NameSelector {
+ def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, name, offset)
+ def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+ case ImportSelector(name1, offset1, name2, offset2) if name1 == name2 && offset1 == offset2 =>
+ Some((name1.toTermName, offset1))
+ case _ =>
+ None
+ }
+ }
+
+ // construct/deconstruct {foo => bar} import selector
+ private object RenameSelector {
+ def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector =
+ ImportSelector(name1, offset1, name2, offset2)
+ def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = sel match {
+ case ImportSelector(_, _, null | nme.WILDCARD, _) =>
+ None
+ case ImportSelector(name1, offset1, name2, offset2) if name1 != name2 =>
+ Some((name1.toTermName, offset1, name2.toTermName, offset2))
+ case _ =>
+ None
+ }
+ }
+
+ // construct/deconstruct {foo => _} import selector
+ private object UnimportSelector {
+ def apply(name: TermName, offset: Int): ImportSelector =
+ ImportSelector(name, offset, nme.WILDCARD, -1)
+ def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+ case ImportSelector(name, offset, nme.WILDCARD, _) => Some((name.toTermName, offset))
+ case _ => None
+ }
+ }
+
+ // represent {_} import selector as pq"_"
+ private object WildcardSelectorRepr {
+ def apply(pos: Position): Tree = atPos(pos)(self.Ident(nme.WILDCARD))
+ def unapply(tree: Tree): Option[Position] = tree match {
+ case self.Ident(nme.WILDCARD) => Some(tree.pos)
+ case _ => None
+ }
+ }
+
+ // represent {foo} import selector as pq"foo"
+ private object NameSelectorRepr {
+ def apply(name: TermName, pos: Position): Tree = atPos(pos)(Bind(name, WildcardSelectorRepr(pos)))
+ def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+ case Bind(name, WildcardSelectorRepr(_)) => Some((name.toTermName, tree.pos))
+ case _ => None
+ }
+ }
+
+ // pq"left -> right"
+ private object Arrow {
+ def apply(left: Tree, right: Tree): Apply =
+ Apply(self.Ident(nme.MINGT), left :: right :: Nil)
+ def unapply(tree: Apply): Option[(Tree, Tree)] = tree match {
+ case Apply(self.Ident(nme.MINGT), left :: right :: Nil) => Some((left, right))
+ case _ => None
+ }
+ }
+
+ // represent {foo => bar} import selector as pq"foo -> bar"
+ private object RenameSelectorRepr {
+ def apply(name1: TermName, pos1: Position, name2: TermName, pos2: Position): Tree = {
+ val left = NameSelectorRepr(name1, pos1)
+ val right = NameSelectorRepr(name2, pos2)
+ atPos(wrappingPos(left :: right :: Nil))(Arrow(left, right))
+ }
+ def unapply(tree: Tree): Option[(TermName, Position, TermName, Position)] = tree match {
+ case Arrow(NameSelectorRepr(name1, pos1), NameSelectorRepr(name2, pos2)) =>
+ Some((name1.toTermName, pos1, name2.toTermName, pos2))
+ case _ =>
+ None
+ }
+ }
+
+ // represent {foo => _} import selector as pq"foo -> _"
+ private object UnimportSelectorRepr {
+ def apply(name: TermName, pos: Position): Tree =
+ atPos(pos)(Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(pos)))
+ def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+ case Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(_)) =>
+ Some((name, pos))
+ case _ =>
+ None
+ }
+ }
+
+ private def derivedPos(t: Tree, offset: Int): Position =
+ if (t.pos == NoPosition) NoPosition else t.pos.withPoint(offset)
+
+ private def derivedOffset(pos: Position): Int =
+ if (pos == NoPosition) -1 else pos.point
+
+ def apply(expr: Tree, selectors: List[Tree]): Import = {
+ val importSelectors = selectors.map {
+ case WildcardSelectorRepr(pos) => WildcardSelector(derivedOffset(pos))
+ case NameSelectorRepr(name, pos) => NameSelector(name, derivedOffset(pos))
+ case RenameSelectorRepr(name1, pos1, name2, pos2) => RenameSelector(name1, derivedOffset(pos1), name2, derivedOffset(pos2))
+ case UnimportSelectorRepr(name, pos) => UnimportSelector(name, derivedOffset(pos))
+ case tree => throw new IllegalArgumentException(s"${showRaw(tree)} doesn't correspond to import selector")
+ }
+ Import(expr, importSelectors)
+ }
+
+ def unapply(imp: Import): Some[(Tree, List[Tree])] = {
+ val selectors = imp.selectors.map {
+ case WildcardSelector(offset) => WildcardSelectorRepr(derivedPos(imp, offset))
+ case NameSelector(name, offset) => NameSelectorRepr(name, derivedPos(imp, offset))
+ case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2))
+ case UnimportSelector(name, offset) => UnimportSelectorRepr(name, derivedPos(imp, offset))
+ }
+ Some((imp.expr, selectors))
+ }
+ }
}
val build: BuildImpl = new BuildImpl
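As a usage-level illustration of the selector encoding documented in SyntacticImport above (a sketch, not part of this patch), import quasiquotes deconstruct into exactly those pq-style selectors:

    import scala.reflect.runtime.universe._

    // Construction and deconstruction both go through SyntacticImport, so the
    // selectors come back in the pq"..." encoding listed in the facade's doc.
    val q"import $ref.{..$sels}" = q"import foo.{bar => baz, _}"
    // ref:  Ident(TermName("foo"))
    // sels: List(pq"bar -> baz", pq"_")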
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 5b06239863..7a0c70caf6 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -30,12 +30,12 @@ trait Definitions extends api.StandardDefinitions {
private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
- clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
+ clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted
}
private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val params = msym.newSyntheticValueParams(formals)
- msym setInfo MethodType(params, restpe)
+ msym setInfo MethodType(params, restpe) markAllCompleted
}
private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol =
owner.info.decls enter newMethod(owner, name, formals, restpe, flags)
@@ -251,8 +251,8 @@ trait Definitions extends api.StandardDefinitions {
}
// top types
- lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)
+ lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted
lazy val ObjectClass = getRequiredClass(sn.Object.toString)
// Cached types for core monomorphic classes
@@ -275,7 +275,7 @@ trait Definitions extends api.StandardDefinitions {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
anyval.info.decls enter av_constr
- anyval
+ anyval markAllCompleted
}).asInstanceOf[ClassSymbol]
def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
@@ -287,8 +287,10 @@ trait Definitions extends api.StandardDefinitions {
locally {
this initFlags ABSTRACT | FINAL
this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
+ this markAllCompleted
}
final override def isBottomClass = true
+ final override def isThreadsafe(purpose: SymbolOps): Boolean = true
}
final object NothingClass extends BottomClassSymbol(tpnme.Nothing, AnyClass) {
override def isSubClass(that: Symbol) = true
@@ -357,7 +359,7 @@ trait Definitions extends api.StandardDefinitions {
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
- lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL)
+ lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) markAllCompleted
lazy val SerializableClass = requiredClass[scala.Serializable]
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
@@ -625,6 +627,7 @@ trait Definitions extends api.StandardDefinitions {
def isBlackboxMacroBundleType(tp: Type) =
isMacroBundleType(tp) && (macroBundleParamInfo(tp) <:< BlackboxContextClass.tpe)
+ def isListType(tp: Type) = tp <:< classExistentialType(ListClass)
def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass)
// These "direct" calls perform no dealiasing. They are most needed when
@@ -1126,6 +1129,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val AnnotationDefaultAttr: ClassSymbol = {
val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
+ markAllCompleted(sym)
RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
case existing :: _ =>
existing.asInstanceOf[ClassSymbol]
@@ -1225,7 +1229,7 @@ trait Definitions extends api.StandardDefinitions {
val tparam = clazz.newSyntheticTypeParam("T0", flags)
val parents = List(AnyRefTpe, parentFn(tparam))
- clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
+ clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) markAllCompleted
}
def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = {
@@ -1236,7 +1240,7 @@ trait Definitions extends api.StandardDefinitions {
case (_, restpe) => NullaryMethodType(restpe)
}
- msym setInfoAndEnter genPolyType(tparams, mtpe)
+ msym setInfoAndEnter genPolyType(tparams, mtpe) markAllCompleted
}
/** T1 means one type parameter.
diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala
index 1de8d425ad..7e9a568266 100644
--- a/src/reflect/scala/reflect/internal/FreshNames.scala
+++ b/src/reflect/scala/reflect/internal/FreshNames.scala
@@ -8,7 +8,7 @@ package internal
import scala.reflect.internal.util.FreshNameCreator
-trait FreshNames { self: Names =>
+trait FreshNames { self: Names with StdNames =>
// SI-6879 Keeps track of counters that are supposed to be globally unique
// as opposed to traditional freshers that are unique to compilation units.
val globalFreshNameCreator = new FreshNameCreator
@@ -17,8 +17,8 @@ trait FreshNames { self: Names =>
def currentFreshNameCreator: FreshNameCreator
// create fresh term/type name using implicit fresh name creator
- def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
- def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
+ def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX)(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
+ def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
// Extractor that matches names which were generated by some
// FreshNameCreator with known prefix. Extracts user-specified
@@ -36,4 +36,4 @@ trait FreshNames { self: Names =>
else Some(NameTransformer.decode(sname.replaceFirst(quotedCreatorPrefix, "").replaceAll("\\d*$", "")))
}
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 483d0dd656..ff91b08ea1 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -4,6 +4,7 @@ package internal
import scala.collection.mutable.WeakHashMap
import scala.ref.WeakReference
+import scala.reflect.internal.Flags._
// SI-6241: move importers to a mirror
trait Importers extends api.Importers { to: SymbolTable =>
@@ -87,6 +88,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
}
my setInfo GenPolyType(mytypeParams, importType(theirCore))
my setAnnotations (their.annotations map importAnnotationInfo)
+ markAllCompleted(my)
}
}
} finally {
@@ -142,6 +144,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
}
symMap.weakUpdate(their, my)
+ markFlagsCompleted(my)(mask = AllFlags)
my setInfo recreatedSymbolCompleter(my, their)
}
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 4075653674..73ce59feb2 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -345,6 +345,13 @@ trait Names extends api.Names {
i += 1
i == prefix.length
}
+ final def startsWith(prefix: String, start: Int): Boolean = {
+ var i = 0
+ while (i < prefix.length && start + i < len &&
+ chrs(index + start + i) == prefix.charAt(i))
+ i += 1
+ i == prefix.length
+ }
/** Does this name end with suffix? */
final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
@@ -357,6 +364,13 @@ trait Names extends api.Names {
i += 1
i > suffix.length
}
+ final def endsWith(suffix: String, end: Int): Boolean = {
+ var i = 1
+ while (i <= suffix.length && i <= end &&
+ chrs(index + end - i) == suffix.charAt(suffix.length - i))
+ i += 1
+ i > suffix.length
+ }
final def containsName(subname: String): Boolean = containsName(newTermName(subname))
final def containsName(subname: Name): Boolean = {
@@ -382,9 +396,9 @@ trait Names extends api.Names {
final def startChar: Char = this charAt 0
final def endChar: Char = this charAt len - 1
final def startsWith(char: Char): Boolean = len > 0 && startChar == char
- final def startsWith(name: String): Boolean = startsWith(newTermName(name))
+ final def startsWith(name: String): Boolean = startsWith(name, 0)
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
- final def endsWith(name: String): Boolean = endsWith(newTermName(name))
+ final def endsWith(name: String): Boolean = endsWith(name, len)
/** Rewrite the confusing failure indication via result == length to
* the normal failure indication via result == -1.
@@ -443,9 +457,10 @@ trait Names extends api.Names {
}
/** TODO - find some efficiency. */
- def append(ch: Char) = newName("" + this + ch)
- def append(suffix: String) = newName("" + this + suffix)
- def append(suffix: Name) = newName("" + this + suffix)
+ def append(ch: Char) = newName(toString + ch)
+ def append(suffix: String) = newName(toString + suffix)
+ def append(suffix: Name) = newName(toString + suffix)
+ def append(separator: Char, suffix: Name) = newName(toString + separator + suffix)
def prepend(prefix: String) = newName("" + prefix + this)
def decodedName: ThisNameType = newName(decode)
@@ -463,7 +478,7 @@ trait Names extends api.Names {
*/
final class NameOps[T <: Name](name: T) {
import NameTransformer._
- def stripSuffix(suffix: String): T = stripSuffix(suffix: TermName)
+ def stripSuffix(suffix: String): T = if (name endsWith suffix) dropRight(suffix.length) else name // OPT avoid creating a Name with `suffix`
def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
def take(n: Int): T = name.subName(0, n).asInstanceOf[T]
def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
@@ -500,21 +515,21 @@ trait Names extends api.Names {
/** TermName_S and TypeName_S have fields containing the string version of the name.
* TermName_R and TypeName_R recreate it each time toString is called.
*/
- private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+ private final class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
override def newName(str: String): TermName = newTermNameCached(str)
}
- private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+ private final class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
override def newName(str: String): TypeName = newTypeNameCached(str)
}
- private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+ private final class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
override def toString = new String(chrs, index, len)
}
- private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+ private final class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
override def toString = new String(chrs, index, len)
}
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 14db252a16..009bc39d4c 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -6,6 +6,9 @@ import settings.MutableSettings
trait Required { self: SymbolTable =>
def picklerPhase: Phase
+
+ def erasurePhase: Phase
+
def settings: MutableSettings
@deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 09fd996f39..139a79ffe1 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -36,6 +36,10 @@ trait StdAttachments {
*/
case object ForAttachment extends PlainAttachment
+ /** Identifies unit constants which were inserted by the compiler (e.g. gen.mkBlock)
+ */
+ case object SyntheticUnitAttachment extends PlainAttachment
+
/** Untyped list of subpatterns attached to selector dummy. */
case class SubpatternsAttachment(patterns: List[Tree])
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 7015105261..679186f938 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -53,14 +53,17 @@ trait StdNames {
*
* We obtain the formula:
*
- * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
+ * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + suffixLength
*
- * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+ * (+suffixLength for ".class" and a potential module class suffix that is added *after* this transform).
+ *
+ * MaxNameLength can therefore be computed as follows:
*/
val marker = "$$$$"
+ val maxSuffixLength = "$.class".length + 1 // potential module class suffix and file extension
val MaxNameLength = math.min(
- settings.maxClassfileName.value - 6,
- 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
+ settings.maxClassfileName.value - maxSuffixLength,
+ 2 * (settings.maxClassfileName.value - maxSuffixLength - 2*marker.length - 32)
)
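For concreteness, plugging in the (assumed) default -Xmax-classfile-name limit of 255 gives the following; this is a quick sanity check of the formula above, not code from the patch:

    val maxClassfileName = 255                   // assumed default of -Xmax-classfile-name
    val marker           = "$$$$"
    val maxSuffixLength  = "$.class".length + 1  // 8
    val MaxNameLength = math.min(
      maxClassfileName - maxSuffixLength,                                  // 247
      2 * (maxClassfileName - maxSuffixLength - 2 * marker.length - 32))   // 414
    // MaxNameLength == 247, leaving room for the module class suffix and ".class"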
def toMD5(s: String, edge: Int): String = {
val prefix = s take edge
@@ -250,12 +253,13 @@ trait StdNames {
final val Quasiquote: NameType = "Quasiquote"
// quasiquote-specific names
- final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
- final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
- final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
- final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
final val QUASIQUOTE_PACKAGE_STAT: NameType = "$quasiquote$package$stat$"
+ final val QUASIQUOTE_PARAM: NameType = "$quasiquote$param$"
+ final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -291,22 +295,23 @@ trait StdNames {
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Base strings from which synthetic names are derived. */
- val BITMAP_PREFIX = "bitmap$"
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
- val DO_WHILE_PREFIX = "doWhile$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val EXPAND_SEPARATOR_STRING = "$$"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
- val PROTECTED_PREFIX = "protected$"
- val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SUPER_PREFIX_STRING = "super$"
- val WHILE_PREFIX = "while$"
- val FRESH_PREFIX = "fresh"
- val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names
+ val BITMAP_PREFIX = "bitmap$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val FRESH_TERM_NAME_PREFIX = "x$"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
+ val PROTECTED_PREFIX = "protected$"
+ val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
+ val SUPER_PREFIX_STRING = "super$"
+ val WHILE_PREFIX = "while$"
+ val FRESH_PREFIX = "fresh"
+ val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names
// Compiler internal names
val ANYname: NameType = "<anyname>"
@@ -577,6 +582,7 @@ trait StdNames {
val Flag : NameType = "Flag"
val FlagsRepr: NameType = "FlagsRepr"
val Ident: NameType = "Ident"
+ val ImplicitParams: NameType = "ImplicitParams"
val Import: NameType = "Import"
val Literal: NameType = "Literal"
val LiteralAnnotArg: NameType = "LiteralAnnotArg"
@@ -597,12 +603,14 @@ trait StdNames {
val SyntacticBlock: NameType = "SyntacticBlock"
val SyntacticClassDef: NameType = "SyntacticClassDef"
val SyntacticDefDef: NameType = "SyntacticDefDef"
+ val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree"
val SyntacticFilter: NameType = "SyntacticFilter"
val SyntacticFor: NameType = "SyntacticFor"
val SyntacticForYield: NameType = "SyntacticForYield"
val SyntacticFunction: NameType = "SyntacticFunction"
val SyntacticFunctionType: NameType = "SyntacticFunctionType"
val SyntacticIdent: NameType = "SyntacticIdent"
+ val SyntacticImport: NameType = "SyntacticImport"
val SyntacticMatch: NameType = "SyntacticMatch"
val SyntacticNew: NameType = "SyntacticNew"
val SyntacticObjectDef: NameType = "SyntacticObjectDef"
@@ -705,9 +713,9 @@ trait StdNames {
val materializeTypeTag: NameType = "materializeTypeTag"
val moduleClass : NameType = "moduleClass"
val mkAnnotation: NameType = "mkAnnotation"
- val mkRefineStat: NameType = "mkRefineStat"
val mkEarlyDef: NameType = "mkEarlyDef"
val mkPackageStat: NameType = "mkPackageStat"
+ val mkRefineStat: NameType = "mkRefineStat"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
@@ -752,6 +760,7 @@ trait StdNames {
val toArray: NameType = "toArray"
val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
+ val toStats: NameType = "toStats"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
@@ -816,31 +825,33 @@ trait StdNames {
def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
// ASCII names for operators
- val ADD = encode("+")
- val AND = encode("&")
- val ASR = encode(">>")
- val CONS = encode("::")
- val DIV = encode("/")
- val EQ = encode("==")
- val EQL = encode("=")
- val GE = encode(">=")
- val GT = encode(">")
- val HASHHASH = encode("##")
- val LE = encode("<=")
- val LSL = encode("<<")
- val LSR = encode(">>>")
- val LT = encode("<")
- val MINUS = encode("-")
- val MOD = encode("%")
- val MUL = encode("*")
- val NE = encode("!=")
- val OR = encode("|")
- val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
- val PLUSPLUS = encode("++")
- val SUB = MINUS // ... as does SUB with PLUS
- val XOR = encode("^")
- val ZAND = encode("&&")
- val ZOR = encode("||")
+ val ADD = encode("+")
+ val AND = encode("&")
+ val ASR = encode(">>")
+ val CONS = encode("::")
+ val COLONPLUS = encode(":+")
+ val DIV = encode("/")
+ val EQ = encode("==")
+ val EQL = encode("=")
+ val GE = encode(">=")
+ val GT = encode(">")
+ val HASHHASH = encode("##")
+ val LE = encode("<=")
+ val LSL = encode("<<")
+ val LSR = encode(">>>")
+ val LT = encode("<")
+ val MINUS = encode("-")
+ val MINGT = encode("->")
+ val MOD = encode("%")
+ val MUL = encode("*")
+ val NE = encode("!=")
+ val OR = encode("|")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val PLUSPLUS = encode("++")
+ val SUB = MINUS // ... as does SUB with PLUS
+ val XOR = encode("^")
+ val ZAND = encode("&&")
+ val ZOR = encode("||")
// unary operators
val UNARY_~ = encode("unary_~")
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index b538648b36..c088e8f57c 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -125,7 +125,7 @@ abstract class SymbolPairs {
* considered as a (lo, high) pair? Types always match. Term symbols
* match if their member types relative to `self` match.
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean
+ protected def matches(lo: Symbol, high: Symbol): Boolean
/** The parents and base classes of `base`. Can be refined in subclasses.
*/
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index f49ddaf6ca..2969bd92de 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -55,16 +55,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
new FreeTypeSymbol(name, origin) initFlags flags
- /** Determines whether the given information request should trigger the given symbol's completer.
- * See comments to `Symbol.needsInitialize` for details.
- */
- protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
- completer match {
- case null => false
- case _: FlagAgnosticCompleter => !isFlagRelated
- case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}")
- }
-
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
*/
@@ -106,18 +96,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def knownDirectSubclasses = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
children
}
def selfType = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
typeOfThis
}
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
+
+ // Automatic full initialization on access to info from the reflection API is a double-edged sword.
+ // On the one hand, it's convenient: users don't have to deal with initialization themselves before printing things out
+ // (e.g. printing out a method's signature without fully initializing it would result in <_>'s for parameters).
+ // On the other hand, this strategy can potentially cause unexpected effects, because it is inconsistent with the compiler's behavior.
+ // So far I think user convenience outweighs the scariness, but we need to keep the tradeoff in mind.
+ // NOTE: if you end up removing the call to fullyInitializeSymbol, consider that it would affect both runtime reflection and macros
def typeSignature: Type = { fullyInitializeSymbol(this); info }
def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
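A small runtime-reflection illustration of the convenience described above (not a test from this patch; the printed signature is only indicative):

    import scala.reflect.runtime.universe._

    // typeSignature fully initializes the symbol first, so the signature shows
    // real parameter and result types instead of <_> placeholders.
    val getOrElse = typeOf[Option[Int]].member(TermName("getOrElse"))
    println(getOrElse.typeSignature)   // e.g. [B >: Int](default: => B)B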
@@ -140,6 +139,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
with Annotatable[Symbol]
with Attachable {
+ // makes sure that all symbols that runtime reflection deals with are synchronized
+ private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol]
+ private def isAprioriThreadsafe = isThreadsafe(AllOps)
+ assert(isCompilerUniverse || isSynchronized || isAprioriThreadsafe, s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe")
+
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
@@ -609,20 +613,55 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
&& isTopLevel
&& nme.isReplWrapperName(name)
)
+
+ /** In our current architecture, symbols for top-level classes and modules
+ * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
+ * consider their job done.
+ *
+ * In order for such a dummy to provide meaningful info (e.g. a list of its members),
+ * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
+ * from ScalaSignature annotations and assigning it to symbols and types.
+ *
+ * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
+ * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
+ * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
+ * classes/modules nested into those and so on).
+ *
+ * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
+ * This happens because package symbols assign completer thunks to the dummies they create.
+ * Therefore metadata loading happens lazily and transparently.
+ *
+ * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
+ * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
+ * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialized symbol
+ * produces incorrect results.
+ *
+ * One might think that the solution is simple: automatically call the completer
+ * whenever one needs flags, annotations and privateWithin - just like it's done for typeSignature.
+ * Unfortunately, this leads to weird crashes in scalac, and currently we can't attempt
+ * to fix the core of the compiler and risk stability a few weeks before the final release.
+ * upd. Haha, "a few weeks before the final release". This surely sounds familiar :)
+ *
+ * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something
+ * we'd like to expose to reflection users. Therefore the proposed solution is to check whether we're in a
+ * runtime reflection universe, and if so and we've not yet loaded the requested info, to commence initialization.
+ */
final def getFlag(mask: Long): Long = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
flags & mask
}
/** Does symbol have ANY flag in `mask` set? */
final def hasFlag(mask: Long): Boolean = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
(flags & mask) != 0
}
def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong)
/** Does symbol have ALL the flags in `mask` set? */
final def hasAllFlags(mask: Long): Boolean = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
(flags & mask) == mask
}
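From the user's side, the effect of routing these accessors through isThreadsafe/initialize is roughly the following (an illustration under the assumption of a plain runtime-reflection universe, not a test from this patch):

    import scala.reflect.runtime.universe._

    // Flag and annotation queries on a symbol whose metadata hasn't been
    // unpickled yet now trigger initialization on demand, so no explicit
    // sym.initialize is required before asking flag-related questions.
    val listClass = typeOf[List[Int]].typeSymbol.asClass
    println(listClass.isSealed)      // flag bits are loaded lazily
    println(listClass.annotations)   // annotations likewise force initialization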
@@ -789,7 +828,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
*/
- def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
/** Does this symbol denote the primary constructor of its enclosing class? */
@@ -943,12 +982,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
- /** Some completers call sym.setInfo when still in-flight and then proceed with initialization (e.g. see LazyPackageType)
- * setInfo sets _validTo to current period, which means that after a call to setInfo isInitialized will start returning true.
- * Unfortunately, this doesn't mean that info becomes ready to be used, because subsequent initialization might change the info.
- * Therefore we need this method to distinguish between initialized and really initialized symbol states.
+ /** We consider a symbol to be thread-safe when multiple concurrent threads can call its methods
+ * (either directly or indirectly via public reflection or internal compiler infrastructure)
+ * without any locking, and everything works as it should.
+ *
+ * In its basic form, `isThreadsafe` always returns false. Runtime reflection augments the reflection infrastructure
+ * with a thread-safety-tracking mechanism, implemented in `SynchronizedSymbol`, that communicates with the underlying completers
+ * and can sometimes return true if the symbol has been completed to the point of thread safety.
+ *
+ * The `purpose` parameter signifies whether we only want to check the immutability of certain flags for the given mask.
+ * This is necessary to enable robust auto-initialization of `Symbol.flags` for runtime reflection, and is also quite handy
+ * in avoiding unnecessary initializations when requesting flags that have already been set.
*/
- final def isFullyInitialized: Boolean = _validTo != NoPeriod && (flags & LOCKED) == 0
+ def isThreadsafe(purpose: SymbolOps): Boolean = false
+ def markFlagsCompleted(mask: Long): this.type = this
+ def markAllCompleted(): this.type = this
/** Can this symbol be loaded by a reflective mirror?
*
@@ -1102,7 +1150,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def fullNameInternal(separator: Char): Name = (
if (isRoot || isRootPackage || this == NoSymbol) name
else if (owner.isEffectiveRoot) name
- else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
+ else effectiveOwner.enclClass.fullNameAsName(separator) append (separator, name)
)
def fullNameAsName(separator: Char): Name = fullNameInternal(separator).dropLocal
@@ -1225,7 +1273,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
private[this] var _privateWithin: Symbol = _
def privateWithin = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
_privateWithin
}
def privateWithin_=(sym: Symbol) { _privateWithin = sym }
@@ -1483,46 +1532,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
catch { case _: CyclicReference => debuglog("Hit cycle in maybeInitialize of $this") ; false }
}
- /** Called when the programmer requests information that might require initialization of the underlying symbol.
- *
- * `isFlagRelated` and `mask` describe the nature of this information.
- * isFlagRelated = true means that the programmer needs particular bits in flags.
- * isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin).
- *
- * In our current architecture, symbols for top-level classes and modules
- * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
- * consider their job done.
- *
- * In order for such a dummy to provide meaningful info (e.g. a list of its members),
- * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
- * from ScalaSignature annotations and assigning it to symbols and types.
- *
- * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
- * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
- * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
- * classes/modules nested into those and so on).
- *
- * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
- * This happens because package symbols assign completer thunks to the dummies they create.
- * Therefore metadata loading happens lazily and transparently.
- *
- * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
- * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
- * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialize symbol
- * produces incorrect results.
- *
- * One might think that the solution is simple: automatically call the completer whenever one needs
- * flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this
- * leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler
- * risk stability a few weeks before the final release.
- *
- * However we do need to fix this for runtime reflection, since it's not something we'd like to
- * expose to reflection users. Therefore a proposed solution is to check whether we're in a
- * runtime reflection universe and if yes then to commence initialization.
- */
- protected def needsInitialize(isFlagRelated: Boolean, mask: Long) =
- !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
-
/** Was symbol's type updated during given phase? */
final def hasTypeAt(pid: Phase#Id): Boolean = {
assert(isCompilerUniverse)
@@ -1681,7 +1690,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the annotations attached to member a definition (class, method, type, field).
*/
def annotations: List[AnnotationInfo] = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
_annotations
}
@@ -3404,8 +3414,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the new list of info-adjusted symbols
*/
def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = {
- val syms1 = syms map symFn
- syms1 map (_ substInfo (syms, syms1))
+ val syms1 = mapList(syms)(symFn)
+ mapList(syms1)(_ substInfo (syms, syms1))
}
/** Derives a new Type by first deriving new symbols as in deriveSymbols,
@@ -3445,9 +3455,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the newly created, info-adjusted symbols
*/
def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
- cloneSymbols(syms) map (_ modifyInfo infoFn)
+ mapList(cloneSymbols(syms))(_ modifyInfo infoFn)
def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
- cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
+ mapList(cloneSymbolsAtOwner(syms, owner))(_ modifyInfo infoFn)
/** Functions which perform the standard clone/substituting on the given symbols and type,
* then call the creator function with the new symbols and type as arguments.
@@ -3510,6 +3520,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
Statistics.newView("#symbols")(ids)
+
+// -------------- Completion --------------------------------------------------------
+
+ // used to differentiate levels of thread-safety in `Symbol.isThreadsafe`
+ case class SymbolOps(isFlagRelated: Boolean, mask: Long)
+ val AllOps = SymbolOps(isFlagRelated = false, mask = 0L)
+ def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask)
+
+ private def relevantSymbols(syms: Seq[Symbol]) = syms.flatMap(sym => List(sym, sym.moduleClass, sym.sourceModule))
+ def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = relevantSymbols(syms).foreach(_.markFlagsCompleted(mask))
+ def markAllCompleted(syms: Symbol*): Unit = relevantSymbols(syms).foreach(_.markAllCompleted)
}
object SymbolsStats {
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index b16cbd8325..29fdba2781 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -49,10 +49,10 @@ abstract class TreeGen extends macros.TreeBuilder {
mkMethodCall(Select(receiver, method), targs, args)
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
- Apply(mkTypeApply(target, targs map TypeTree), args)
+ Apply(mkTypeApply(target, mapList(targs)(TypeTree)), args)
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
- mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+ mkTypeApply(mkAttributedRef(method), mapList(targs)(TypeTree))
/** Builds a reference to value whose type is given stable prefix.
* The type must be suitable for this. For example, it
@@ -281,7 +281,7 @@ abstract class TreeGen extends macros.TreeBuilder {
case tree :: Nil if flattenUnary =>
tree
case _ =>
- Apply(scalaDot(TupleClass(elems.length).companionModule.name), elems)
+ Apply(scalaDot(TupleClass(elems.length).name.toTermName), elems)
}
def mkTupleType(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
@@ -342,7 +342,7 @@ abstract class TreeGen extends macros.TreeBuilder {
}
param
}
-
+
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
val gvdefs = evdefs map {
@@ -381,11 +381,11 @@ abstract class TreeGen extends macros.TreeBuilder {
}
constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
// Field definitions for the class - remove defaults.
-
+
val fieldDefs = vparamss.flatten map (vd => {
val field = copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree)
// Prevent overlapping of `field` end's position with default argument's start position.
- // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
+ // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
// the `pos` is a point position with all its values equal to `vd.rhs.pos.start`.
if(field.pos.isRange && vd.rhs.pos.isRange) field.pos = field.pos.withEnd(vd.rhs.pos.start - 1)
field
@@ -444,13 +444,23 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ /** Create a literal unit tree that is inserted by the compiler rather than
+ * written by the end user. It's important to distinguish the two so that
+ * quasiquotes can strip synthetic ones away.
+ */
+ def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment)
+
/** Create block of statements `stats` */
def mkBlock(stats: List[Tree]): Tree =
if (stats.isEmpty) Literal(Constant(()))
- else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
+ else if (!stats.last.isTerm) Block(stats, mkSyntheticUnit())
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
+ /** Create a block that wraps multiple statements, but do no
+ * wrapping if there is just one statement. Used by
+ * quasiquotes, the macro c.parse API and the toolbox.
+ */
def mkTreeOrBlock(stats: List[Tree]) = stats match {
case Nil => EmptyTree
case head :: Nil => head
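A rough user-level sketch of why the synthetic marker matters (an illustration, not part of this patch; the expected results are my reading of the intent described above): quasiquote block deconstruction can drop the unit that mkBlock appended while keeping one the user actually wrote.

    import scala.reflect.runtime.universe._

    // Two definitions form a block whose trailing `()` is compiler-inserted
    // (mkSyntheticUnit above); deconstruction should hide it again.
    val q"..$stats" = q"val x = 1; val y = 2"
    // stats: expected to be just the two vals, no trailing Literal(Constant(()))

    // A unit literal written by the user carries no SyntheticUnitAttachment,
    // so it should be preserved.
    val q"..$stats2" = q"val a = 1; ()"
    // stats2: expected to end with the explicit `()`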
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 497a7c91b1..7bab15b0f4 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -284,17 +284,17 @@ abstract class TreeInfo {
/** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
* same object?
*/
- def isSelfConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
- case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSelfConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Ident(nme.CONSTRUCTOR) => true
+ case Select(This(_), nme.CONSTRUCTOR) => true
+ case _ => false
}
/** Is tree a super constructor call?
*/
- def isSuperConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSuperConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Select(Super(_, _), nme.CONSTRUCTOR) => true
+ case _ => false
}
/**
@@ -848,8 +848,10 @@ abstract class TreeInfo {
case _ => false
})
- def isMacroApplication(tree: Tree): Boolean =
- !tree.isDef && tree.symbol != null && tree.symbol.isTermMacro && !tree.symbol.isErroneous
+ def isMacroApplication(tree: Tree): Boolean = !tree.isDef && {
+ val sym = tree.symbol
+ sym != null && sym.isTermMacro && !sym.isErroneous
+ }
def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
case Block(_, expr) => isMacroApplicationOrBlock(expr)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 9ddaea4c62..4fbac235f4 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -899,7 +899,7 @@ trait Types
-1
}
- /** If this is a poly- or methodtype, a copy with cloned type / value parameters
+ /** If this is an ExistentialType, PolyType or MethodType, a copy with cloned type / value parameters
* owned by `owner`. Identity for all other types.
*/
def cloneInfo(owner: Symbol) = this
@@ -1969,15 +1969,15 @@ trait Types
def apply(value: Constant) = unique(new UniqueConstantType(value))
}
- private var _volatileRecursions: Int = 0
- def volatileRecursions = _volatileRecursions
- def volatileRecursions_=(value: Int) = _volatileRecursions = value
-
- private val _pendingVolatiles = new mutable.HashSet[Symbol]
- def pendingVolatiles = _pendingVolatiles
+ /* Syncnote: The `volatileRecursions` var and the `pendingVolatiles` mutable set need not be protected
+ * with synchronized, because they are accessed only from isVolatile, which is called only from
+ * Typer.
+ */
+ private var volatileRecursions: Int = 0
+ private val pendingVolatiles = new mutable.HashSet[Symbol]
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
- require(args0.nonEmpty, this)
+ require(args0 ne Nil, this)
/** No unapplied type params size it has (should have) equally as many args. */
override def isHigherKinded = false
@@ -1988,12 +1988,39 @@ trait Types
// too little information is known to determine its kind, and
// it later turns out not to have kind *. See SI-4070. Only
// logging it for now.
- if (sym.typeParams.size != args.size)
+ val tparams = sym.typeParams
+ if (tparams.size != args.size)
devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
-
- val GenPolyType(tparams, result) = asSeenFromOwner(tp)
- assert((tparams eq Nil) || tparams == sym.typeParams, (tparams, sym.typeParams))
- result.instantiateTypeParams(sym.typeParams, args)
+ def asSeenFromInstantiated(tp: Type) =
+ asSeenFromOwner(tp).instantiateTypeParams(tparams, args)
+ // If we're called with a poly type, and we were to run `asSeenFrom` over the entire
+ // type, we could end up with new symbols for the type parameters (clones from TypeMap).
+ // The subsequent substitution of type arguments would then fail. This problem showed up during
+ // the fix for SI-8046; however, the solution taken there wasn't quite right, and led to
+ // SI-8170.
+ //
+ // Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
+ // on the result type.
+ //
+ // TODO: Revisit this and explore the questions raised:
+ //
+ // AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
+ // JZ: I think it's largely irrelevant, as they are no longer referred to in the result type.
+ // In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
+ // `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
+ // But I thought it was better to retain the kind.
+ // AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
+ // In general, it seems iffy: if the tparams can never occur in the result,
+ // then we might as well represent the type as a no-arg typeref.
+ // AM: I've also been trying to track down uses of transform (pretty generic name for something that
+ // does not seem that widely applicable).
+ // It's kind of a helper for computing baseType (since it tries to propagate our type args to some
+ // other type, which has to be related to this type for that to make sense).
+ //
+ tp match {
+ case PolyType(`tparams`, result) => PolyType(tparams, asSeenFromInstantiated(result))
+ case _ => asSeenFromInstantiated(tp)
+ }
}
// note: does not go through typeRef. There's no need to because
@@ -2035,7 +2062,7 @@ trait Types
// to a java or scala symbol, but it does matter whether it occurs in java or scala code.
// TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
// represented as existential types.
- override def isHigherKinded = typeParams.nonEmpty
+ override def isHigherKinded = (typeParams ne Nil)
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
@@ -2221,7 +2248,7 @@ trait Types
//OPT specialize hashCode
override final def computeHashCode = {
import scala.util.hashing.MurmurHash3._
- val hasArgs = args.nonEmpty
+ val hasArgs = args ne Nil
var h = productSeed
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
@@ -2412,7 +2439,7 @@ trait Types
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
- if (args.nonEmpty) {
+ if (args ne Nil) {
if (sym.isAliasType) new AliasArgsTypeRef(pre, sym, args)
else if (sym.isAbstractType) new AbstractArgsTypeRef(pre, sym, args)
else new ClassArgsTypeRef(pre, sym, args)
@@ -2485,7 +2512,7 @@ trait Types
true
}
- def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isImplicit = (params ne Nil) && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
@@ -2493,7 +2520,7 @@ trait Types
override def paramss: List[List[Symbol]] = params :: resultType.paramss
- override def paramTypes = params map (_.tpe)
+ override def paramTypes = mapList(params)(symTpe) // OPT use mapList rather than .map
override def boundSyms = resultType.boundSyms ++ params
@@ -2656,8 +2683,55 @@ trait Types
override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
override def isHigherKinded = false
- override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
+ /** [SI-6169, SI-8197 -- companion to SI-1786]
+ *
+ * Approximation to improve the bounds of a Java-defined existential type,
+ * based on the bounds of the type parameters of the quantified type
+ * In Scala syntax, given a Java-defined class C[T <: String], the existential type C[_]
+ * is improved to C[_ <: String] before skolemization, which captures (get it?) what Java does:
+ * enter the type parameters' bounds into the context when checking subtyping/type equality of existential types.
+ *
+ * (Also tried doing this once during class file parsing or when creating the existential type,
+ * but that causes cyclic errors because it happens too early.)
+ */
+ private def sharpenQuantifierBounds(): Unit = {
+ /* Check that we're looking at rawToExistential's handiwork
+ * (`existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))`).
+ * We can't do this sharpening there because we'll run into cycles.
+ */
+ def rawToExistentialCreatedMe = (quantified corresponds underlying.typeArgs){ (q, a) => q.tpe =:= a }
+
+ if (underlying.typeSymbol.isJavaDefined && rawToExistentialCreatedMe) {
+ val tpars = underlying.typeSymbol.initialize.typeParams // TODO: is initialize needed?
+ debuglog(s"sharpen bounds: $this | ${underlying.typeArgs.map(_.typeSymbol)} <-- ${tpars.map(_.info)}")
+
+ foreach2(quantified, tpars) { (quant, tparam) =>
+ // TODO: check `tparam.info.substSym(tpars, quantified) <:< quant.info` instead (for some weird reason not working for test/t6169/ExistF)
+ // for now, crude approximation for the common case
+ if (quant.info.bounds.isEmptyBounds && !tparam.info.bounds.isEmptyBounds) {
+ // avoid creating cycles [pos/t2940] that consist of an existential quantifier
+ // bounded by an existential type that unhygienically has that quantifier as its own quantifier
+ // (TODO: clone latter existential with fresh quantifiers -- not covering this case for now)
+ if ((existentialsInType(tparam.info) intersect quantified).isEmpty)
+ quant setInfo tparam.info.substSym(tpars, quantified)
+ }
+ }
+ }
+
+ _sharpenQuantifierBounds = false
+ }
+ private[this] var _sharpenQuantifierBounds = true
+
+ override def skolemizeExistential(owner: Symbol, origin: AnyRef) = {
+ // do this here because it's quite close to what Java does:
+ // when checking subtyping/type equality, enter constraints
+ // derived from the existentially quantified type into the typing environment
+ // (aka \Gamma, which tracks types for variables and constraints/kinds for types)
+ // as a nice bonus, delaying this until we need it avoids cyclic errors
+ if (_sharpenQuantifierBounds) sharpenQuantifierBounds
+
deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
+ }
private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
case TypeRef(_, sym, _) if (qset contains sym) =>
@@ -2878,6 +2952,8 @@ trait Types
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
+
+ override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]")
}
trait UntouchableTypeVar extends TypeVar {
@@ -2998,15 +3074,19 @@ trait Types
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
assert(tp != this, tp) // implies there is a cycle somewhere (?)
//println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addLoBound(tp, isNumericBound)
+ if (!sharesConstraints(tp)) {
+ undoLog record this
+ constr.addLoBound(tp, isNumericBound)
+ }
}
def addHiBound(tp: Type, isNumericBound: Boolean = false) {
// assert(tp != this)
//println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addHiBound(tp, isNumericBound)
+ if (!sharesConstraints(tp)) {
+ undoLog record this
+ constr.addHiBound(tp, isNumericBound)
+ }
}
// </region>
@@ -3018,6 +3098,16 @@ trait Types
case ConstantTrue => true
case tv: TypeVar => tv.suspended
}
+
+ /** An `AppliedTypeVar` shares its `TypeConstraint` with the `HKTypeVar` it was spawned from.
+ * A type inference session can also have more than one ATV.
+ * If we don't detect that, we end up with a "cyclic constraint" error when we try to instantiate type parameters
+ * after solving, as in pos/t8237.
+ */
+ protected final def sharesConstraints(other: Type): Boolean = other match {
+ case other: TypeVar => constr == other.constr // SI-8237 avoid cycles. Details in pos/t8237.scala
+ case _ => false
+ }
private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse
// SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
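For context, here is a hedged illustration (not the pos/t8237 test itself) of the situation `sharesConstraints` guards against: inferring one higher-kinded type parameter from several applications spawns several applied type variables that all feed the same constraint set.

    import scala.language.higherKinds

    object HKInferenceSketch {
      // Inferring F from the two arguments creates applied type variables for F[Int] and F[String]
      // that both constrain the same underlying higher-kinded type variable for F.
      def pair[F[_]](fi: F[Int], fs: F[String]): (F[Int], F[String]) = (fi, fs)

      def main(args: Array[String]): Unit =
        println(pair(List(1), List("a"))) // F is inferred as List
    }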
@@ -4120,7 +4210,7 @@ trait Types
&& (variance.isCovariant || isSubType(t2, t1, depth))
)
- corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
+ corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
}
def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
@@ -4304,7 +4394,7 @@ trait Types
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
- tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
+ mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
@@ -4553,7 +4643,10 @@ trait Types
private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
- private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+ private[scala] val treeTpe = (t: Tree) => t.tpe
+ private[scala] val symTpe = (sym: Symbol) => sym.tpe
+ private[scala] val symInfo = (sym: Symbol) => sym.info
+ private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations ne Nil
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 2a19441476..42f794736a 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -275,6 +275,7 @@ abstract class UnPickler {
def pflags = flags & PickledFlags
def finishSym(sym: Symbol): Symbol = {
+ markFlagsCompleted(sym)(mask = AllFlags)
sym.privateWithin = privateWithin
sym.info = (
if (atEnd) {
@@ -663,7 +664,7 @@ abstract class UnPickler {
private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter {
private val definedAtRunId = currentRunId
private val p = phase
- override def complete(sym: Symbol) : Unit = try {
+ protected def completeInternal(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
if (p ne null)
slowButSafeEnteringPhase(p) (sym setInfo tp)
@@ -673,6 +674,10 @@ abstract class UnPickler {
catch {
case e: MissingRequirementError => throw toTypeError(e)
}
+ override def complete(sym: Symbol) : Unit = {
+ completeInternal(sym)
+ if (!isCompilerUniverse) markAllCompleted(sym)
+ }
override def load(sym: Symbol) { complete(sym) }
}
@@ -680,8 +685,9 @@ abstract class UnPickler {
* of completed symbol to symbol at index `j`.
*/
private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
- override def complete(sym: Symbol) = try {
- super.complete(sym)
+ override def completeInternal(sym: Symbol) = try {
+ super.completeInternal(sym)
+
var alias = at(j, readSymbol)
if (alias.isOverloaded)
alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
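The change above is a template-method style refactoring: subclasses now extend `completeInternal`, while `complete` performs the shared post-step exactly once. A minimal standalone sketch of that shape (the names are illustrative, not the UnPickler API):

    object CompleterSketch {
      abstract class LazyCompleter {
        protected def completeInternal(sym: String): Unit
        def complete(sym: String): Unit = {
          completeInternal(sym)
          println(s"markAllCompleted($sym)") // stands in for the shared bookkeeping
        }
      }
      class RefCompleter extends LazyCompleter {
        protected def completeInternal(sym: String): Unit = println(s"read type for $sym")
      }
      class RefAndAliasCompleter extends RefCompleter {
        override protected def completeInternal(sym: String): Unit = {
          super.completeInternal(sym)
          println(s"link alias for $sym")
        }
      }
      def main(args: Array[String]): Unit = new RefAndAliasCompleter().complete("Foo")
    }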
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 28afd18fe0..816916787e 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -51,6 +51,8 @@ abstract class MutableSettings extends AbsSettings {
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
+
+ def isScala211: Boolean
}
object MutableSettings {
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 3c4e93f11d..f9b10c90be 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -173,11 +173,11 @@ trait TypeComparers {
// SI-2066 This prevents overrides with incompatible variance in higher order type parameters.
private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = {
def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod)
- ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
+ !settings.isScala211 || ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
}
private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) =
- methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
+ !settings.isScala211 || methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
def isSameType2(tp1: Type, tp2: Type): Boolean = {
def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index e2159d30f5..564cbb1ce3 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -257,6 +257,12 @@ private[internal] trait TypeConstraints {
// println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tv => tv.instWithinBounds || util.andFalse(log(s"Inferred type for ${tv.originString} does not conform to bounds: ${tv.constr}")))
+
+ def logBounds(tv: TypeVar) = log {
+ val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}"
+ s"Inferred type for ${tv.originString} (${tv.inst}) $what"
+ }
+
+ tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv)))
}
}
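For reference, a small sketch of the `forall`-with-logging idiom used above; `andFalse` here is a local stand-in for the compiler's helper that evaluates its argument for the side effect and then returns false.

    object AndFalseSketch {
      def andFalse(u: Unit): Boolean = false
      def withinBounds(x: Int): Boolean = 0 <= x && x <= 10

      def main(args: Array[String]): Unit = {
        val xs = List(1, 5, 42)
        val allOk = xs forall (x => withinBounds(x) || andFalse(println(s"$x does not conform to bounds")))
        println(allOk) // false, printed after logging the offending element
      }
    }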
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 09f4389b82..f427813c01 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -861,7 +861,7 @@ private[internal] trait TypeMaps {
class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
private val actuals = actuals0.toIndexedSeq
private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+ def existentialsNeeded: List[Symbol] = existentials.iterator.filter(_ ne null).toList
private object StableArg {
def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
new file mode 100644
index 0000000000..f0c7d0f050
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
@@ -0,0 +1,19 @@
+package scala.reflect
+package internal
+package transform
+
+trait PostErasure {
+ val global: SymbolTable
+ import global._
+ import definitions._
+
+ object elimErasedValueType extends TypeMap {
+ def apply(tp: Type) = tp match {
+ case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
+ case ErasedValueType(_, underlying) => underlying
+ case _ => mapOver(tp)
+ }
+ }
+
+ def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index fa185db22f..296ccde443 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -26,17 +26,20 @@ trait Transforms { self: SymbolTable =>
private val refChecksLazy = new Lazy(new { val global: Transforms.this.type = self } with RefChecks)
private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry)
private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure)
+ private val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure)
def refChecks = refChecksLazy.force
def uncurry = uncurryLazy.force
def erasure = erasureLazy.force
+ def postErasure = postErasureLazy.force
def transformedType(sym: Symbol) =
- erasure.transformInfo(sym,
- uncurry.transformInfo(sym,
- refChecks.transformInfo(sym, sym.info)))
+ postErasure.transformInfo(sym,
+ erasure.transformInfo(sym,
+ uncurry.transformInfo(sym,
+ refChecks.transformInfo(sym, sym.info))))
def transformedType(tpe: Type) =
- erasure.scalaErasure(uncurry.uncurry(tpe))
+ postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe)))
}
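The nesting above means postErasure's map runs last, i.e. outermost. A minimal sketch of that ordering with plain functions (the names mirror the phases but are not compiler API):

    object PipelineOrderSketch {
      type Info = String
      val refChecks:   Info => Info = i => s"refChecks($i)"
      val uncurry:     Info => Info = i => s"uncurry($i)"
      val erasure:     Info => Info = i => s"erasure($i)"
      val postErasure: Info => Info = i => s"postErasure($i)"

      // same order as transformedType: refChecks first, postErasure last
      val transformedInfo: Info => Info =
        refChecks andThen uncurry andThen erasure andThen postErasure

      def main(args: Array[String]): Unit =
        println(transformedInfo("sym.info")) // postErasure(erasure(uncurry(refChecks(sym.info))))
    }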
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 7cc2952c96..d128521be8 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -47,6 +47,30 @@ trait Collections {
final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f)
final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f)
+ /** A version of `List#map`, specialized for `List` and optimized to avoid allocation when `as` is empty */
+ final def mapList[A, B](as: List[A])(f: A => B): List[B] = if (as eq Nil) Nil else {
+ val head = new ::[B](f(as.head), Nil)
+ var tail: ::[B] = head
+ var rest = as.tail
+ while (rest ne Nil) {
+ val next = new ::(f(rest.head), Nil)
+ tail.tl = next
+ tail = next
+ rest = rest.tail
+ }
+ head
+ }
+
+ final def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = {
+ @tailrec
+ def loop(rest: List[A]): Option[B] = rest match {
+ case Nil => None
+ case a :: as if pf.isDefinedAt(a) => Some(pf(a))
+ case a :: as => loop(as)
+ }
+ loop(as)
+ }
+
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
var ys1 = xs1
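Semantically, `mapList` is `List#map` and `collectFirst` mirrors `TraversableOnce#collectFirst`, reimplemented without the generic builder machinery. Below is a hedged standalone sketch of the same semantics that is runnable outside the `scala` package (the real `mapList` mutates cons cells directly, which needs `private[scala]` access):

    object CollectionsSketch {
      def mapList[A, B](as: List[A])(f: A => B): List[B] =
        if (as.isEmpty) Nil
        else {
          val buf = List.newBuilder[B] // stand-in for the cons-cell mutation in the real version
          var rest = as
          while (rest.nonEmpty) { buf += f(rest.head); rest = rest.tail }
          buf.result()
        }

      def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = {
        @annotation.tailrec def loop(rest: List[A]): Option[B] = rest match {
          case Nil                         => None
          case a :: _ if pf.isDefinedAt(a) => Some(pf(a))
          case _ :: tail                   => loop(tail)
        }
        loop(as)
      }

      def main(args: Array[String]): Unit = {
        println(mapList(List(1, 2, 3))(_ * 2))                             // List(2, 4, 6)
        println(collectFirst(List(1, 3, 4)) { case x if x % 2 == 0 => x }) // Some(4)
      }
    }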
@@ -99,15 +123,19 @@ trait Collections {
else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
}
final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
- val lb = new ListBuffer[C]
+ var lb: ListBuffer[C] = null
var ys1 = xs1
var ys2 = xs2
while (!ys1.isEmpty && !ys2.isEmpty) {
- lb ++= f(ys1.head, ys2.head)
+ val cs = f(ys1.head, ys2.head)
+ if (cs ne Nil) {
+ if (lb eq null) lb = new ListBuffer[C]
+ lb ++= cs
+ }
ys1 = ys1.tail
ys2 = ys2.tail
}
- lb.toList
+ if (lb eq null) Nil else lb.result
}
final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index 9866b043bb..8791d8eb23 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -40,8 +40,8 @@ abstract class SourceFile {
def lineToString(index: Int): String = {
val start = lineToOffset(index)
var end = start
- while (!isEndOfLine(end)) end += 1
- content.slice(start, end) mkString ""
+ while (!isEndOfLine(end) && end <= length) end += 1
+ new String(content, start, end - start)
}
@tailrec
@@ -136,14 +136,20 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
private def charAtIsEOL(idx: Int)(p: Char => Boolean) = {
// don't identify the CR in CR LF as a line break, since LF will do.
- def notCRLF0 = content(idx) != CR || idx + 1 >= length || content(idx + 1) != LF
+ def notCRLF0 = content(idx) != CR || !content.isDefinedAt(idx + 1) || content(idx + 1) != LF
idx < length && notCRLF0 && p(content(idx))
}
def isLineBreak(idx: Int) = charAtIsEOL(idx)(isLineBreakChar)
- def isEndOfLine(idx: Int) = charAtIsEOL(idx) {
+ /** True if the index is part of an EOL sequence (a CR or LF character). */
+ def isEndOfLine(idx: Int) = (content isDefinedAt idx) && PartialFunction.cond(content(idx)) {
+ case CR | LF => true
+ }
+
+ /** True if the index is the last character of an EOL sequence. */
+ def isAtEndOfLine(idx: Int) = charAtIsEOL(idx) {
case CR | LF => true
case _ => false
}
@@ -151,7 +157,7 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
def calculateLineIndices(cs: Array[Char]) = {
val buf = new ArrayBuffer[Int]
buf += 0
- for (i <- 0 until cs.length) if (isEndOfLine(i)) buf += i + 1
+ for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1
buf += cs.length // sentinel, so that findLine below works smoother
buf.toArray
}
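To illustrate the distinction the two predicates draw (being part of an EOL sequence vs. terminating one), here is a hedged standalone sketch over a plain Char array; it is not the SourceFile code itself:

    object EolSketch {
      final val CR = '\r'; final val LF = '\n'

      // true if content(idx) is part of an EOL sequence (either CR or LF)
      def isEndOfLine(content: Array[Char], idx: Int): Boolean =
        content.isDefinedAt(idx) && (content(idx) == CR || content(idx) == LF)

      // true if content(idx) terminates an EOL sequence: LF always does, CR only when not followed by LF
      def isAtEndOfLine(content: Array[Char], idx: Int): Boolean =
        content.isDefinedAt(idx) && (content(idx) match {
          case LF => true
          case CR => !content.isDefinedAt(idx + 1) || content(idx + 1) != LF
          case _  => false
        })

      def main(args: Array[String]): Unit = {
        val chars = "a\r\nb".toCharArray
        println((1 to 2).map(i => (isEndOfLine(chars, i), isAtEndOfLine(chars, i))))
        // Vector((true,false), (true,true)): the CR is inside the CR LF sequence, the LF ends it
      }
    }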
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 039e75fbee..5ccdc15a03 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -43,7 +43,7 @@ abstract class Attachments { self =>
/** Check underlying payload contains an instance of type `T`. */
def contains[T: ClassTag]: Boolean =
- all exists matchesTag[T]
+ !isEmpty && (all exists matchesTag[T])
/** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
* Replaces an existing payload of the same type, if exists.
@@ -57,6 +57,8 @@ abstract class Attachments { self =>
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
else new NonemptyAttachments[Pos](this.pos, newAll)
}
+
+ def isEmpty: Boolean = true
}
// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
@@ -64,4 +66,5 @@ abstract class Attachments { self =>
private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
type Pos = P
def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+ override def isEmpty: Boolean = false
}
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
index 222ae43d79..68e07dd319 100644
--- a/src/reflect/scala/reflect/macros/Evals.scala
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -17,13 +17,13 @@ trait Evals {
* permitted by the shape of the arguments.
*
* Known issues: because of [[https://issues.scala-lang.org/browse/SI-5748 https://issues.scala-lang.org/browse/SI-5748]]
- * trees being evaluated first need to undergo `resetAllAttrs`. Resetting symbols and types
+ * trees being evaluated first need to undergo `untypecheck`. Resetting symbols and types
* mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first.
*
* {{{
* scala> def impl(c: Context)(x: c.Expr[String]) = {
- * | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate))
- * | println(s"compile-time value is: \${c.eval(x1)}")
+ * | val x1 = c.Expr[String](c.untypecheck(x.tree.duplicate))
+ * | println(s"compile-time value is: ${c.eval(x1)}")
* | x
* | }
* impl: (c: Context)(x: c.Expr[String])c.Expr[String]
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index 87de442921..6c077de1d2 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -68,12 +68,6 @@ trait Typers {
*/
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
- /** Recursively resets symbols and types in a given tree.
- * WARNING: Don't use this API, go for [[untypecheck]] instead.
- */
- @deprecated("Use `c.untypecheck` instead", "2.11.0")
- def resetAllAttrs(tree: Tree): Tree
-
/** Recursively resets locally defined symbols and types in a given tree.
* WARNING: Don't use this API, go for [[untypecheck]] instead.
*/
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index d84e6aa737..bc5c8b2840 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -122,7 +122,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def setType(tp: Type): Tree
/** Like `setType`, but if this is a previously empty TypeTree that
- * fact is remembered so that resetAllAttrs will snap back.
+ * fact is remembered so that `untypecheck` will snap back.
*
* \@PP: Attempting to elaborate on the above, I find: If defineType
* is called on a TypeTree whose type field is null or NoType,
@@ -130,7 +130,8 @@ abstract class Universe extends scala.reflect.api.Universe {
* ResetAttrsTraverser, which nulls out the type field of TypeTrees
* for which wasEmpty is true, leaving the others alone.
*
- * resetAllAttrs is used in situations where some speculative
+ * `untypecheck` (or `resetAttrs` in compiler parlance) is used
+ * in situations where some speculative
* typing of a tree takes place, fails, and the tree needs to be
* returned to its former state to try again. So according to me:
* using `defineType` instead of `setType` is how you communicate
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 7cc5176507..bc95b839b7 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -16,7 +16,7 @@ import java.io.IOException
import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor }
import internal.pickling.ByteCodecs
import internal.pickling.UnPickler
-import scala.collection.mutable.{ HashMap, ListBuffer }
+import scala.collection.mutable.{ HashMap, ListBuffer, ArrayBuffer }
import internal.Flags._
import ReflectionUtils._
import scala.language.existentials
@@ -42,7 +42,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// overridden by ReflectGlobal
def rootClassLoader: ClassLoader = this.getClass.getClassLoader
- trait JavaClassCompleter extends FlagAssigningCompleter
+ trait JavaClassCompleter
def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized {
mirrors get cl match {
@@ -63,10 +63,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private[reflect] lazy val runDefinitions = new definitions.RunDefinitions // only one "run" in the reflection universe
import runDefinitions._
- override lazy val RootPackage = new RootPackage with SynchronizedTermSymbol
- override lazy val RootClass = new RootClass with SynchronizedModuleClassSymbol
- override lazy val EmptyPackage = new EmptyPackage with SynchronizedTermSymbol
- override lazy val EmptyPackageClass = new EmptyPackageClass with SynchronizedModuleClassSymbol
+ override lazy val RootPackage = (new RootPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val RootClass = (new RootClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val EmptyPackage = (new EmptyPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val EmptyPackageClass = (new EmptyPackageClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
/** The lazy type for root.
*/
@@ -118,15 +118,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def abort(msg: String) = throw new ScalaReflectionException(msg)
- private def ErrorInnerClass(sym: Symbol) = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
- private def ErrorInnerModule(sym: Symbol) = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
- private def ErrorStaticClass(sym: Symbol) = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
- private def ErrorStaticModule(sym: Symbol) = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
- private def ErrorNotMember(sym: Symbol, owner: Symbol) = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
- private def ErrorNotField(sym: Symbol) = abort(s"expected a field or an accessor method symbol, you provided $sym")
- private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = abort(s"expected a constructor of $owner, you provided $sym")
- private def ErrorFree(member: Symbol, freeType: Symbol) = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
- private def ErrorNonExistentField(sym: Symbol) = abort(
+ private def ErrorInnerClass(sym: Symbol) = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = abort(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = abort(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorArrayConstructor(sym: Symbol, owner: Symbol) = abort(s"Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+ private def ErrorNonExistentField(sym: Symbol) = abort(
sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method
|note that private parameters of class constructors don't get mapped onto fields and/or accessors,
|unless they are used outside of their declaring constructors.""")
@@ -221,6 +222,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) {
if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner)
+ if (owner == ArrayClass) ErrorArrayConstructor(sym, owner)
ensuringNotFree(sym) {
if (!owner.info.decls.toList.contains(sym)) ErrorNotConstructor(sym, owner)
}
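Usage note for the new guard: reflective constructor mirrors refuse `Array`, and element arrays are created through a `ClassTag` instead, as the error message suggests. A small example (safe to treat as a sketch):

    import scala.reflect.classTag

    object NewArrayDemo {
      def main(args: Array[String]): Unit = {
        // classTag[String] carries classOf[String]; ClassTag(someRuntimeClass) serves the same
        // purpose when only a java.lang.Class is at hand.
        val xs = classTag[String].newArray(3)
        xs(0) = "a"
        println(xs.toList) // List(a, null, null)
      }
    }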
@@ -247,7 +249,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
def reflectMethod(method: MethodSymbol): MethodMirror = {
checkMemberOf(method, symbol)
- mkJavaMethodMirror(instance, method)
+ mkMethodMirror(instance, method)
}
def reflectClass(cls: ClassSymbol): ClassMirror = {
if (cls.isStatic) ErrorStaticClass(cls)
@@ -262,16 +264,35 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def toString = s"instance mirror for $instance"
}
- private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol)
+ // caches value class metadata, so that we minimize the work that needs to be done during Mirror.apply
+ private class DerivedValueClassMetadata(info: Type) {
+ val symbol = info.typeSymbol
+ val isDerivedValueClass = symbol.isDerivedValueClass
+ lazy val boxer = runtimeClass(symbol.toType).getDeclaredConstructors().head
+ lazy val unboxer = {
+ val fields @ (field :: _) = symbol.toType.declarations.collect{ case ts: TermSymbol if ts.isParamAccessor && ts.isMethod => ts }.toList
+ assert(fields.length == 1, s"$symbol: $fields")
+ runtimeClass(symbol.asClass).getDeclaredMethod(field.name.toString)
+ }
+ }
+
+ private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol, metadata: DerivedValueClassMetadata)
extends FieldMirror {
+ def this(receiver: Any, symbol: TermSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.info))
+ def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol, metadata)
+ import metadata._
+
lazy val jfield = ensureAccessible(fieldToJava(symbol))
- def get = jfield get receiver
+ def get = {
+ val value = jfield get receiver
+ if (isDerivedValueClass) boxer.newInstance(value) else value
+ }
def set(value: Any) = {
// it appears useful to be able to set values of vals, therefore I'm disabling this check
// if (!symbol.isMutable) ErrorSetImmutableField(symbol)
- jfield.set(receiver, value)
+ jfield.set(receiver, if (isDerivedValueClass) unboxer.invoke(value) else value)
}
- def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol)
+
override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)"
}
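A hedged usage sketch of the boxing behavior above, assuming scala-reflect on the classpath; `Meters` and `Box` are made-up names, not part of the change:

    import scala.reflect.runtime.{universe => ru}

    final class Meters(val value: Int) extends AnyVal {
      override def toString = s"Meters($value)"
    }
    class Box(val width: Meters)

    object FieldMirrorDemo {
      def main(args: Array[String]): Unit = {
        val mirror = ru.runtimeMirror(getClass.getClassLoader)
        val im     = mirror.reflect(new Box(new Meters(3)))
        val getter = ru.typeOf[Box].decl(ru.TermName("width")).asTerm
        val fm     = im.reflectField(getter)
        println(fm.get) // with the metadata above, this should yield Meters(3) rather than a raw Int
      }
    }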
@@ -312,13 +333,17 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
+ private def isByNameParam(p: Type) = isByNameParamType(p)
+ private def isValueClassParam(p: Type) = p.typeSymbol.isDerivedValueClass
+
// unlike other mirrors, method mirrors are created by a factory
// that's because we want to have decent performance
// therefore we move special cases into separate subclasses
// rather than have them on a hot path in a unified implementation of the `apply` method
- private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
- if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
- else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
+ private def mkMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): MethodMirror = {
+ def existsParam(pred: Type => Boolean) = symbol.paramss.flatten.map(_.info).exists(pred)
+ if (isBytecodelessMethod(symbol)) new BytecodelessMethodMirror(receiver, symbol)
+ else if (existsParam(isByNameParam) || existsParam(isValueClassParam)) new JavaTransformingMethodMirror(receiver, symbol)
else {
symbol.paramss.flatten.length match {
case 0 => new JavaVanillaMethodMirror0(receiver, symbol)
@@ -330,68 +355,123 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
}
}
- private abstract class JavaMethodMirror(val symbol: MethodSymbol) extends MethodMirror {
+
+ private abstract class JavaMethodMirror(val symbol: MethodSymbol, protected val ret: DerivedValueClassMetadata) extends MethodMirror {
lazy val jmeth = ensureAccessible(methodToJava(symbol))
- def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+ lazy val jconstr = ensureAccessible(constructorToJava(symbol))
- def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
- val result = jinvokeraw(jmeth, receiver, args)
- if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+ else if (receiver == null) jconstr.newInstance(args.asInstanceOf[Seq[AnyRef]]: _*)
+ else jconstr.newInstance((receiver +: args).asInstanceOf[Seq[AnyRef]]: _*)
+ def jinvoke(args: Seq[Any]): Any = {
+ val result = jinvokeraw(args)
+ if (!symbol.isConstructor && jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else if (!symbol.isConstructor && ret.isDerivedValueClass) ret.boxer.newInstance(result.asInstanceOf[AnyRef])
else result
}
- override def toString = s"method mirror for ${showMethodSig(symbol)} (bound to $receiver)"
- }
-
- private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol)
- extends JavaMethodMirror(symbol) {
- def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol)
- def apply(args: Any*): Any = jinvoke(jmeth, receiver, args)
- }
-
- private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol)
- extends JavaVanillaMethodMirror(receiver, symbol) {
- override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol)
- override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver)
- }
-
- private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol)
- extends JavaVanillaMethodMirror(receiver, symbol) {
- override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol)
- override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef])
- }
-
- private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol)
- extends JavaVanillaMethodMirror(receiver, symbol) {
- override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol)
- override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
- }
-
- private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol)
- extends JavaVanillaMethodMirror(receiver, symbol) {
- override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol)
- override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ override def toString = {
+ val what = if (symbol.isConstructor) "constructor mirror" else "method mirror"
+ s"$what for ${showMethodSig(symbol)} (bound to $receiver)"
+ }
}
- private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol)
- extends JavaVanillaMethodMirror(receiver, symbol) {
- override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol)
- override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
- }
+ private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaMethodMirror(symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol, ret)
+ def apply(args: Any*): Any = jinvoke(args)
+ }
+
+ private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol, ret)
+ override def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver)
+ else if (receiver == null) jconstr.newInstance()
+ else jconstr.newInstance(receiver.asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol, ret)
+ override def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef])
+ else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef])
+ else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol, ret)
+ override def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+ else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+ else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol, ret)
+ override def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol, ret: DerivedValueClassMetadata)
+ extends JavaVanillaMethodMirror(receiver, symbol, ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new DerivedValueClassMetadata(symbol.returnType))
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol, ret)
+ override def jinvokeraw(args: Seq[Any]) =
+ if (!symbol.isConstructor) jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+ else if (receiver == null) jconstr.newInstance(args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+ else jconstr.newInstance(receiver.asInstanceOf[AnyRef], args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+ }
+
+ // caches MethodSymbol metadata, so that we minimize the work that needs to be done during Mirror.apply
+ // TODO: vararg is only supported in the last parameter list (SI-6182), so we don't need to worry about the rest for now
+ private class MethodMetadata(symbol: MethodSymbol) {
+ private val params = symbol.paramss.flatten.toArray
+ private val vcMetadata = params.map(p => new DerivedValueClassMetadata(p.info))
+ val isByName = params.map(p => isByNameParam(p.info))
+ def isDerivedValueClass(i: Int) = vcMetadata(i).isDerivedValueClass
+ def paramUnboxers(i: Int) = vcMetadata(i).unboxer
+ val paramCount = params.length
+ val ret = new DerivedValueClassMetadata(symbol.returnType)
+ }
+
+ private class JavaTransformingMethodMirror(val receiver: Any, symbol: MethodSymbol, metadata: MethodMetadata)
+ extends JavaMethodMirror(symbol, metadata.ret) {
+ def this(receiver: Any, symbol: MethodSymbol) = this(receiver, symbol, new MethodMetadata(symbol))
+ override def bind(newReceiver: Any) = new JavaTransformingMethodMirror(newReceiver, symbol, metadata)
+ import metadata._
- private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol)
- extends JavaMethodMirror(symbol) {
- def bind(newReceiver: Any) = new JavaByNameMethodMirror(newReceiver, symbol)
def apply(args: Any*): Any = {
- val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg)
- jinvoke(jmeth, receiver, transformed)
+ val args1 = new Array[Any](args.length)
+ var i = 0
+ while (i < args1.length) {
+ val arg = args(i)
+ if (i >= paramCount) args1(i) = arg // don't transform varargs
+ else if (isByName(i)) args1(i) = () => arg // thunk by-name args (value classes passed by name are not unboxed)
+ else if (isDerivedValueClass(i)) args1(i) = paramUnboxers(i).invoke(arg) // unbox derived value class args
+ else args1(i) = arg // pass all other args through unchanged
+ i += 1
+ }
+ jinvoke(args1)
}
}
- private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
- extends JavaMethodMirror(symbol) {
- def bind(newReceiver: Any) = new JavaBytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol)
- def apply(args: Any*): Any = {
+ private class BytecodelessMethodMirror[T: ClassTag](val receiver: T, val symbol: MethodSymbol)
+ extends MethodMirror {
+ def bind(newReceiver: Any) = new BytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol)
+ override def toString = s"bytecodeless method mirror for ${showMethodSig(symbol)} (bound to $receiver)"
+
+ def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
// actually it's not even necessary, because we manually dispatch arguments below
val params = symbol.paramss.flatten
@@ -414,7 +494,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
assert(jmeths.length == 1, jmeths.toList)
- jinvoke(jmeths.head, null, objReceiver +: objArgs)
+ val jmeth = jmeths.head
+ val result = jmeth.invoke(null, (objReceiver +: objArgs).asInstanceOf[Seq[AnyRef]]: _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
}
symbol match {
@@ -445,23 +528,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
}
- private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol)
- extends MethodMirror {
- def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol)
- override val receiver = outer
- lazy val jconstr = ensureAccessible(constructorToJava(symbol))
- def apply(args: Any*): Any = {
- if (symbol.owner == ArrayClass)
- abort("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
-
- val effectiveArgs =
- if (outer == null) args.asInstanceOf[Seq[AnyRef]]
- else outer +: args.asInstanceOf[Seq[AnyRef]]
- jconstr.newInstance(effectiveArgs: _*)
- }
- override def toString = s"constructor mirror for ${showMethodSig(symbol)} (bound to $outer)"
- }
-
private abstract class JavaTemplateMirror
extends TemplateMirror {
def outer: AnyRef
@@ -474,7 +540,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def isStatic = false
def reflectConstructor(constructor: MethodSymbol) = {
checkConstructorOf(constructor, symbol)
- new JavaConstructorMirror(outer, constructor)
+ mkMethodMirror(outer, constructor)
}
override def toString = s"class mirror for ${symbol.fullName} (bound to $outer)"
}
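A hedged end-to-end sketch of the unified factory: the same `mkMethodMirror` path now backs both `reflectMethod` and `reflectConstructor`, and the transforming mirror covers by-name and value class parameters. `UserId` and `Service` are illustrative names; scala-reflect is assumed on the classpath.

    import scala.reflect.runtime.{universe => ru}

    final class UserId(val raw: Long) extends AnyVal
    class Service(val prefix: String) {
      def lookup(id: UserId, default: => String): String = s"$prefix:${id.raw}:$default"
    }

    object MirrorFactoryDemo {
      def main(args: Array[String]): Unit = {
        val mirror = ru.runtimeMirror(getClass.getClassLoader)

        // constructor mirror, now built through the same factory as method mirrors
        val classSym = ru.typeOf[Service].typeSymbol.asClass
        val ctorSym  = ru.typeOf[Service].decl(ru.termNames.CONSTRUCTOR).asMethod
        val service  = mirror.reflectClass(classSym).reflectConstructor(ctorSym)("svc").asInstanceOf[Service]

        // method with a value class and a by-name parameter: the transforming mirror is
        // expected to unbox the UserId argument and thunk the by-name one
        val lookupSym = ru.typeOf[Service].decl(ru.TermName("lookup")).asMethod
        println(mirror.reflect(service).reflectMethod(lookupSym)(new UserId(42L), "fallback")) // svc:42:fallback
      }
    }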
@@ -575,6 +641,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val bytes = ssig.getBytes
val len = ByteCodecs.decode(bytes)
unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
+ markAllCompleted(clazz, module)
case None =>
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
@@ -583,6 +650,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val len = ByteCodecs.decode(encoded)
val decoded = encoded.take(len)
unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
+ markAllCompleted(clazz, module)
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
@@ -605,6 +673,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName))
.setInfo(new TypeParamCompleter(jtvar))
+ markFlagsCompleted(tparam)(mask = AllFlags)
tparamCache enter (jtvar, tparam)
tparam
}
@@ -617,6 +686,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol) = complete(sym)
override def complete(sym: Symbol) = {
sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny))
+ markAllCompleted(sym)
}
}
@@ -655,7 +725,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* @param module The Scala companion object for which info is copied
* @param jclazz The Java class
*/
- private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter {
+ private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAgnosticCompleter {
+ // one doesn't need non-trivial computations to assign flags for Java-based reflection artifacts,
+ // therefore I'm moving the flag-assigning logic from completion to construction
+ val flags = jclazz.scalaFlags
+ clazz setFlag (flags | JAVA)
+ if (module != NoSymbol) {
+ module setFlag (flags & PRIVATE | JAVA)
+ module.moduleClass setFlag (flags & PRIVATE | JAVA)
+ }
+ markFlagsCompleted(clazz, module)(mask = AllFlags)
/** used to avoid cycles while initializing classes */
private var parentsLevel = 0
@@ -665,12 +744,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol): Unit = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
- val flags = jclazz.scalaFlags
- clazz setFlag (flags | JAVA)
- if (module != NoSymbol) {
- module setFlag (flags & PRIVATE | JAVA)
- module.moduleClass setFlag (flags & PRIVATE | JAVA)
- }
propagatePackageBoundary(jclazz, relatedSymbols: _*)
copyAnnotations(clazz, jclazz)
@@ -686,6 +759,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def complete(sym: Symbol): Unit = {
load(sym)
completeRest()
+ markAllCompleted(clazz, module)
}
def completeRest(): Unit = gilSynchronized {
@@ -738,6 +812,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
completeRest()
+ markAllCompleted(clazz, module)
}
}
}
@@ -892,6 +967,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val pkg = owner.newPackage(name)
pkg.moduleClass setInfo new LazyPackageType
pkg setInfoAndEnter pkg.moduleClass.tpe
+ markFlagsCompleted(pkg)(mask = AllFlags)
info("made Scala "+pkg)
pkg
} else
@@ -1074,6 +1150,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
fieldCache.enter(jfield, field)
propagatePackageBoundary(jfield, field)
copyAnnotations(field, jfield)
+ markAllCompleted(field)
field
}
@@ -1100,11 +1177,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
setMethType(meth, tparams, paramtpes, resulttpe)
propagatePackageBoundary(jmeth.javaFlags, meth)
copyAnnotations(meth, jmeth)
-
- if (jmeth.javaFlags.isVarargs)
- meth modifyInfo arrayToRepeated
- else
- meth
+ if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated
+ markAllCompleted(meth)
+ meth
}
/**
@@ -1127,6 +1202,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
+ markAllCompleted(constr)
constr
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index cfd66744ff..f6556a442d 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -12,6 +12,8 @@ class JavaUniverse extends internal.SymbolTable with JavaUniverseForce with Refl
override def inform(msg: String): Unit = log(msg)
def picklerPhase = internal.SomePhase
+ def erasurePhase = internal.SomePhase
+
lazy val settings = new Settings
private val isLogging = sys.props contains "scala.debug.reflect"
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index b9b171c7ed..fb893cbff1 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -32,8 +32,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
// inaccessible: this._skolemizationLevel
// inaccessible: this._undoLog
// inaccessible: this._intersectionWitness
- // inaccessible: this._volatileRecursions
- // inaccessible: this._pendingVolatiles
// inaccessible: this._subsametypeRecursions
// inaccessible: this._pendingSubTypes
// inaccessible: this._basetypeRecursions
@@ -58,6 +56,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.CompoundTypeTreeOriginalAttachment
this.BackquotedIdentifierAttachment
this.ForAttachment
+ this.SyntheticUnitAttachment
this.SubpatternsAttachment
this.noPrint
this.typeDebug
@@ -205,6 +204,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.NoSymbol
this.CyclicReference
// inaccessible: this.TypeHistory
+ this.SymbolOps
this.TermName
this.TypeName
this.Liftable
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 813c0e1386..d642b25127 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -12,7 +12,7 @@ import scala.reflect.internal.util.AbstractFileClassLoader
/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
*/
-private[scala] object ReflectionUtils {
+object ReflectionUtils {
// Unwraps some chained exceptions which arise during reflective calls.
def unwrapThrowable(x: Throwable): Throwable = x match {
case _: InvocationTargetException | // thrown by reflectively invoked method or constructor
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 11db83d7d5..de5ba99900 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -48,4 +48,5 @@ private[reflect] class Settings extends MutableSettings {
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
+ def isScala211 = true
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 30a3855d70..c56bc28d90 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -6,6 +6,7 @@ import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
+import scala.reflect.internal.Flags._
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -17,6 +18,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* is found, a package is created instead.
*/
class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
+ markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
@@ -24,6 +26,8 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
+ // NOTE: can't mark as thread-safe here, because unpickleClass might decide to delegate to FromJavaClassCompleter
+ // if (!isCompilerUniverse) markAllCompleted(clazz, module)
}
}
override def load(sym: Symbol) = complete(sym)
@@ -64,6 +68,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
// override def safeToString = pkgClass.toString
openPackageModule(sym)
+ markAllCompleted(sym)
}
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index ddbf3bd629..02155578f8 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -28,19 +28,4 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w
* in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
*/
override def isCompilerUniverse = false
-
- /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests.
- *
- * scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions
- * that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors.
- * We could probably fix those, but at the moment it's too risky.
- *
- * Reflective universes share codebase with scalac, but their surface is much smaller, which means less assumptions.
- * These assumptions are taken care of in this overriden `shouldTriggerCompleter` method.
- */
- override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
- completer match {
- case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0
- case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask)
- }
}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 1a232c8de1..f5e16c6640 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -4,6 +4,7 @@ package runtime
import scala.reflect.io.AbstractFile
import scala.collection.{ immutable, mutable }
+import scala.reflect.internal.Flags._
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -31,23 +32,105 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
trait SynchronizedSymbol extends Symbol {
- def gilSynchronizedIfNotInited[T](body: => T): T = {
- // TODO JZ desired, but prone to race conditions. We need the runtime reflection based
- // type completers to establish a memory barrier upon initialization. Maybe a volatile
- // write? We need to consult with the experts here. Until them, lock pessimistically.
- //
- // `run/reflection-sync-subtypes.scala` fails about 1/50 times otherwise.
- //
- // if (isFullyInitialized) body
- // else gilSynchronized { body }
+ /** (Things written in this comment only apply to runtime reflection. Compile-time reflection,
+ * especially across phases and runs, is somewhat more complicated, but we won't be touching it,
+ * because at the moment we only care about synchronizing runtime reflection).
+ *
+ * As it has been noted on multiple occasions, generally speaking, reflection artifacts aren't thread-safe.
+ * Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number
+ * of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make
+ * sure that things stay deterministic). However, in case of symbols there's hope, because it's only during
+ * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable
+ * (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe.
+ *
+ * Note that by saying "symbols become immutable" I mean literally that: the symbol itself, though not necessarily its info.
+ * In the very common case of PackageClassSymbols, even when a symbol finishes its initialization and becomes immutable, its info forever remains mutable.
+ * Therefore even if we no longer need to synchronize a PackageClassSymbol after it's initialized, we still have to take
+ * care of its ClassInfoType (or, more precisely, of the underlying Scope), but that's done elsewhere, and
+ * here we don't need to worry about that.
+ *
+ * Okay, so now we simply check `Symbol.isInitialized` and if it's true, then everything's fine? Haha, nope!
+ * The thing is that some completers call sym.setInfo when still in-flight and then proceed with initialization
+ * (e.g. see LazyPackageType). Consequently, setInfo sets _validTo to the current period, which means that after
+ * a call to setInfo, isInitialized will start returning true. Unfortunately, this doesn't mean that the info becomes
+ * ready to be used, because subsequent initialization might change the info.
+ *
+ * Therefore we need to somehow distinguish between initialized and really initialized symbol states.
+ * Okay, let's do it on a per-completer basis. We have seven kinds of completers to worry about:
+ * 1) LazyPackageType that initializes packages and their underlying package classes
+ * 2) TopClassCompleter that initializes top-level Scala-based class-module companion pairs of static definitions
+ * 3) LazyTypeRef and LazyTypeRefAndAlias set up by TopClassCompleter that initialize (transitive members) of top-level classes/modules
+ * 4) FromJavaClassCompleter that does the same for both top-level and non-toplevel Java-based classes/modules
+ * 5) Fully-initialized signatures of non-class/module Java-based reflection artifacts
+ * 6) Importing completer that transfers metadata from one universe to another
+ * 7) Signatures of special symbols such as roots and symbolsNotPresentInBytecode
+ *
+ * The mechanisms underlying completion are quite complex, and it'd be only natural to suppose that over time we're going to overlook something.
+ * Wrt isThreadsafe we could have two wrong situations: false positives (isThreadsafe = true, but the symbol isn't actually threadsafe)
+ * and false negatives (isThreadsafe = false, but the symbol is actually threadsafe). However, even though both are wrong, only the former
+ * is actively malicious. Indeed, false positives might lead to races, inconsistent state and crashes, while the latter would only cause
+ * `initialize` to be called and a gil to be taken on every potentially auto-initializable operation. Unpleasant yes, but still robust.
+ *
+ * What makes me hopeful is that:
+ * 1) By default (e.g. if some new completion mechanism gets introduced for a special flavor of symbols and we forget to call markCompleted)
+ * isThreadsafe is always in false negative state, which is unpleasant but safe.
+ * 2) Calls to `markCompleted`, which are the only potential source of erroneous behavior, are few and relatively easy to place:
+ * just put them right before your completer's `complete` returns, and you should be fine.
+ *
+ * upd. Actually, there's another problem: not keeping the initialization mask up-to-date. If we're not careful enough,
+ * then it might so happen that getting a certain flag that the compiler assumes to be definitively set will spuriously
+ * return isThreadsafe(purpose = FlagsOp(<flag>)) = false and that will lead to spurious auto-initialization,
+ * which will cause an SO or a cyclic reference or some other crash. I've done my best to go through all possible completers
+ * and call `markFlagsCompleted` where appropriate, but again over time something might be overlooked, so to guard against that
+ * I'm only considering TopLevelPickledFlags to be sources of potential initialization. This ensures that such system flags as
+ * isMethod, isModule or isPackage are never going to auto-initialize.
+ */
+ override def isThreadsafe(purpose: SymbolOps) = {
+ if (isCompilerUniverse) false
+ else if (_initialized) true
+ else purpose.isFlagRelated && (_initializationMask & purpose.mask & TopLevelPickledFlags) == 0
+ }
+
+ /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders
+ * about the status of initialization of the underlying symbol.
+ *
+ * Unfortunately, it's not as easy as just introducing the `markThreadsafe` method that would be called
+ * by the completers when they are really done (as opposed to `setInfo` that, as mentioned above, doesn't mean anything).
+ *
+ * Since we also want to auto-initialize symbols when certain methods are being called (`Symbol.hasFlag` for example),
+ * we need to track the identity of the initializer, so as to block until initialization is complete if the caller
+ * comes from a different thread, but to skip auto-initialization if we're the initializing thread.
+ *
+ * Just a volatile var is fine, because:
+ * 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized
+ * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts.
+ * 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone
+ * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet)
+ * or a no-op (if the symbol is already inited), and that is fine in both cases.
+ *
+ * upd. It looks like we also need to keep track of a mask of initialized flags to make sure
+ * that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag).
+ * Due to the same reasoning as above, a single volatile var is enough to store the mask.
+ */
+ @volatile private[this] var _initialized = false
+ @volatile private[this] var _initializationMask = TopLevelPickledFlags
+ override def markFlagsCompleted(mask: Long): this.type = { _initializationMask = _initializationMask & ~mask; this }
+ override def markAllCompleted(): this.type = { _initializationMask = 0L; _initialized = true; this }
+
+ def gilSynchronizedIfNotThreadsafe[T](body: => T): T = {
+ // TODO: debug and fix the race that doesn't allow us to uncomment this optimization
+ // if (isCompilerUniverse || isThreadsafe(purpose = AllOps)) body
+ // else gilSynchronized { body }
gilSynchronized { body }
}
- override def validTo = gilSynchronizedIfNotInited { super.validTo }
- override def info = gilSynchronizedIfNotInited { super.info }
- override def rawInfo: Type = gilSynchronizedIfNotInited { super.rawInfo }
+ override def validTo = gilSynchronizedIfNotThreadsafe { super.validTo }
+ override def info = gilSynchronizedIfNotThreadsafe { super.info }
+ override def rawInfo: Type = gilSynchronizedIfNotThreadsafe { super.rawInfo }
+ override def typeSignature: Type = gilSynchronizedIfNotThreadsafe { super.typeSignature }
+ override def typeSignatureIn(site: Type): Type = gilSynchronizedIfNotThreadsafe { super.typeSignatureIn(site) }
- override def typeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ override def typeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
if (isCompilerUniverse) super.typeParams
else {
if (isMonomorphicType) Nil
@@ -63,7 +146,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
}
}
}
- override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
if (isCompilerUniverse) super.unsafeTypeParams
else {
if (isMonomorphicType) Nil
@@ -71,8 +154,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
}
}
- override def isStable: Boolean = gilSynchronized { super.isStable }
-
// ------ creators -------------------------------------------------------------------
override protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol =
@@ -126,7 +207,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
// we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
// unfortunately we cannot elide this lock, because the cache depends on `pre`
private lazy val typeAsMemberOfLock = new Object
- override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotInited { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
+ override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
}
trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
@@ -135,7 +216,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
// unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on
// temporarily assigning NoType to tpeCache to detect cyclic reference errors
private lazy val tpeLock = new Object
- override def tpe_* : Type = gilSynchronizedIfNotInited { tpeLock.synchronized { super.tpe_* } }
+ override def tpe_* : Type = gilSynchronizedIfNotThreadsafe { tpeLock.synchronized { super.tpe_* } }
}
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol
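
A minimal sketch of the contract described in the comment above, i.e. where the mark* calls belong in a completer. It is written as if inside the runtime reflection SymbolTable cake; DemoCompleter and computeInfo are invented placeholders, not part of this patch:

    // Hypothetical completer, for illustration only.
    class DemoCompleter extends LazyType {
      override def complete(sym: Symbol): Unit = {
        val info = computeInfo(sym)          // placeholder for the real signature computation
        sym setInfo info                     // setInfo alone does not make the symbol threadsafe
        sym.markFlagsCompleted(mask = -1L)   // all flags are now known to be final
        sym.markAllCompleted()               // only now may isThreadsafe start returning true
      }
    }
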
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index 83d471f91e..9bcf85dd6f 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -50,13 +50,6 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
private lazy val _intersectionWitness = mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]())
override def intersectionWitness = _intersectionWitness.get
- private lazy val _volatileRecursions = mkThreadLocalStorage(0)
- override def volatileRecursions = _volatileRecursions.get
- override def volatileRecursions_=(value: Int) = _volatileRecursions.set(value)
-
- private lazy val _pendingVolatiles = mkThreadLocalStorage(new mutable.HashSet[Symbol])
- override def pendingVolatiles = _pendingVolatiles.get
-
private lazy val _subsametypeRecursions = mkThreadLocalStorage(0)
override def subsametypeRecursions = _subsametypeRecursions.get
override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value)
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 8fba1e538e..d261fc5be9 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -18,7 +18,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
import scala.tools.nsc.util.Exceptional.unwrap
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
@@ -534,8 +534,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
var code = ""
var bound = false
- @throws[ScriptException]
- def compile(script: String): CompiledScript = {
+ def compiled(script: String): CompiledScript = {
if (!bound) {
quietBind("engine" -> this.asInstanceOf[ScriptEngine])
bound = true
@@ -562,18 +561,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- @throws[ScriptException]
- def compile(reader: java.io.Reader): CompiledScript = {
- val writer = new java.io.StringWriter()
- var c = reader.read()
- while(c != -1) {
- writer.write(c)
- c = reader.read()
- }
- reader.close()
- compile(writer.toString())
- }
-
private class WrappedRequest(val req: Request) extends CompiledScript {
var recorded = false
@@ -1014,10 +1001,16 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
@throws[ScriptException]
- def eval(script: String, context: ScriptContext): Object = compile(script).eval(context)
+ def compile(script: String): CompiledScript = eval("new javax.script.CompiledScript { def eval(context: javax.script.ScriptContext): Object = { " + script + " }.asInstanceOf[Object]; def getEngine: javax.script.ScriptEngine = engine }").asInstanceOf[CompiledScript]
+
+ @throws[ScriptException]
+ def compile(reader: java.io.Reader): CompiledScript = compile(stringFromReader(reader))
+
+ @throws[ScriptException]
+ def eval(script: String, context: ScriptContext): Object = compiled(script).eval(context)
@throws[ScriptException]
- def eval(reader: java.io.Reader, context: ScriptContext): Object = compile(reader).eval(context)
+ def eval(reader: java.io.Reader, context: ScriptContext): Object = eval(stringFromReader(reader), context)
override def finalize = close
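
For reference, the JSR-223 surface after this change: `eval` routes through the internal `compiled` helper, while the public `compile` overloads wrap the script in a synthetic CompiledScript. A minimal usage sketch, modelled on the removed t7843 test and assuming the engine is registered under the name "scala":

    import javax.script._
    import scala.tools.nsc.interpreter.IMain

    object Jsr223Demo extends App {
      val engine = new ScriptEngineManager().getEngineByName("scala")
      engine.asInstanceOf[IMain].settings.usejavacp.value = true
      engine.put("n", 10)
      engine.eval("1 to n.asInstanceOf[Int] foreach print")              // plain eval
      val cs = engine.asInstanceOf[Compilable].compile("n.asInstanceOf[Int] * 2")
      println(cs.eval())                                                 // compile once, eval later
    }
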
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index e5dbaa3fd5..5ea1443a19 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -122,6 +122,11 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
)
+ val docAuthor = BooleanSetting (
+ "-author",
+ "Include authors."
+ )
+
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -207,7 +212,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
- docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docAuthor, docDiagrams, docDiagramsDebug, docDiagramsDotPath,
docDiagramsDotTimeout, docDiagramsDotRestart,
docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 119d4e0143..26ee005d3e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -351,6 +351,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
+ val authorComment =
+ if (! s.docAuthor || mbr.comment.isEmpty ||
+ mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NodeSeq.Empty
+ else <div class="comment cmt">
+ {if (mbr.comment.get.authors.size > 1) <h6>Authors:</h6> else <h6>Author:</h6>}
+ { mbr.comment.get.authors map bodyToHtml}
+ </div>
+
val paramComments = {
val prs: List[ParameterEntity] = mbr match {
case cls: Class => cls.typeParams ::: cls.valueParams.flatten
@@ -681,7 +689,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
+ memberComment ++ authorComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
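
A small sketch of source that the new `-author` option would pick up; the class and author names are invented for illustration, and the author blocks are rendered only when `-author` is passed to scaladoc:

    /** Frobnicates widgets.
     *
     *  @author Jane Doe
     *  @author John Roe
     */
    class Widget {
      def frobnicate(): Unit = ()
    }
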
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 8f217e087c..ef84ac42ba 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -10,6 +10,7 @@ import diagram._
import scala.collection._
import scala.util.matching.Regex
+import scala.reflect.macros.internal.macroImpl
import symtab.Flags
import io._
@@ -80,7 +81,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- def annotations = sym.annotations.map(makeAnnotation)
+ def annotations = sym.annotations.filterNot(_.tpe =:= typeOf[macroImpl]).map(makeAnnotation)
def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
def isType = sym.name.isTypeName
}
@@ -145,6 +146,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
+ if (sym.isMacro) fgs += Paragraph(Text("macro"))
fgs.toList
}
def deprecation =
diff --git a/test/disabled/run/t7843-jsr223-service.scala b/test/disabled/run/t7843-jsr223-service.scala
deleted file mode 100644
index e2ea850698..0000000000
--- a/test/disabled/run/t7843-jsr223-service.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*DISABLED:
- For Paul, it steals focus when it runs.
-
- For me, it fails with some platform specific extra output:
-
- -ScriptEngineManager providers.next(): javax.script.ScriptEngineFactory: Provider apple.applescript.AppleScriptEngin
- n: Object = 10
- 12345678910
-*/
-import javax.script._
-import scala.tools.nsc.interpreter.IMain
-
-object Test extends App {
- val engine = new ScriptEngineManager getEngineByName "scala"
- engine.asInstanceOf[IMain].settings.usejavacp.value = true
- engine put ("n", 10)
- engine eval "1 to n.asInstanceOf[Int] foreach print"
-}
diff --git a/test/files/filters b/test/files/filters
index 70d7dcff70..51a7507848 100644
--- a/test/files/filters
+++ b/test/files/filters
@@ -4,3 +4,5 @@ Java HotSpot\(TM\) .* warning:
# Hotspot receiving VM options through the $_JAVA_OPTIONS
# env variable outputs them on stderr
Picked up _JAVA_OPTIONS:
+# Filter out a message caused by this bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=8021205
+objc\[\d+\]: Class JavaLaunchHelper is implemented in both .* and .*\. One of the two will be used\. Which one is undefined\.
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
index 549abd5e64..b6951f71ee 100644
--- a/test/files/jvm/t6941/Analyzed_1.scala
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -6,6 +6,6 @@ class SameBytecode {
}
def b(xs: List[Int]) = xs match {
- case xs: ::[Int] => xs.hd$1
+ case xs: ::[Int] => xs.head
}
} \ No newline at end of file
diff --git a/test/files/neg/abstract-report2.check b/test/files/neg/abstract-report2.check
index 6ea949353a..9be3d822f2 100644
--- a/test/files/neg/abstract-report2.check
+++ b/test/files/neg/abstract-report2.check
@@ -61,7 +61,7 @@ it has 13 unimplemented members.
class Baz[T] extends Collection[T]
^
-abstract-report2.scala:11: error: class Dingus needs to be abstract, since:
+abstract-report2.scala:15: error: class Dingus needs to be abstract, since:
it has 24 unimplemented members.
/** As seen from class Dingus, the missing signatures are as follows.
* For convenience, these are usable as stub implementations.
@@ -84,9 +84,6 @@ it has 24 unimplemented members.
def toIterator: Iterator[(Set[Int], String)] = ???
def toStream: Stream[(Set[Int], String)] = ???
- // Members declared in scala.math.Ordering
- def compare(x: List[Int],y: List[Int]): Int = ???
-
// Members declared in scala.collection.TraversableOnce
def copyToArray[B >: (Set[Int], String)](xs: Array[B],start: Int,len: Int): Unit = ???
def exists(p: ((Set[Int], String)) => Boolean): Boolean = ???
@@ -98,6 +95,9 @@ it has 24 unimplemented members.
def seq: scala.collection.TraversableOnce[(Set[Int], String)] = ???
def toTraversable: Traversable[(Set[Int], String)] = ???
+ // Members declared in Xyz
+ def foo(x: List[Int]): Boolean = ???
+
class Dingus extends Bippy[String, Set[Int], List[Int]]
^
four errors found
diff --git a/test/files/neg/abstract-report2.scala b/test/files/neg/abstract-report2.scala
index b6327b0766..8825340d4a 100644
--- a/test/files/neg/abstract-report2.scala
+++ b/test/files/neg/abstract-report2.scala
@@ -6,6 +6,10 @@ class Bar extends Collection[List[_ <: String]]
class Baz[T] extends Collection[T]
-trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Ordering[T3]
+trait Xyz[T] {
+ def foo(x: T): Boolean
+}
+
+trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Xyz[T3]
class Dingus extends Bippy[String, Set[Int], List[Int]] \ No newline at end of file
diff --git a/test/files/neg/accesses.check b/test/files/neg/accesses.check
index 5a5e03233e..db58af12ce 100644
--- a/test/files/neg/accesses.check
+++ b/test/files/neg/accesses.check
@@ -1,3 +1,7 @@
+accesses.scala:23: error: overriding method f2 in class A of type ()Unit;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Unit = ()
+ ^
accesses.scala:24: error: overriding method f3 in class A of type ()Unit;
method f3 has weaker access privileges; it should be at least protected
private[p2] def f3(): Unit = ()
@@ -10,4 +14,4 @@ accesses.scala:26: error: overriding method f5 in class A of type ()Unit;
method f5 has weaker access privileges; it should be at least protected[p1]
protected[p2] def f5(): Unit
^
-three errors found
+four errors found
diff --git a/test/files/neg/accesses2.check b/test/files/neg/accesses2.check
index 554a7b4c81..66cf9a116e 100644
--- a/test/files/neg/accesses2.check
+++ b/test/files/neg/accesses2.check
@@ -1,4 +1,12 @@
+accesses2.scala:6: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Int = 1
+ ^
accesses2.scala:5: error: class B1 needs to be abstract, since method f2 in class A of type ()Int is not defined
class B1 extends A {
^
-one error found
+accesses2.scala:9: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Int = 1
+ ^
+three errors found
diff --git a/test/files/neg/missing-param-type-tuple.check b/test/files/neg/missing-param-type-tuple.check
index bc46ba1023..3a4258ff8c 100644
--- a/test/files/neg/missing-param-type-tuple.check
+++ b/test/files/neg/missing-param-type-tuple.check
@@ -1,6 +1,6 @@
missing-param-type-tuple.scala:3: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 2-Tuple.
- Consider a pattern matching anoynmous function, `{ case (a, b) => ... }`
+ Consider a pattern matching anonymous function, `{ case (a, b) => ... }`
val x: ((Int, Int)) => Int = (a, b) => 0
^
missing-param-type-tuple.scala:3: error: missing parameter type
@@ -8,7 +8,7 @@ missing-param-type-tuple.scala:3: error: missing parameter type
^
missing-param-type-tuple.scala:5: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 3-Tuple.
- Consider a pattern matching anoynmous function, `{ case (param1, ..., param3) => ... }`
+ Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... }`
val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
^
missing-param-type-tuple.scala:5: error: missing parameter type
@@ -19,7 +19,7 @@ missing-param-type-tuple.scala:5: error: missing parameter type
^
missing-param-type-tuple.scala:7: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 3-Tuple.
- Consider a pattern matching anoynmous function, `{ case (param1, ..., param3) => ... }`
+ Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... }`
val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
^
missing-param-type-tuple.scala:7: error: missing parameter type
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 880ddc4327..20ddd55f1f 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -88,7 +88,7 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
- required: ?T
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
Error occurred in an application involving default arguments.
test4()
^
diff --git a/test/files/neg/no-implicit-to-anyref-any-val.check b/test/files/neg/no-implicit-to-anyref-any-val.check
new file mode 100644
index 0000000000..5953e1bd6d
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref-any-val.check
@@ -0,0 +1,34 @@
+no-implicit-to-anyref-any-val.scala:11: error: the result type of an implicit conversion must be more specific than AnyRef
+ 1: AnyRef
+ ^
+no-implicit-to-anyref-any-val.scala:17: error: type mismatch;
+ found : Any
+ required: AnyRef
+ (null: Any): AnyRef
+ ^
+no-implicit-to-anyref-any-val.scala:21: error: type mismatch;
+ found : AnyVal
+ required: AnyRef
+ (0: AnyVal): AnyRef
+ ^
+no-implicit-to-anyref-any-val.scala:27: error: type mismatch;
+ found : Test.AV
+ required: AnyRef
+Note that AV extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ new AV(0): AnyRef
+ ^
+no-implicit-to-anyref-any-val.scala:30: error: the result type of an implicit conversion must be more specific than AnyVal
+ "": AnyVal
+ ^
+no-implicit-to-anyref-any-val.scala:32: error: type mismatch;
+ found : Object
+ required: AnyVal
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method ArrowAssoc in object Predef of type [A](self: A)ArrowAssoc[A]
+ and method Ensuring in object Predef of type [A](self: A)Ensuring[A]
+ are possible conversion functions from Object to AnyVal
+ new Object() : AnyVal
+ ^
+6 errors found
diff --git a/test/files/neg/no-implicit-to-anyref.scala b/test/files/neg/no-implicit-to-anyref-any-val.scala
index 3e3d373e38..f5daf541af 100644
--- a/test/files/neg/no-implicit-to-anyref.scala
+++ b/test/files/neg/no-implicit-to-anyref-any-val.scala
@@ -26,4 +26,8 @@ object Test {
locally {
new AV(0): AnyRef
}
+
+ "": AnyVal
+
+ new Object() : AnyVal
}
diff --git a/test/files/neg/no-implicit-to-anyref.check b/test/files/neg/no-implicit-to-anyref.check
deleted file mode 100644
index fe417ad8b0..0000000000
--- a/test/files/neg/no-implicit-to-anyref.check
+++ /dev/null
@@ -1,22 +0,0 @@
-no-implicit-to-anyref.scala:11: error: the result type of an implicit conversion must be more specific than AnyRef
- 1: AnyRef
- ^
-no-implicit-to-anyref.scala:17: error: type mismatch;
- found : Any
- required: AnyRef
- (null: Any): AnyRef
- ^
-no-implicit-to-anyref.scala:21: error: type mismatch;
- found : AnyVal
- required: AnyRef
- (0: AnyVal): AnyRef
- ^
-no-implicit-to-anyref.scala:27: error: type mismatch;
- found : Test.AV
- required: AnyRef
-Note that AV extends Any, not AnyRef.
-Such types can participate in value classes, but instances
-cannot appear in singleton types or in reference comparisons.
- new AV(0): AnyRef
- ^
-four errors found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check
index 25e5b8d75a..14fef16e01 100644
--- a/test/files/neg/quasiquotes-syntax-error-position.check
+++ b/test/files/neg/quasiquotes-syntax-error-position.check
@@ -29,4 +29,7 @@ quasiquotes-syntax-error-position.scala:13: error: end of quote expected but 'ca
quasiquotes-syntax-error-position.scala:14: error: ')' expected but end of quote found.
pq"$a(bar"
^
-10 errors found
+quasiquotes-syntax-error-position.scala:15: error: ':' expected but ')' found.
+ q"def foo(x)"
+ ^
+11 errors found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.scala b/test/files/neg/quasiquotes-syntax-error-position.scala
index b97af52cfc..7b1d66ba00 100644
--- a/test/files/neg/quasiquotes-syntax-error-position.scala
+++ b/test/files/neg/quasiquotes-syntax-error-position.scala
@@ -12,4 +12,5 @@ object test extends App {
tq"$t => $t $t]"
cq"pattern => body ; case pattern2 =>"
pq"$a(bar"
-} \ No newline at end of file
+ q"def foo(x)"
+}
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
index 457f497f2f..703846ad62 100644
--- a/test/files/neg/stringinterpolation_macro-neg.check
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -1,61 +1,61 @@
-stringinterpolation_macro-neg.scala:8: error: too few parts
+stringinterpolation_macro-neg.scala:13: error: there are no parts
new StringContext().f()
^
-stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:14: error: too few arguments for interpolated string
new StringContext("", " is ", "%2d years old").f(s)
^
-stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string
+stringinterpolation_macro-neg.scala:15: error: too many arguments for interpolated string
new StringContext("", " is ", "%2d years old").f(s, d, d)
^
-stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:16: error: too few arguments for interpolated string
new StringContext("", "").f()
^
-stringinterpolation_macro-neg.scala:14: error: type mismatch;
+stringinterpolation_macro-neg.scala:19: error: type mismatch;
found : String
required: Boolean
f"$s%b"
^
-stringinterpolation_macro-neg.scala:15: error: type mismatch;
+stringinterpolation_macro-neg.scala:20: error: type mismatch;
found : String
required: Char
f"$s%c"
^
-stringinterpolation_macro-neg.scala:16: error: type mismatch;
+stringinterpolation_macro-neg.scala:21: error: type mismatch;
found : Double
required: Char
f"$f%c"
^
-stringinterpolation_macro-neg.scala:17: error: type mismatch;
+stringinterpolation_macro-neg.scala:22: error: type mismatch;
found : String
required: Int
f"$s%x"
^
-stringinterpolation_macro-neg.scala:18: error: type mismatch;
+stringinterpolation_macro-neg.scala:23: error: type mismatch;
found : Boolean
required: Int
f"$b%d"
^
-stringinterpolation_macro-neg.scala:19: error: type mismatch;
+stringinterpolation_macro-neg.scala:24: error: type mismatch;
found : String
required: Int
f"$s%d"
^
-stringinterpolation_macro-neg.scala:20: error: type mismatch;
+stringinterpolation_macro-neg.scala:25: error: type mismatch;
found : Double
required: Int
f"$f%o"
^
-stringinterpolation_macro-neg.scala:21: error: type mismatch;
+stringinterpolation_macro-neg.scala:26: error: type mismatch;
found : String
required: Double
f"$s%e"
^
-stringinterpolation_macro-neg.scala:22: error: type mismatch;
+stringinterpolation_macro-neg.scala:27: error: type mismatch;
found : Boolean
required: Double
f"$b%f"
^
-stringinterpolation_macro-neg.scala:27: error: type mismatch;
+stringinterpolation_macro-neg.scala:32: error: type mismatch;
found : String
required: Int
Note that implicit conversions are not applicable because they are ambiguous:
@@ -64,7 +64,109 @@ Note that implicit conversions are not applicable because they are ambiguous:
are possible conversion functions from String to Int
f"$s%d"
^
-stringinterpolation_macro-neg.scala:30: error: illegal conversion character
+stringinterpolation_macro-neg.scala:35: error: illegal conversion character 'i'
f"$s%i"
^
-15 errors found
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '+'
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ' '
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '0'
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ','
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '('
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:39: error: Only '-' allowed for c conversion
+ f"$c%#+ 0,(c"
+ ^
+stringinterpolation_macro-neg.scala:40: error: # not allowed for d conversion
+ f"$d%#d"
+ ^
+stringinterpolation_macro-neg.scala:41: error: ',' only allowed for d conversion of integral types
+ f"$d%,x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use '+' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use ' ' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use '(' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:43: error: ',' not allowed for a, A
+ f"$f%,(a"
+ ^
+stringinterpolation_macro-neg.scala:43: error: '(' not allowed for a, A
+ f"$f%,(a"
+ ^
+stringinterpolation_macro-neg.scala:44: error: Only '-' allowed for date/time conversions
+ f"$t%#+ 0,(tT"
+ ^
+stringinterpolation_macro-neg.scala:47: error: precision not allowed
+ f"$c%.2c"
+ ^
+stringinterpolation_macro-neg.scala:48: error: precision not allowed
+ f"$d%.2d"
+ ^
+stringinterpolation_macro-neg.scala:49: error: precision not allowed
+ f"%.2%"
+ ^
+stringinterpolation_macro-neg.scala:50: error: precision not allowed
+ f"%.2n"
+ ^
+stringinterpolation_macro-neg.scala:51: error: precision not allowed
+ f"$f%.2a"
+ ^
+stringinterpolation_macro-neg.scala:52: error: precision not allowed
+ f"$t%.2tT"
+ ^
+stringinterpolation_macro-neg.scala:55: error: No last arg
+ f"%<s"
+ ^
+stringinterpolation_macro-neg.scala:56: error: No last arg
+ f"%<c"
+ ^
+stringinterpolation_macro-neg.scala:57: error: No last arg
+ f"%<tT"
+ ^
+stringinterpolation_macro-neg.scala:58: error: Argument index out of range
+ f"${8}%d ${9}%d%3$$d"
+ ^
+stringinterpolation_macro-neg.scala:59: error: Argument index out of range
+ f"${8}%d ${9}%d%0$$d"
+ ^
+stringinterpolation_macro-neg.scala:62: warning: Index is not this arg
+ f"${8}%d ${9}%1$$d"
+ ^
+stringinterpolation_macro-neg.scala:63: warning: Argument index ignored if '<' flag is present
+ f"$s%s $s%s %1$$<s"
+ ^
+stringinterpolation_macro-neg.scala:64: warning: Index is not this arg
+ f"$s%s $s%1$$s"
+ ^
+stringinterpolation_macro-neg.scala:67: error: type mismatch;
+ found : String
+ required: java.util.Formattable
+ f"$s%#s"
+ ^
+stringinterpolation_macro-neg.scala:70: error: 'G' doesn't seem to be a date or time conversion
+ f"$t%tG"
+ ^
+stringinterpolation_macro-neg.scala:71: error: Date/time conversion must have two characters
+ f"$t%t"
+ ^
+stringinterpolation_macro-neg.scala:72: error: Missing conversion operator in '%10.5'; use %% for literal %, %n for newline
+ f"$s%10.5"
+ ^
+stringinterpolation_macro-neg.scala:75: error: conversions must follow a splice; use %% for literal %, %n for newline
+ f"${d}random-leading-junk%d"
+ ^
+three warnings found
+45 errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala
index ac9d97d678..3869d42d66 100644
--- a/test/files/neg/stringinterpolation_macro-neg.scala
+++ b/test/files/neg/stringinterpolation_macro-neg.scala
@@ -3,6 +3,11 @@ object Test extends App {
val d = 8
val b = false
val f = 3.14159
+ val c = 'c'
+ val t = new java.util.Date
+ val x = new java.util.Formattable {
+ def formatTo(ff: java.util.Formatter, g: Int, w: Int, p: Int): Unit = ff format "xxx"
+ }
// 1) number of arguments
new StringContext().f()
@@ -28,4 +33,44 @@ object Test extends App {
}
f"$s%i"
+
+ // 3) flag mismatches
+ f"$s%+ 0,(s"
+ f"$c%#+ 0,(c"
+ f"$d%#d"
+ f"$d%,x"
+ f"$d%+ (x"
+ f"$f%,(a"
+ f"$t%#+ 0,(tT"
+
+ // 4) bad precisions
+ f"$c%.2c"
+ f"$d%.2d"
+ f"%.2%"
+ f"%.2n"
+ f"$f%.2a"
+ f"$t%.2tT"
+
+ // 5) bad indexes
+ f"%<s"
+ f"%<c"
+ f"%<tT"
+ f"${8}%d ${9}%d%3$$d"
+ f"${8}%d ${9}%d%0$$d"
+
+ // warnings
+ f"${8}%d ${9}%1$$d"
+ f"$s%s $s%s %1$$<s"
+ f"$s%s $s%1$$s"
+
+ // 6) bad arg types
+ f"$s%#s"
+
+ // 7) misunderstood conversions
+ f"$t%tG"
+ f"$t%t"
+ f"$s%10.5"
+
+ // 8) other brain failures
+ f"${d}random-leading-junk%d"
}
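
For contrast with the combinations rejected above, a few format strings that the stricter interpolator still accepts under plain java.util.Formatter rules (values invented for illustration):

    object FormatOk extends App {
      val d = 1234
      val s = "hello"
      println(f"$d%,d")        // ',' is allowed for d on an integral argument
      println(f"$s%10.5s")     // width and precision are allowed for s
      println(f"%%")           // literal percent
      println(f"%n")           // platform newline
      println(f"${7}%d %<d")   // '<' reuses the previous argument
    }
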
diff --git a/test/files/neg/t6844.check b/test/files/neg/t6844.check
new file mode 100644
index 0000000000..1fc2485520
--- /dev/null
+++ b/test/files/neg/t6844.check
@@ -0,0 +1,6 @@
+t6844.scala:4: error: type mismatch;
+ found : reflect.runtime.universe.TermName
+ required: reflect.runtime.universe.Tree
+ q"def foo($x)"
+ ^
+one error found
diff --git a/test/files/neg/t6844.scala b/test/files/neg/t6844.scala
new file mode 100644
index 0000000000..809d9d0f98
--- /dev/null
+++ b/test/files/neg/t6844.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+object Test extends App {
+ val x = TermName("x")
+ q"def foo($x)"
+}
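
As the error above indicates, a bare TermName cannot be spliced where the quasiquote expects a parameter tree. A sketch of one way to make it work, assuming standard 2.11 quasiquote splicing of a ValDef into parameter position (object and method names invented for illustration):

    import scala.reflect.runtime.universe._

    object T6844Fixed extends App {
      val x     = TermName("x")
      val param = q"val $x: Int"            // a ValDef, i.e. a Tree
      val defn  = q"def foo($param) = ()"   // parameters must be spliced as trees, not bare names
      println(show(defn))
    }
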
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
index d2c40f4df8..61c33f99b1 100644
--- a/test/files/neg/t7325.check
+++ b/test/files/neg/t7325.check
@@ -1,13 +1,13 @@
-t7325.scala:2: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:2: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%")
^
-t7325.scala:4: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:4: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%%%")
^
-t7325.scala:6: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:6: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%%%%%")
^
-t7325.scala:16: error: wrong conversion string
+t7325.scala:16: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"${0}%")
^
t7325.scala:19: error: conversions must follow a splice; use %% for literal %, %n for newline
diff --git a/test/files/neg/t8143a.check b/test/files/neg/t8143a.check
new file mode 100644
index 0000000000..4e11000a2a
--- /dev/null
+++ b/test/files/neg/t8143a.check
@@ -0,0 +1,5 @@
+t8143a.scala:2: error: overriding method f in class Foo of type => Int;
+ method f has weaker access privileges; it should not be private
+class Bar extends Foo { private def f = 10 }
+ ^
+one error found
diff --git a/test/files/neg/t8143a.scala b/test/files/neg/t8143a.scala
new file mode 100644
index 0000000000..4ec539e671
--- /dev/null
+++ b/test/files/neg/t8143a.scala
@@ -0,0 +1,15 @@
+class Foo { def f = 5 }
+class Bar extends Foo { private def f = 10 }
+
+
+class Foo1 { private def f = 5 }
+class Bar1 extends Foo1 { def f = 10 } // okay
+
+class Foo2 { private def f = 5 }
+class Bar2 extends Foo2 { private def f = 10 } // okay
+
+class Foo3 { private[this] def f = 5 }
+class Bar3 extends Foo3 { private def f = 10 } // okay
+
+class Foo4 { private def f = 5 }
+class Bar4 extends Foo4 { private[this] def f = 10 } // okay \ No newline at end of file
diff --git a/test/files/neg/t8182.check b/test/files/neg/t8182.check
new file mode 100644
index 0000000000..a156d70883
--- /dev/null
+++ b/test/files/neg/t8182.check
@@ -0,0 +1,22 @@
+t8182.scala:4: error: illegal start of simple pattern
+}
+^
+t8182.scala:7: error: illegal start of simple pattern
+}
+^
+t8182.scala:6: error: type application is not allowed in pattern
+ val a b[B] // error then continue as for X
+ ^
+t8182.scala:10: error: illegal start of simple pattern
+ case a b[B] => // bumpy recovery
+ ^
+t8182.scala:10: error: type application is not allowed in pattern
+ case a b[B] => // bumpy recovery
+ ^
+t8182.scala:11: error: '=>' expected but '}' found.
+ }
+ ^
+t8182.scala:16: error: type application is not allowed in pattern
+ case a B[T] b =>
+ ^
+7 errors found
diff --git a/test/files/neg/t8182.scala b/test/files/neg/t8182.scala
new file mode 100644
index 0000000000..1b3bc9821f
--- /dev/null
+++ b/test/files/neg/t8182.scala
@@ -0,0 +1,18 @@
+
+trait X {
+ val a b // something missing
+}
+trait Y {
+ val a b[B] // error then continue as for X
+}
+trait Z {
+ (null: Any) match {
+ case a b[B] => // bumpy recovery
+ }
+}
+object B { def unapply[W](a: Any) = Some((1,2)) }
+trait Z {
+ (null: Any) match {
+ case a B[T] b =>
+ }
+}
diff --git a/test/files/neg/t8207.check b/test/files/neg/t8207.check
new file mode 100644
index 0000000000..59facd897a
--- /dev/null
+++ b/test/files/neg/t8207.check
@@ -0,0 +1,7 @@
+t8207.scala:1: error: '.' expected but '}' found.
+class C { import C.this.toString }
+ ^
+t8207.scala:3: error: '.' expected but '}' found.
+class D { import D.this.toString }
+ ^
+two errors found
diff --git a/test/files/neg/t8207.scala b/test/files/neg/t8207.scala
new file mode 100644
index 0000000000..738ce381f4
--- /dev/null
+++ b/test/files/neg/t8207.scala
@@ -0,0 +1,3 @@
+class C { import C.this.toString }
+
+class D { import D.this.toString }
diff --git a/test/files/neg/t8228.check b/test/files/neg/t8228.check
new file mode 100644
index 0000000000..02eff4b1b7
--- /dev/null
+++ b/test/files/neg/t8228.check
@@ -0,0 +1,4 @@
+t8228.scala:4: error: recursive value foo needs type
+ val foo = foo(null)
+ ^
+one error found
diff --git a/test/files/neg/t8228.scala b/test/files/neg/t8228.scala
new file mode 100644
index 0000000000..19d71aeab4
--- /dev/null
+++ b/test/files/neg/t8228.scala
@@ -0,0 +1,7 @@
+object X {
+ def bar = {
+ def foo(x: Any) = ""
+ val foo = foo(null)
+ foo(null) // cycle in isApplicableBasedOnArity
+ }
+}
diff --git a/test/files/neg/t8237-default.check b/test/files/neg/t8237-default.check
new file mode 100644
index 0000000000..59fe21ed03
--- /dev/null
+++ b/test/files/neg/t8237-default.check
@@ -0,0 +1,13 @@
+t8237-default.scala:5: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+ test4(test4$default$1)
+ ^
+t8237-default.scala:5: error: type mismatch;
+ found : List[Int]
+ required: T[T[List[T[X forSome { type X }]]]]
+ test4(test4$default$1)
+ ^
+two errors found
diff --git a/test/files/neg/t8237-default.scala b/test/files/neg/t8237-default.scala
new file mode 100644
index 0000000000..f695aa523f
--- /dev/null
+++ b/test/files/neg/t8237-default.scala
@@ -0,0 +1,29 @@
+// This test case was extracted from `names-defaults-neg.scala`.
+// It pinpoints an improvement to an error message that results from
+// a type inference failure.
+object Test extends App {
+ test4(test4$default$1)
+
+ def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]]) = ???
+ def test4$default$1[T[P]]: List[Int] = ???
+}
+
+/*
+OLD:
+ no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T
+ test4(test4$default$1)
+ ^
+
+NEW:
+
+no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+ test4(test4$default$1)
+*/
diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala
index e312f9abbf..84a01bcce5 100644
--- a/test/files/pos/annotated-original/M_1.scala
+++ b/test/files/pos/annotated-original/M_1.scala
@@ -2,6 +2,6 @@ import language.experimental.macros
import scala.reflect.macros.blackbox.Context
object M {
- def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.resetLocalAttrs(a.tree))
+ def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree))
def m(a: Any) = macro impl
}
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index b02864b994..fdf9c72c31 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -44,7 +44,7 @@ object Macros {
val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice
def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice
})
- val untyped = c.resetLocalAttrs(template.tree)
+ val untyped = c.untypecheck(template.tree)
c.Expr[T => U](untyped)
case _ => sys.error("Bad function type")
diff --git a/test/files/pos/implicit-anyval-2.10.flags b/test/files/pos/implicit-anyval-2.10.flags
new file mode 100644
index 0000000000..94c8056747
--- /dev/null
+++ b/test/files/pos/implicit-anyval-2.10.flags
@@ -0,0 +1 @@
+-Xsource:2.10
diff --git a/test/files/pos/implicit-anyval-2.10.scala b/test/files/pos/implicit-anyval-2.10.scala
new file mode 100644
index 0000000000..3082af73b8
--- /dev/null
+++ b/test/files/pos/implicit-anyval-2.10.scala
@@ -0,0 +1,3 @@
+object Test {
+ "": AnyVal // newly prohibited in 2.11, allowed under -Xsourse:2.10
+} \ No newline at end of file
diff --git a/test/files/pos/t2066-2.10-compat.flags b/test/files/pos/t2066-2.10-compat.flags
new file mode 100644
index 0000000000..94c8056747
--- /dev/null
+++ b/test/files/pos/t2066-2.10-compat.flags
@@ -0,0 +1 @@
+-Xsource:2.10
diff --git a/test/files/pos/t2066-2.10-compat.scala b/test/files/pos/t2066-2.10-compat.scala
new file mode 100644
index 0000000000..fb8103e4ad
--- /dev/null
+++ b/test/files/pos/t2066-2.10-compat.scala
@@ -0,0 +1,71 @@
+import language._
+trait A1 {
+ def f[T[_]] = ()
+}
+
+trait B1 extends A1 {
+ override def f[T[+_]] = ()
+}
+
+trait C1 extends A1 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A2 {
+ def f[T[+_]] = ()
+}
+
+trait B2 extends A2 {
+ override def f[T[_]] = () // okay
+}
+
+trait C2 extends A2 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A3 {
+ def f[T[-_]] = ()
+}
+
+trait B3 extends A3 {
+ override def f[T[_]] = () // okay
+}
+
+trait C3 extends A3 {
+ override def f[T[-_]] = ()
+}
+
+
+trait A4 {
+ def f[T[X[+_]]] = ()
+}
+
+trait B4 extends A4 {
+ override def f[T[X[_]]] = ()
+}
+
+trait A5 {
+ def f[T[X[-_]]] = ()
+}
+
+trait B5 extends A5 {
+ override def f[T[X[_]]] = ()
+}
+
+
+
+trait A6 {
+ def f[T[X[_]]] = ()
+}
+
+trait B6 extends A6 {
+ override def f[T[X[+_]]] = () // okay
+}
+trait C6 extends A6 {
+ override def f[T[X[_]]] = () // okay
+}
+trait D6 extends A6 {
+ override def f[T[X[-_]]] = ()
+}
diff --git a/test/files/pos/t6169/Exist.java b/test/files/pos/t6169/Exist.java
new file mode 100644
index 0000000000..dfc6b36b33
--- /dev/null
+++ b/test/files/pos/t6169/Exist.java
@@ -0,0 +1,4 @@
+public class Exist<T extends String> {
+ // java helpfully re-interprets Exist<?> as Exist<? extends String>
+ public Exist<?> foo() { throw new RuntimeException(); }
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/ExistF.java b/test/files/pos/t6169/ExistF.java
new file mode 100644
index 0000000000..70fabd74cf
--- /dev/null
+++ b/test/files/pos/t6169/ExistF.java
@@ -0,0 +1,4 @@
+public class ExistF<T extends ExistF<T>> {
+ // java helpfully re-interprets ExistF<?> as ExistF<?0 extends ExistF<?0>>
+ public ExistF<?> foo() { throw new RuntimeException(); }
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/ExistIndir.java b/test/files/pos/t6169/ExistIndir.java
new file mode 100644
index 0000000000..e66d1698c4
--- /dev/null
+++ b/test/files/pos/t6169/ExistIndir.java
@@ -0,0 +1,4 @@
+public class ExistIndir<T extends String, U extends T> {
+ // java helpfully re-interprets ExistIndir<?> as ExistIndir<? extends String>
+ public ExistIndir<?, ?> foo() { throw new RuntimeException(); }
+}
diff --git a/test/files/pos/t6169/OP.java b/test/files/pos/t6169/OP.java
new file mode 100644
index 0000000000..15e4c5640f
--- /dev/null
+++ b/test/files/pos/t6169/OP.java
@@ -0,0 +1 @@
+public abstract class OP<T> { }
diff --git a/test/files/pos/t6169/Skin.java b/test/files/pos/t6169/Skin.java
new file mode 100644
index 0000000000..780de1ee09
--- /dev/null
+++ b/test/files/pos/t6169/Skin.java
@@ -0,0 +1 @@
+public interface Skin<C extends Skinnable> { }
diff --git a/test/files/pos/t6169/Skinnable.java b/test/files/pos/t6169/Skinnable.java
new file mode 100644
index 0000000000..f91eaa30d8
--- /dev/null
+++ b/test/files/pos/t6169/Skinnable.java
@@ -0,0 +1,3 @@
+public interface Skinnable {
+ OP<Skin<?>> skinProperty();
+}
diff --git a/test/files/pos/t6169/skinnable.scala b/test/files/pos/t6169/skinnable.scala
new file mode 100644
index 0000000000..3ba2734526
--- /dev/null
+++ b/test/files/pos/t6169/skinnable.scala
@@ -0,0 +1,14 @@
+object ObjectProperty {
+ implicit def jfxObjectProperty2sfx[T](p: OP[T]) = new ObjectProperty[T](p)
+}
+
+class ObjectProperty[T](val delegate: OP[T])
+
+trait TestWildcardBoundInference {
+ def delegate: Skinnable
+ def skin: ObjectProperty[Skin[_ /* inferred: <: Skinnable */]] = ObjectProperty.jfxObjectProperty2sfx(delegate.skinProperty)
+ skin: ObjectProperty[Skin[_ <: Skinnable]]
+
+ def skinCheckInference = delegate.skinProperty
+ skinCheckInference: ObjectProperty[Skin[_ <: Skinnable]]
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/t6169.scala b/test/files/pos/t6169/t6169.scala
new file mode 100644
index 0000000000..37f42619ca
--- /dev/null
+++ b/test/files/pos/t6169/t6169.scala
@@ -0,0 +1,7 @@
+class Test {
+ class MyExist extends ExistF[MyExist]
+ // SI-8197, SI-6169: java infers the bounds of existentials, so we have to as well now that SI-1786 is fixed...
+ def stringy: Exist[_ <: String] = (new Exist[String]).foo
+ def fbounded: (ExistF[t] forSome {type t <: ExistF[t] }) = (new MyExist).foo
+ def indir: ExistIndir[_ <: String, _ <: String] = (new ExistIndir[String, String]).foo
+} \ No newline at end of file
diff --git a/test/files/pos/t7322.scala b/test/files/pos/t7322.scala
new file mode 100644
index 0000000000..006bf89e9f
--- /dev/null
+++ b/test/files/pos/t7322.scala
@@ -0,0 +1,11 @@
+
+package object t7322 {
+ implicit class X(sc: StringContext) {
+ def x_?(args: Any*) = "hi there"
+ }
+}
+package t7322 {
+ trait Y {
+ x_?"junk" // assume that if it compiles, it works
+ }
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
index 9f51248095..b38687c8b3 100644
--- a/test/files/pos/t7377/Macro_1.scala
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -2,6 +2,6 @@ import language.experimental._
import scala.reflect.macros.blackbox.Context
object M {
- def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.resetLocalAttrs(expr.tree)))
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.untypecheck(expr.tree)))
def noop[A](expr: A): A = macro noopImpl[A]
}
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
index 3bba19966d..3bd477dcda 100644
--- a/test/files/pos/t7516/A_1.scala
+++ b/test/files/pos/t7516/A_1.scala
@@ -3,7 +3,7 @@ import scala.reflect._,macros._, scala.language.experimental.macros
object A {
def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
val r = c.universe.reify { List(t.splice) }
- c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+ c.Expr[List[T]]( c.untypecheck(r.tree) )
}
def demo[T](t: T): List[T] = macro impl[T]
}
diff --git a/test/files/pos/t7919.scala b/test/files/pos/t7919.scala
new file mode 100644
index 0000000000..64f261ec16
--- /dev/null
+++ b/test/files/pos/t7919.scala
@@ -0,0 +1,6 @@
+
+object X {
+ val x = s""
+ val y = true
+}
+
diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala
index dd42950b34..9f1e6955b4 100644
--- a/test/files/pos/t8064/Macro_1.scala
+++ b/test/files/pos/t8064/Macro_1.scala
@@ -5,6 +5,6 @@ object Macro {
def apply(a: Any): Any = macro impl
def impl(c: Context)(a: c.Tree): c.Tree = {
- c.resetLocalAttrs(a)
+ c.untypecheck(a)
}
}
diff --git a/test/files/pos/t8170.scala b/test/files/pos/t8170.scala
new file mode 100644
index 0000000000..b65f4b8572
--- /dev/null
+++ b/test/files/pos/t8170.scala
@@ -0,0 +1,27 @@
+object O {
+ trait X
+ trait B extends A {
+ override type T[F1 <: X] = F1
+ }
+ trait A {
+ type T[F <: X]
+ }
+}
+
+object Test {
+ import O._
+ val a: B = ???
+ val b: a.T[X] = ???
+ b.ensuring(x => true) // trigger an implicit search
+}
+
+
+/*
+this = {AliasArgsTypeRef@3004}"Test#7680.a#14899.T#14823[O#7702.X#7793]"
+ sym = type T#14823
+ info = namer: [F#14824 <: O#7703.X#7793]F#14824
+result = {AbstractNoArgsTypeRef@3237}"F#24451"
+tp = {PolyType@3235}"[F#14824 <: O#7703.X#7793]F#14824"
+tparams =
+ (0) = {AbstractTypeSymbol@3247}"type F#24451"
+*/ \ No newline at end of file
diff --git a/test/files/pos/t8170b.scala b/test/files/pos/t8170b.scala
new file mode 100644
index 0000000000..53036f6c8a
--- /dev/null
+++ b/test/files/pos/t8170b.scala
@@ -0,0 +1,25 @@
+import language._
+
+object ScalaZeee {
+ trait HFold[M[_], U] {
+ type Apply[E, A <: U] <: U
+ }
+ trait GenericCons[M[_], H, +T <: GenericList[M]] extends GenericList[M] {
+ val tail: T
+ override type Folded[N[X] >: M[X], U, F <: HFold[N, U]] = F#Apply[H, tail.Folded[N, U, F]]
+ }
+ val KNil: GenericList[Nothing] = ???
+ sealed trait GenericList[+M[_]] {
+ type Folded[N[X] >: M[X], U, F <: HFold[N, U]] <: U
+ }
+}
+
+object TypelevelUsage {
+ import ScalaZeee._
+ type T = GenericCons[Some, String, KNil.type]
+ val klist1: T = ???
+ type T2 = klist1.Folded[Option, Int, HFold[Option, Int]]
+ val count2: T2 = ???
+
+ count2.ensuring(x => true).toChar // trigger an implicit search
+}
diff --git a/test/files/pos/t8207.scala b/test/files/pos/t8207.scala
new file mode 100644
index 0000000000..680b40f379
--- /dev/null
+++ b/test/files/pos/t8207.scala
@@ -0,0 +1,6 @@
+class C { me =>
+ import me.{toString => ts}
+ locally(this: me.type)
+ trait T
+ type X = me.T
+}
diff --git a/test/files/pos/t8237.scala b/test/files/pos/t8237.scala
new file mode 100644
index 0000000000..005089079e
--- /dev/null
+++ b/test/files/pos/t8237.scala
@@ -0,0 +1,29 @@
+import scala.language.higherKinds
+
+object TestExplicit {
+ trait TC[A]
+ def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+ implicit def tc[T]: TC[T] = ???
+
+ // Typechecking results in SOE in TypeVar.isGround
+ fTt(1)(tc)
+ // fun = TestExplicit.this.fTt[Int, E](1)
+ // args = TestExplicit.this.tc[E[Int]]
+ // argTpes.head.instantiateTypeParams = TC[?E#1[Int]]
+ // formals.head.instantiateTypeParams = TC[?E#2[Int]]
+ // (where ?E#1 and ?E#2 are distinct AppliedTypeVars that resulted
+ // from separate applications of type args to the same HKTypeVar, ?E)
+ //
+ // As we check if the argument conforms to the formal, we would have
+ // AppliedTypeVars sharing the same TypeConstraints on the LHS and RHS,
+ // which leads to a cyclic constraint.
+}
+
+object TestImplicit {
+ trait TC[A]
+ def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+ implicit def tc[T]: TC[T] = ???
+
+ // Oddly enough, this one works.
+ fTt(1)
+}
diff --git a/test/files/pos/t8237b.scala b/test/files/pos/t8237b.scala
new file mode 100644
index 0000000000..52bb310e8b
--- /dev/null
+++ b/test/files/pos/t8237b.scala
@@ -0,0 +1,10 @@
+import scala.language.higherKinds
+import scala.reflect.runtime.universe._
+object Test {
+
+ def fTt[A,E[X]<:List[X]](a: A)(implicit tt: TypeTag[E[A]]) = a
+
+ trait TC[A]
+ implicit def TCListInt[A]: TC[A] = ???
+ fTt(1)
+}
diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala
index c3ace590ed..1d4891fd1f 100644
--- a/test/files/run/global-showdef.scala
+++ b/test/files/run/global-showdef.scala
@@ -54,7 +54,7 @@ object Bippy {
val run = new compiler.Run()
run.compileSources(List(src))
}
- output.linesIterator.toList
+ output.lines.toList
}
def showClass(name: String) = lines("-Yshow:typer", "-Xshow-class", name)
def showObject(name: String) = lines("-Yshow:typer", "-Xshow-object", name)
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
index 624479480d..f038d8714f 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.blackbox.Context
object Impls {
def foo(c: Context)(x: c.Expr[Int]) = {
import c.universe._
- val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
+ val x1 = c.Expr[Int](c.untypecheck(x.tree))
c.Expr[Int](Literal(Constant(c.eval(x1))))
}
}
diff --git a/test/files/run/private-override.scala b/test/files/run/private-override.scala
deleted file mode 100644
index 0a3f57f97c..0000000000
--- a/test/files/run/private-override.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package test.p1.p2 {
- abstract class A {
- private[p2] def f2(): Int = 1
- }
- abstract class Other extends A {
- // It's a private method - not a private[p2] method. Not a failed
- // "weaker access privileges" override, a different namespace.
- private def f2(): Int = super.f2() + 2
- def go() = f2()
- }
-}
-
-object Test extends test.p1.p2.Other {
- def main(args: Array[String]): Unit = {
- println(go())
- }
-}
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
index 352aefaf25..43116858de 100644
--- a/test/files/run/reflection-magicsymbols-invoke.check
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -28,7 +28,7 @@ it's important to print the list of AnyVal's members
if some of them change (possibly, adding and/or removing magic symbols), we must update this test
constructor AnyVal: ()AnyVal
method getClass: ()Class[_ <: AnyVal]
-testing AnyVal.<init>: class java.lang.InstantiationException: null
+testing AnyVal.<init>: class scala.ScalaReflectionException: unsupported symbol constructor AnyVal when invoking bytecodeless method mirror for scala.AnyVal.<init>(): AnyVal (bound to null)
testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass
============
AnyRef
diff --git a/test/pending/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala
index 0ad5f2ab66..0c96974df7 100644
--- a/test/pending/run/reflection-sync-potpourri.scala
+++ b/test/files/run/reflection-sync-potpourri.scala
@@ -24,7 +24,7 @@ object Test extends App {
override def run(): Unit = {
val s1 = foo("42")
val s2 = perms(diceRolls(i - 1)).map(x => force(x)).sorted.mkString(", ")
- assert(s1 == "java.lang.String")
+ assert(s1 == "String" || s1 == "java.lang.String")
assert(s2 == "java.lang.annotation.Annotation, java.lang.reflect.Method, scala.io.BufferedSource, scala.io.Codec")
}
})
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
index be62c5780b..ead61e76ac 100644
--- a/test/files/run/stringinterpolation_macro-run.check
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -46,6 +46,8 @@ S
120
120
120
+ 0X4
+She is 4 feet tall.
120
42
3.400000e+00
@@ -60,3 +62,6 @@ S
05/26/12
05/26/12
05/26/12
+%
+7 7 9
+7 9 9
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
index 1138cd0860..ff779dd1d3 100644
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -72,6 +72,14 @@ println(f"${120 : java.lang.Integer}%d")
println(f"${120 : java.lang.Long}%d")
println(f"${BigInt(120)}%d")
println(f"${new java.math.BigInteger("120")}%d")
+println(f"${4}%#10X")
+
+locally {
+ val fff = new java.util.Formattable {
+ def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4")
+ }
+ println(f"She is ${fff}%#s feet tall.")
+}
{
implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
@@ -103,4 +111,11 @@ println(f"${c.getTime.getTime}%TD")
implicit val strToDate = (x: String) => c
println(f"""${"1234"}%TD""")
+
+
+// literals and arg indexes
+println(f"%%")
+println(f"${7}%d %<d ${9}%d")
+println(f"${7}%d %2$$d ${9}%d")
+
}
diff --git a/test/files/run/t6261.scala b/test/files/run/t6261.scala
index b4463256c9..bf6d640de3 100644
--- a/test/files/run/t6261.scala
+++ b/test/files/run/t6261.scala
@@ -2,12 +2,6 @@ import scala.collection.immutable._
object Test extends App {
- def test0() {
- val m=ListMap(1->2,3->4)
- if(m.tail ne m.tail)
- println("ListMap.tail uses a builder, so it is not O(1)")
- }
-
def test1() {
// test that a HashTrieMap with one leaf element is not created!
val x = HashMap.empty + (1->1) + (2->2)
@@ -92,7 +86,6 @@ object Test extends App {
// StructureTests.printStructure(z)
require(z.size == 2 && z.contains(a._1) && z.contains(c._1))
}
- test0()
test1()
test2()
test3()
diff --git a/test/files/run/t6411a.check b/test/files/run/t6411a.check
new file mode 100644
index 0000000000..9226146195
--- /dev/null
+++ b/test/files/run/t6411a.check
@@ -0,0 +1,96 @@
+meth = method yg_1
+as seen by Scala reflection: def yg_1[T](y: Y[T]): T
+as seen by Java reflection: public java.lang.Object a$.yg_1(java.lang.Object)
+result = 1
+meth = method yg_1
+as seen by Scala reflection: def yg_1[T](y: Y[T]): T
+as seen by Java reflection: public java.lang.Object a$.yg_1(java.lang.Object)
+result = 1
+meth = method yi_2
+as seen by Scala reflection: def yi_2(y: Y[Int]): Int
+as seen by Java reflection: public int a$.yi_2(java.lang.Integer)
+result = 2
+meth = method yi_2
+as seen by Scala reflection: def yi_2(y: Y[Int]): Int
+as seen by Java reflection: public int a$.yi_2(java.lang.Integer)
+result = class java.lang.IllegalArgumentException: argument type mismatch
+meth = method ys_3
+as seen by Scala reflection: def ys_3(y: Y[String]): String
+as seen by Java reflection: public java.lang.String a$.ys_3(java.lang.String)
+result = class java.lang.IllegalArgumentException: argument type mismatch
+meth = method ys_3
+as seen by Scala reflection: def ys_3(y: Y[String]): String
+as seen by Java reflection: public java.lang.String a$.ys_3(java.lang.String)
+result = 3
+meth = method ya_4
+as seen by Scala reflection: def ya_4(ys: Array[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.ya_4(Y[])
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method ya_4
+as seen by Scala reflection: def ya_4(ys: Array[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.ya_4(Y[])
+result = List(4)
+meth = method yl_5
+as seen by Scala reflection: def yl_5(ys: List[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.yl_5(scala.collection.immutable.List)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method yl_5
+as seen by Scala reflection: def yl_5(ys: List[Y[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.yl_5(scala.collection.immutable.List)
+result = List(5)
+meth = method yni_7
+as seen by Scala reflection: def yni_7(y: => Y[Int]): Int
+as seen by Java reflection: public int a$.yni_7(scala.Function0)
+result = 7
+meth = method yns_8
+as seen by Scala reflection: def yns_8(y: => Y[String]): String
+as seen by Java reflection: public java.lang.String a$.yns_8(scala.Function0)
+result = 8
+meth = method zg_1
+as seen by Scala reflection: def zg_1[T](z: Z[T]): T
+as seen by Java reflection: public java.lang.Object a$.zg_1(Z)
+result = 1
+meth = method zg_1
+as seen by Scala reflection: def zg_1[T](z: Z[T]): T
+as seen by Java reflection: public java.lang.Object a$.zg_1(Z)
+result = 1
+meth = method zi_2
+as seen by Scala reflection: def zi_2(z: Z[Int]): Int
+as seen by Java reflection: public int a$.zi_2(Z)
+result = 2
+meth = method zi_2
+as seen by Scala reflection: def zi_2(z: Z[Int]): Int
+as seen by Java reflection: public int a$.zi_2(Z)
+result = class java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Integer
+meth = method zs_3
+as seen by Scala reflection: def zs_3(z: Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zs_3(Z)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method zs_3
+as seen by Scala reflection: def zs_3(z: Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zs_3(Z)
+result = 3
+meth = method za_4
+as seen by Scala reflection: def za_4(zs: Array[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.za_4(Z[])
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method za_4
+as seen by Scala reflection: def za_4(zs: Array[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.za_4(Z[])
+result = List(4)
+meth = method zl_5
+as seen by Scala reflection: def zl_5(zs: List[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.zl_5(scala.collection.immutable.List)
+result = class java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
+meth = method zl_5
+as seen by Scala reflection: def zl_5(zs: List[Z[String]]): List[String]
+as seen by Java reflection: public scala.collection.immutable.List a$.zl_5(scala.collection.immutable.List)
+result = List(5)
+meth = method zni_7
+as seen by Scala reflection: def zni_7(z: => Z[Int]): Int
+as seen by Java reflection: public int a$.zni_7(scala.Function0)
+result = 7
+meth = method zns_8
+as seen by Scala reflection: def zns_8(z: => Z[String]): String
+as seen by Java reflection: public java.lang.String a$.zns_8(scala.Function0)
+result = 8
diff --git a/test/files/run/t6411a.scala b/test/files/run/t6411a.scala
new file mode 100644
index 0000000000..3bfeac2890
--- /dev/null
+++ b/test/files/run/t6411a.scala
@@ -0,0 +1,81 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.language.reflectiveCalls
+
+class Y[T](val i: T) extends AnyVal {
+ override def toString = s"Y($i)"
+}
+class Z[T](val i: T) extends AnyRef {
+ override def toString = s"Z($i)"
+}
+
+object a {
+ def yg_1[T](y: Y[T]) = y.i
+ def yi_2(y: Y[Int]) = y.i
+ def ys_3(y: Y[String]) = y.i
+ def ya_4(ys: Array[Y[String]]) = ys.toList.map(_.i)
+ def yl_5(ys: List[Y[String]]) = ys.map(_.i)
+ def yv_6(ys: Y[String]*) = ys.toList.map(_.i)
+ def yni_7(y: => Y[Int]) = y.i
+ def yns_8(y: => Y[String]) = y.i
+
+ def zg_1[T](z: Z[T]) = z.i
+ def zi_2(z: Z[Int]) = z.i
+ def zs_3(z: Z[String]) = z.i
+ def za_4(zs: Array[Z[String]]) = zs.toList.map(_.i)
+ def zl_5(zs: List[Z[String]]) = zs.map(_.i)
+ def zv_6(zs: Z[String]*) = zs.toList.map(_.i)
+ def zni_7(z: => Z[Int]) = z.i
+ def zns_8(z: => Z[String]) = z.i
+}
+
+object Test extends App {
+ def test(methName: String, arg: Any) = {
+ val moduleA = cm.reflect(a)
+ val msym = moduleA.symbol.typeSignature.declaration(TermName(methName)).asMethod
+ println(s"meth = $msym")
+ val mmirror = moduleA.reflectMethod(msym)
+ val mresult =
+ try { mmirror(arg) }
+ catch {
+ case ex: Exception =>
+ val ex1 = scala.reflect.runtime.ReflectionUtils.unwrapThrowable(ex)
+ s"${ex1.getClass}: ${ex1.getMessage}"
+ }
+ println(s"as seen by Scala reflection: ${msym.asInstanceOf[scala.reflect.internal.Symbols#Symbol].defString}")
+ println(s"as seen by Java reflection: ${mmirror.asInstanceOf[{val jmeth: java.lang.reflect.Method}].jmeth}")
+ println(s"result = $mresult")
+ }
+
+ test("yg_1", new Y(1))
+ test("yg_1", new Y("1"))
+ test("yi_2", new Y(2))
+ test("yi_2", new Y("2"))
+ test("ys_3", new Y(3))
+ test("ys_3", new Y("3"))
+ test("ya_4", Array(new Y(4)))
+ test("ya_4", Array(new Y("4")))
+ test("yl_5", List(new Y(5)))
+ test("yl_5", List(new Y("5")))
+ // FIXME: disabled because of SI-7056
+ // test("yv_6", new Y(6))
+ // test("yv_6", new Y("6"))
+ test("yni_7", new Y(7))
+ test("yns_8", new Y("8"))
+
+ test("zg_1", new Z(1))
+ test("zg_1", new Z("1"))
+ test("zi_2", new Z(2))
+ test("zi_2", new Z("2"))
+ test("zs_3", new Z(3))
+ test("zs_3", new Z("3"))
+ test("za_4", Array(new Z(4)))
+ test("za_4", Array(new Z("4")))
+ test("zl_5", List(new Z(5)))
+ test("zl_5", List(new Z("5")))
+ // FIXME: disabled because of SI-7056
+ // test("zv_6", new Z(6))
+ // test("zv_6", new Z("6"))
+ test("zni_7", new Z(7))
+ test("zns_8", new Z("8"))
+}
\ No newline at end of file
diff --git a/test/files/run/t6411b.check b/test/files/run/t6411b.check
new file mode 100644
index 0000000000..e20bed6d8d
--- /dev/null
+++ b/test/files/run/t6411b.check
@@ -0,0 +1 @@
+Bar(Foo(3))
diff --git a/test/files/run/t6411b.scala b/test/files/run/t6411b.scala
new file mode 100644
index 0000000000..af30108826
--- /dev/null
+++ b/test/files/run/t6411b.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+
+case class Foo(n: Int) extends AnyVal
+case class Bar(foo: Foo)
+
+object Test extends App {
+ val mirror = runtimeMirror(getClass.getClassLoader)
+ val cm = mirror.reflectClass(typeOf[Bar].typeSymbol.asClass)
+ val ctor = typeOf[Bar].declaration(nme.CONSTRUCTOR).asMethod
+ val ctorm = cm.reflectConstructor(ctor)
+ println(ctorm(Foo(3)))
+}
\ No newline at end of file
diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala
index 019ddf7cd6..c6e976038d 100644
--- a/test/files/run/t7240/Macros_1.scala
+++ b/test/files/run/t7240/Macros_1.scala
@@ -41,7 +41,7 @@ object Bakery {
def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List())
c.eval(c.Expr[Any](
- c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor))))
+ c.untypecheck(Block(composeDSL(Literal(Constant(1))), constructor))))
c.Expr[Any](Literal(Constant(1)))
}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index d03ee3a6cf..b7443aa0c4 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -21,7 +21,7 @@ scala> convert(Some[Int](0))
--- because ---
argument expression's type is not compatible with formal parameter type;
found : Some[Int]
- required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } }
convert(Some[Int](0))
^
<console>:12: error: type mismatch;
diff --git a/test/files/run/t7328.check b/test/files/run/t7328.check
new file mode 100644
index 0000000000..e386fe70d9
--- /dev/null
+++ b/test/files/run/t7328.check
@@ -0,0 +1,4 @@
+Foo
+Foo(3)
+Foo(3)
+Foo(5)
diff --git a/test/files/run/t7328.scala b/test/files/run/t7328.scala
new file mode 100644
index 0000000000..8816fa2347
--- /dev/null
+++ b/test/files/run/t7328.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+case class Foo(x: Int) extends AnyVal
+case class Bar(foo: Foo)
+
+object Test extends App {
+ val foo = typeOf[Bar].declaration(TermName("foo")).asMethod
+ println(foo.returnType) // Foo
+
+ val bar = Bar(Foo(3))
+ println(bar.foo) // Foo(3)
+
+ val im = cm.reflect(bar)
+ println(im.reflectField(foo).get) // incorrectly gives java.lang.Integer(3) not Foo(3)
+ im.reflectField(foo).set(Foo(5)) // java.lang.IllegalArgumentException: Can not set int field Bar.foo to Foo
+ println(im.reflectMethod(foo)()) // incorrectly gives java.lang.Integer(3) not Foo(3)
+}
\ No newline at end of file
diff --git a/test/files/run/t7445.scala b/test/files/run/t7445.scala
new file mode 100644
index 0000000000..e4ffeb8e1a
--- /dev/null
+++ b/test/files/run/t7445.scala
@@ -0,0 +1,6 @@
+import scala.collection.immutable.ListMap
+
+object Test extends App {
+ val a = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5);
+ require(a.tail == ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5));
+}
diff --git a/test/files/run/t7700.check b/test/files/run/t7700.check
new file mode 100644
index 0000000000..ca8e686984
--- /dev/null
+++ b/test/files/run/t7700.check
@@ -0,0 +1,2 @@
+public abstract java.lang.Object C.bar(java.lang.Object)
+public abstract java.lang.Object C.foo(java.lang.Object)
diff --git a/test/files/run/t7700.scala b/test/files/run/t7700.scala
new file mode 100644
index 0000000000..76d16b808c
--- /dev/null
+++ b/test/files/run/t7700.scala
@@ -0,0 +1,17 @@
+import scala.annotation._
+
+trait C[@specialized U] {
+ @unspecialized
+ def foo(u: U): U
+ @unspecialized
+ def bar[A](u: U) = u
+}
+
+object Test extends App {
+ val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName)
+ println(declared.mkString("\n"))
+ object CInt extends C[Int] { def foo(i: Int) = i }
+ object CAny extends C[Any] { def foo(a: Any) = a }
+ assert(CInt.foo(1) == 1)
+ assert(CAny.foo("") == "")
+}
diff --git a/test/disabled/run/t7843-jsr223-service.check b/test/files/run/t7843-jsr223-service.check
index a668df3567..a668df3567 100644
--- a/test/disabled/run/t7843-jsr223-service.check
+++ b/test/files/run/t7843-jsr223-service.check
diff --git a/test/files/run/t7843-jsr223-service.scala b/test/files/run/t7843-jsr223-service.scala
new file mode 100644
index 0000000000..31112212ea
--- /dev/null
+++ b/test/files/run/t7843-jsr223-service.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.interpreter.IMain
+
+object Test extends App {
+ val engine = new IMain.Factory getScriptEngine()
+ engine.asInstanceOf[IMain].settings.usejavacp.value = true
+ engine put ("n", 10)
+ engine eval "1 to n.asInstanceOf[Int] foreach print"
+}
diff --git a/test/files/run/t7933.check b/test/files/run/t7933.check
new file mode 100644
index 0000000000..317e9677c3
--- /dev/null
+++ b/test/files/run/t7933.check
@@ -0,0 +1,2 @@
+hello
+hello
diff --git a/test/files/run/t7933.scala b/test/files/run/t7933.scala
new file mode 100644
index 0000000000..b06dffcd80
--- /dev/null
+++ b/test/files/run/t7933.scala
@@ -0,0 +1,11 @@
+import scala.tools.nsc.interpreter.IMain
+
+object Test extends App {
+ val engine = new IMain.Factory getScriptEngine()
+ engine.asInstanceOf[IMain].settings.usejavacp.value = true
+ val res2 = engine.asInstanceOf[javax.script.Compilable]
+ res2 compile "8" eval()
+ val res5 = res2 compile """println("hello") ; 8"""
+ res5 eval()
+ res5 eval()
+}
diff --git a/test/files/run/t8199.scala b/test/files/run/t8199.scala
new file mode 100644
index 0000000000..50994159ed
--- /dev/null
+++ b/test/files/run/t8199.scala
@@ -0,0 +1,105 @@
+class reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname {
+object obj0
+object obj01
+object obj012
+object obj0123
+object obj01234
+object obj012345
+object obj0123456
+object obj01234567
+object obj012345678
+object obj0123456789
+object obj01234567890
+class cls0
+class cls01
+class cls012
+class cls0123
+class cls01234
+class cls012345
+class cls0123456
+class cls01234567
+class cls012345678
+class cls0123456789
+class cls01234567890
+trait trt0 { def x = Test.checkCallerImplClassName() }
+trait trt01 { def x = Test.checkCallerImplClassName() }
+trait trt012 { def x = Test.checkCallerImplClassName() }
+trait trt0123 { def x = Test.checkCallerImplClassName() }
+trait trt01234 { def x = Test.checkCallerImplClassName() }
+trait trt012345 { def x = Test.checkCallerImplClassName() }
+trait trt0123456 { def x = Test.checkCallerImplClassName() }
+trait trt01234567 { def x = Test.checkCallerImplClassName() }
+trait trt012345678 { def x = Test.checkCallerImplClassName() }
+trait trt0123456789 { def x = Test.checkCallerImplClassName() }
+trait trt01234567890 { def x = Test.checkCallerImplClassName() }
+}
+
+object Test extends App {
+ def check(c: Class[_]) {
+ checkClassName(c.getName)
+ }
+ def checkClassName(name: String) {
+ val defaultMaxClassFileLength = 255
+ assert((name + ".class").length <= defaultMaxClassFileLength, name)
+ }
+ def checkCallerImplClassName() {
+ val name = Thread.currentThread.getStackTrace.apply(2).getClassName
+ assert(name.contains("$class"))
+ Test.checkClassName(name)
+ }
+
+ val c = new reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname
+ import c._
+
+ check(obj0.getClass)
+ check(obj01.getClass)
+ check(obj012.getClass)
+ check(obj0123.getClass)
+ check(obj01234.getClass)
+ check(obj012345.getClass)
+ check(obj0123456.getClass)
+ check(obj01234567.getClass)
+ check(obj012345678.getClass)
+ check(obj0123456789.getClass)
+ check(obj01234567890.getClass)
+
+ check(classOf[cls0])
+ check(classOf[cls01])
+ check(classOf[cls012])
+ check(classOf[cls0123])
+ check(classOf[cls01234])
+ check(classOf[cls012345])
+ check(classOf[cls0123456])
+ check(classOf[cls01234567])
+ check(classOf[cls012345678])
+ check(classOf[cls0123456789])
+ check(classOf[cls01234567890])
+
+ // interface facets
+ check(classOf[trt0])
+ check(classOf[trt01])
+ check(classOf[trt012])
+ check(classOf[trt0123])
+ check(classOf[trt01234])
+ check(classOf[trt012345])
+ check(classOf[trt0123456])
+ check(classOf[trt01234567])
+ check(classOf[trt012345678])
+ check(classOf[trt0123456789])
+ check(classOf[trt01234567890])
+
+ // impl classes are harder to find the names of to test!
+ (new trt0 {}).x
+ (new trt01 {}).x
+ (new trt012 {}).x
+ (new trt0123 {}).x
+ (new trt01234 {}).x
+ (new trt012345 {}).x
+ (new trt0123456 {}).x
+ (new trt01234567 {}).x
+ (new trt012345678 {}).x
+ (new trt0123456789 {}).x
+ (new trt01234567890 {}).x
+}
+
+// filename too long: reallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongnamereallylongname$obj012345$.class
diff --git a/test/files/run/t8233-bcode.flags b/test/files/run/t8233-bcode.flags
new file mode 100644
index 0000000000..c30091d3de
--- /dev/null
+++ b/test/files/run/t8233-bcode.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode
diff --git a/test/files/run/t8233-bcode.scala b/test/files/run/t8233-bcode.scala
new file mode 100644
index 0000000000..fae1c2b702
--- /dev/null
+++ b/test/files/run/t8233-bcode.scala
@@ -0,0 +1,18 @@
+object Test {
+ def bar(s: String) = s;
+ val o: Option[Null] = None
+ def nullReference {
+ val a: Null = o.get
+ bar(a) // Was: VerifyError under GenICode
+ }
+
+ def literal {
+ val a: Null = null
+ bar(a)
+ }
+
+ def main(args: Array[String]) = {
+ try { nullReference } catch { case _: NoSuchElementException => }
+ literal
+ }
+}
diff --git a/test/files/run/t8233.scala b/test/files/run/t8233.scala
new file mode 100644
index 0000000000..fae1c2b702
--- /dev/null
+++ b/test/files/run/t8233.scala
@@ -0,0 +1,18 @@
+object Test {
+ def bar(s: String) = s;
+ val o: Option[Null] = None
+ def nullReference {
+ val a: Null = o.get
+ bar(a) // Was: VerifyError under GenICode
+ }
+
+ def literal {
+ val a: Null = null
+ bar(a)
+ }
+
+ def main(args: Array[String]) = {
+ try { nullReference } catch { case _: NoSuchElementException => }
+ literal
+ }
+}
diff --git a/test/files/run/t8245.scala b/test/files/run/t8245.scala
new file mode 100644
index 0000000000..d44defbb9e
--- /dev/null
+++ b/test/files/run/t8245.scala
@@ -0,0 +1,14 @@
+object Test {
+ def foo(o: Option[Int]): Int = {
+ lazy val i: Int = {
+ def local: Int = {if ("".isEmpty) return 42; -42}
+ assert(local == 42)
+ o.getOrElse(return -1)
+ }
+ i + 1
+ }
+
+ def main(args: Array[String]) {
+ assert(foo(None) == -1)
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
index 2af656c7c9..dcd4f63a4d 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
@@ -7,7 +7,9 @@ object DefinitionConstructionProps
with TraitConstruction
with TypeDefConstruction
with ValDefConstruction
- with PackageConstruction {
+ with DefConstruction
+ with PackageConstruction
+ with ImportConstruction {
property("SI-6842") = test {
val x: Tree = q"val x: Int"
assertEqAst(q"def f($x) = 0", "def f(x: Int) = 0")
@@ -228,13 +230,13 @@ trait MethodConstruction { self: QuasiquoteProperties =>
property("splice type name into annotation") = test {
val name = TypeName("annot")
- assertSameAnnots(q"@$name def foo", List(annot(name)))
+ assertSameAnnots(q"@$name def foo", List(q"new $name"))
}
property("splice ident into annotation") = test {
val name = TypeName("annot")
val ident = Ident(name)
- assertSameAnnots(q"@$ident def foo", List(annot(name)))
+ assertSameAnnots(q"@$ident def foo", List(q"new $name"))
}
property("splice idents into annotation") = test {
@@ -244,36 +246,36 @@ trait MethodConstruction { self: QuasiquoteProperties =>
}
property("splice constructor calls into annotation") = test {
- val ctorcalls = List(annot("a1"), annot("a2"))
+ val ctorcalls = List(q"new a1", q"new a2")
assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
}
property("splice multiple annotations (1)") = test {
- val annot1 = annot("a1")
- val annot2 = annot("a2")
+ val annot1 = q"new a1"
+ val annot2 = q"new a2"
val res = q"@$annot1 @$annot2 def foo"
assertSameAnnots(res, List(annot1, annot2))
}
property("splice multiple annotations (2)") = test {
- val annot1 = annot("a1")
- val annots = List(annot("a2"), annot("a3"))
+ val annot1 = q"new a1"
+ val annots = List(q"new a2", q"new a3")
val res = q"@$annot1 @..$annots def foo"
assertSameAnnots(res, annot1 :: annots)
}
property("splice annotations with arguments (1)") = test {
- val a = annot("a", List(q"x"))
+ val a = q"new a(x)"
assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
}
property("splice annotations with arguments (2)") = test {
- val a = newTypeName("a")
+ val a = TypeName("a")
assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
}
property("splice annotations with arguments (3") = test {
- val a = Ident(newTypeName("a"))
+ val a = Ident(TypeName("a"))
assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
}
@@ -285,7 +287,7 @@ trait MethodConstruction { self: QuasiquoteProperties =>
}
property("can't splice annotations with arguments specificed twice") = test {
- val a = annot("a", List(q"x"))
+ val a = q"new a(x)"
assertThrows[IllegalArgumentException] {
q"@$a(y) def foo"
}
@@ -349,4 +351,43 @@ trait PackageConstruction { self: QuasiquoteProperties =>
assertEqAst(q"package object foo extends { ..$edefs } with Any",
"package object foo extends { val x = 1; type I = Int } with Any")
}
-}
\ No newline at end of file
+}
+
+trait DefConstruction { self: QuasiquoteProperties =>
+ property("construct implicit args (1)") = test {
+ val x = q"val x: Int"
+ assertEqAst(q"def foo(implicit $x) = x", "def foo(implicit x: Int) = x")
+ }
+
+ property("construct implicit args (2)") = test {
+ val xs = q"val x1: Int" :: q"val x2: Long" :: Nil
+ assertEqAst(q"def foo(implicit ..$xs) = x1 + x2", "def foo(implicit x1: Int, x2: Long) = x1 + x2")
+ }
+}
+
+trait ImportConstruction { self: QuasiquoteProperties =>
+ property("construct wildcard import") = test {
+ val sel = pq"_"
+ assert(q"import foo.$sel" ≈ q"import foo._")
+ }
+
+ property("construct named import") = test {
+ val sel = pq"bar"
+ assert(q"import foo.$sel" ≈ q"import foo.bar")
+ }
+
+ property("construct renaming import") = test {
+ val sel = pq"bar -> baz"
+ assert(q"import foo.$sel" ≈ q"import foo.{bar => baz}")
+ }
+
+ property("construct unimport import") = test {
+ val sels = pq"poison -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{poison => _, _}")
+ }
+
+ property("construct mixed import") = test {
+ val sels = pq"a -> b" :: pq"c -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{a => b, c => _, _}")
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
index 94465930ed..e2d1757d48 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
@@ -8,7 +8,9 @@ object DefinitionDeconstructionProps
with ObjectDeconstruction
with ModsDeconstruction
with ValVarDeconstruction
+ with DefDeconstruction
with PackageDeconstruction
+ with ImportDeconstruction
trait TraitDeconstruction { self: QuasiquoteProperties =>
property("exhaustive trait matcher") = test {
@@ -124,18 +126,28 @@ trait ModsDeconstruction { self: QuasiquoteProperties =>
}
property("@..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
+ val a = q"new a"
+ val b = q"new b"
val q"@..$annots def foo" = q"@$a @$b def foo"
annots ≈ List(a, b)
}
property("@$annot @..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
- val c = annot("c")
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
- first ≈ a && rest ≈ List(b, c)
+ assert(first ≈ a)
+ assert(rest ≈ List(b, c))
+ }
+
+ property("@..$annots @$annot def foo") = test {
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
+ val q"@..$init @$last def foo" = q"@$a @$b @$c def foo"
+ assert(init ≈ List(a, b))
+ assert(last ≈ c)
}
}
@@ -179,4 +191,84 @@ trait PackageDeconstruction { self: QuasiquoteProperties =>
matches("package object foo extends { val early = 1 } with daddy")
assertThrows[MatchError] { matches("object foo") }
}
-}
\ No newline at end of file
+}
+
+trait DefDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive def matcher") = test {
+ def matches(line: String) = {
+ val t = parse(line)
+ val q"$mods0 def $name0[..$targs0](...$argss0): $restpe0 = $body0" = t
+ val q"$mods1 def $name1[..$targs1](...$argss1)(implicit ..$impl1): $restpe1 = $body1" = t
+ }
+ matches("def foo = foo")
+ matches("implicit def foo: Int = 2")
+ matches("def foo[T](x: T): T = x")
+ matches("def foo[A: B] = implicitly[B[A]]")
+ matches("private def foo = 0")
+ matches("def foo[A <% B] = null")
+ matches("def foo(one: One)(two: Two) = (one, two)")
+ matches("def foo[T](args: T*) = args.toList")
+ }
+
+ property("extract implicit arg list (1)") = test {
+ val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)(implicit y: Int)"
+ assert(impl ≈ List(q"${Modifiers(IMPLICIT | PARAM)} val y: Int"))
+ }
+
+ property("extract implicit arg list (2)") = test {
+ val q"def foo(...$argss)(implicit ..$impl)" = q"def foo(x: Int)"
+ assert(impl.isEmpty)
+ }
+}
+
+trait ImportDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive import matcher") = test {
+ def matches(line: String) = {
+ val q"import $ref.{..$sels}" = parse(line)
+ }
+ matches("import foo.bar")
+ matches("import foo.{bar, baz}")
+ matches("import foo.{a => b, c => d}")
+ matches("import foo.{poison => _, _}")
+ matches("import foo.bar.baz._")
+ }
+
+ property("extract import binding") = test {
+ val q"import $_.$sel" = q"import foo.bar"
+ val pq"bar" = sel
+ }
+
+ property("extract import wildcard") = test {
+ val q"import $_.$sel" = q"import foo._"
+ val pq"_" = sel
+ }
+
+ property("extract import rename") = test {
+ val q"import $_.$sel" = q"import foo.{bar => baz}"
+ val pq"bar -> baz" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"baz" = right
+ }
+
+ property("extract import unimport") = test {
+ val q"import $_.$sel" = q"import foo.{bar => _}"
+ val pq"bar -> _" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"_" = right
+ }
+
+ property("splice names into import selector") = forAll {
+ (expr: Tree, plain: TermName, oldname: TermName, newname: TermName, discard: TermName) =>
+
+ val Import(expr1, List(
+ ImportSelector(plain11, _, plain12, _),
+ ImportSelector(oldname1, _, newname1, _),
+ ImportSelector(discard1, _, wildcard, _))) =
+ q"import $expr.{$plain, $oldname => $newname, $discard => _}"
+
+ expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
+ oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD
+ }
+}
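The splice property above destructures scala.reflect's ImportSelector, a plain case class of four fields: the selected name, its source offset, the rename (the same name for a plain selector, or the wildcard for an unimport), and the rename's offset. A minimal sketch, not part of the patch, showing the raw tree behind a renaming selector (the object name is illustrative; printed offsets depend on the snippet):

import scala.reflect.runtime.universe._

object ImportSelectorSketch extends App {
  // The quasiquote builds an Import tree with a single renaming selector;
  // showRaw exposes the ImportSelector(name, namePos, rename, renamePos) shape.
  val tree = q"import foo.{bar => baz}"
  println(showRaw(tree))
  // Approximately:
  // Import(Ident(TermName("foo")),
  //        List(ImportSelector(TermName("bar"), <offset>, TermName("baz"), <offset>)))
}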
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
index 92d299bede..1ba9bba381 100644
--- a/test/files/scalacheck/quasiquotes/ErrorProps.scala
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -32,12 +32,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
q"@...$annots def foo"
""")
- property("@..$first @$rest def foo") = fails(
- "Can't extract with .. here",
- """
- q"@a @b @c def foo" match { case q"@..$first @$rest def foo" => }
- """)
-
property("only literal string arguments") = fails(
"Quasiquotes can only be used with literal strings",
"""
@@ -52,13 +46,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
StringContext("\"", "\"").q(x)
""")
- property("expected different cardinality") = fails(
- "Can't splice List[reflect.runtime.universe.Tree] with ..., consider using ..",
- """
- val args: List[Tree] = Nil
- q"f(...$args)"
- """)
-
property("non-liftable type ..") = fails(
"Can't splice List[StringBuilder] with .., consider omitting the dots or providing an implicit instance of Liftable[StringBuilder]",
"""
@@ -90,13 +77,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
q"$xs"
""")
- property("use zero card") = fails(
- "Can't splice reflect.runtime.universe.Tree with .., consider omitting the dots",
- """
- val t = EmptyTree
- q"f(..$t)"
- """)
-
property("not liftable or natively supported") = fails(
"Can't splice StringBuilder, consider providing an implicit instance of Liftable[StringBuilder]",
"""
@@ -154,12 +134,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
q"$m1 $m2 def foo"
""")
- property("can't extract with .. card here") = fails(
- "Can't extract with .. here",
- """
- val q"f(..$xs, $y)" = EmptyTree
- """)
-
property("can't extract mods with annots") = fails(
"Can't extract modifiers together with annotations, consider extracting just modifiers",
"""
@@ -188,4 +162,4 @@ object ErrorProps extends QuasiquoteProperties("errors") {
// // Make sure a nice error is reported in this case
// { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" }
-}
\ No newline at end of file
+}
diff --git a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
index cccf8095db..c8e66c7ef5 100644
--- a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
@@ -22,8 +22,18 @@ object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstr
pat0 ≈ pat && subpat0 ≈ subpat
}
+ property("extract apply many") = forAll { (pat: Tree, subpats: List[Tree]) =>
+ val pq"$pat0(..$subpats0)" = pq"$pat(..$subpats)"
+ pat0 ≈ pat && subpats0 ≈ subpats
+ }
+
+ property("extract apply last") = forAll { (pat: Tree, subpats: List[Tree], subpatlast: Tree) =>
+ val pq"$pat0(..$subpats0, $subpatlast0)" = pq"$pat(..$subpats, $subpatlast)"
+ pat0 ≈ pat && subpats0 ≈ subpats && subpatlast0 ≈ subpatlast
+ }
+
property("extract casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
val cq"$pat0 if $cond0 => $body0" = cq"$pat if $cond => $body"
pat0 ≈ pat && cond0 ≈ cond && body0 ≈ body
}
-}
\ No newline at end of file
+}
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
index e4ee5dfcae..589b8d4d72 100644
--- a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
+++ b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
@@ -116,10 +116,5 @@ trait Helpers {
}
}
- def annot(name: String): Tree = annot(TypeName(name), Nil)
- def annot(name: TypeName): Tree = annot(name, Nil)
- def annot(name: String, args: List[Tree]): Tree = annot(TypeName(name), args)
- def annot(name: TypeName, args: List[Tree]): Tree = q"new $name(..$args)"
-
val scalapkg = build.setSymbol(Ident(TermName("scala")), definitions.ScalaPackage)
}
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
index 38fbfa9f7f..058880a25c 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -85,19 +85,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun)
}
- property("splice names into import selector") = forAll {
- (expr: Tree, plain: Name, oldname: Name, newname: Name, discard: Name) =>
-
- val Import(expr1, List(
- ImportSelector(plain11, _, plain12, _),
- ImportSelector(oldname1, _, newname1, _),
- ImportSelector(discard1, _, wildcard, _))) =
- q"import $expr.{$plain, $oldname => $newname, $discard => _}"
-
- expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
- oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD
- }
-
property("splice trees into while loop") = forAll { (cond: Tree, body: Tree) =>
val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body"
body1 ≈ body && cond1 ≈ cond
@@ -116,7 +103,7 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
def blockInvariant(quote: Tree, trees: List[Tree]) =
quote ≈ (trees match {
- case Nil => q"()"
+ case Nil => q""
case _ :+ last if !last.isTerm => Block(trees, q"()")
case head :: Nil => head
case init :+ last => Block(init, last)
@@ -229,4 +216,71 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
val q"($a, $b) => $_" = q"_ + _"
assert(a.name != b.name)
}
+
+ property("SI-7275 a") = test {
+ val t = q"stat1; stat2"
+ assertEqAst(q"..$t", "{stat1; stat2}")
+ }
+
+ property("SI-7275 b") = test {
+ def f(t: Tree) = q"..$t"
+ assertEqAst(f(q"stat1; stat2"), "{stat1; stat2}")
+ }
+
+ property("SI-7275 c1") = test {
+ object O
+ implicit val liftO = Liftable[O.type] { _ => q"foo; bar" }
+ assertEqAst(q"f(..$O)", "f(foo, bar)")
+ }
+
+ property("SI-7275 c2") = test {
+ object O
+ implicit val liftO = Liftable[O.type] { _ => q"{ foo; bar }; { baz; bax }" }
+ assertEqAst(q"f(...$O)", "f(foo, bar)(baz, bax)")
+ }
+
+ property("SI-7275 d") = test {
+ val l = q"a; b" :: q"c; d" :: Nil
+ assertEqAst(q"f(...$l)", "f(a, b)(c, d)")
+ val l2: Iterable[Tree] = l
+ assertEqAst(q"f(...$l2)", "f(a, b)(c, d)")
+ }
+
+ property("SI-7275 e") = test {
+ val t = q"{ a; b }; { c; d }"
+ assertEqAst(q"f(...$t)", "f(a, b)(c, d)")
+ }
+
+ property("SI-7275 e2") = test {
+ val t = q"{ a; b }; c; d"
+ assertEqAst(q"f(...$t)", "f(a, b)(c)(d)")
+ }
+
+ property("remove synthetic unit") = test {
+ val q"{ ..$stats1 }" = q"{ def x = 2 }"
+ assert(stats1 ≈ List(q"def x = 2"))
+ val q"{ ..$stats2 }" = q"{ class X }"
+ assert(stats2 ≈ List(q"class X"))
+ val q"{ ..$stats3 }" = q"{ type X = Int }"
+ assert(stats3 ≈ List(q"type X = Int"))
+ val q"{ ..$stats4 }" = q"{ val x = 2 }"
+ assert(stats4 ≈ List(q"val x = 2"))
+ }
+
+ property("don't remove user-defined unit") = test {
+ val q"{ ..$stats }" = q"{ def x = 2; () }"
+ assert(stats ≈ List(q"def x = 2", q"()"))
+ }
+
+ property("empty-tree as block") = test {
+ val q"{ ..$stats1 }" = q" "
+ assert(stats1.isEmpty)
+ val stats2 = List.empty[Tree]
+ assert(q"{ ..$stats2 }" ≈ q"")
+ }
+
+ property("consistent variable order") = test {
+ val q"$a = $b = $c = $d = $e = $f = $g = $h = $k = $l" = q"a = b = c = d = e = f = g = h = k = l"
+ assert(a ≈ q"a" && b ≈ q"b" && c ≈ q"c" && d ≈ q"d" && e ≈ q"e" && g ≈ q"g" && h ≈ q"h" && k ≈ q"k" && l ≈ q"l")
+ }
}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
index 8d1ada342a..148bb383b0 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -29,14 +29,34 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
y1 ≈ x1 && y2 ≈ x2 && ys ≈ List(x3)
}
+ property("f(y1, ..ys, yn)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f($y1, ..$ys, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ y1 ≈ x1 && ys ≈ List(x2, x3) && yn ≈ x4
+ }
+
+ property("f(..ys, y_{n-1}, y_n)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(..$ys, $yn1, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ ys ≈ List(x1, x2) && yn1 ≈ x3 && yn ≈ x4
+ }
+
property("f(...xss)") = forAll { (x1: Tree, x2: Tree) =>
- val q"f(...$argss)" = q"f($x1)($x2)"
- argss ≈ List(List(x1), List(x2))
+ val q"f(...$xss)" = q"f($x1)($x2)"
+ xss ≈ List(List(x1), List(x2))
+ }
+
+ property("f(...$xss)(..$last)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f(...$xss)(..$last)" = q"f($x1)($x2)($x3)"
+ xss ≈ List(List(x1), List(x2)) && last ≈ List(x3)
+ }
+
+ property("f(...$xss)(..$lastinit, $lastlast)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(...$xss)(..$lastinit, $lastlast)" = q"f($x1)($x2, $x3, $x4)"
+ xss ≈ List(List(x1)) && lastinit ≈ List(x2, x3) && lastlast ≈ x4
}
property("f(...xss) = f") = forAll { (x1: Tree, x2: Tree) =>
- val q"f(...$argss)" = q"f"
- argss ≈ List()
+ val q"f(...$xss)" = q"f"
+ xss ≈ List()
}
property("deconstruct unit as tuple") = test {
@@ -51,12 +71,27 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
property("deconstruct tuple mixed") = test {
val q"($first, ..$rest)" = q"(a, b, c)"
- assert(first ≈ q"a" && rest ≈ List(q"b", q"c"))
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ }
+
+ property("deconstruct tuple last element") = test {
+ val q"($first, ..$rest, $last)" = q"(a, b, c, d)"
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ assert(last ≈ q"d")
}
property("deconstruct cases") = test {
val q"$x match { case ..$cases }" = q"x match { case 1 => case 2 => }"
- x ≈ q"x" && cases ≈ List(cq"1 =>", cq"2 =>")
+ assert(x ≈ q"x")
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ }
+
+ property("deconstruct splitting last case") = test {
+ val q"$_ match { case ..$cases case $last }" = q"x match { case 1 => case 2 => case 3 => }"
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ assert(last ≈ cq"3 =>")
}
property("deconstruct block") = test {
@@ -64,6 +99,12 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
assert(xs ≈ List(q"x1", q"x2", q"x3"))
}
+ property("deconstruct last element of a block") = test {
+ val q"{ ..$xs; $x }" = q"x1; x2; x3; x4"
+ assert(xs ≈ List(q"x1", q"x2", q"x3"))
+ assert(x ≈ q"x4")
+ }
+
property("exhaustive function matcher") = test {
def matches(line: String) { val q"(..$args) => $body" = parse(line) }
matches("() => bippy")
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
index be7a96d91e..78b54a4e49 100644
--- a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
@@ -28,4 +28,10 @@ object TypeConstructionProps extends QuasiquoteProperties("type construction")
val restpe = tq"C"
assert(tq"..$argtpes => $restpe" ≈ tq"(A, B) => C")
}
-}
\ No newline at end of file
+
+ property("empty tq") = test {
+ val tt: TypeTree = tq""
+ assert(tt.tpe == null)
+ assert(tt.original == null)
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
index 499f5d6d8e..0fdcc19052 100644
--- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
@@ -13,23 +13,49 @@ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction
a ≈ Ident(name1) && b ≈ Ident(name2)
}
- property("tuple type") = test {
+ property("tuple type (1)") = test {
val tq"(..$empty)" = tq"_root_.scala.Unit"
assert(empty.isEmpty)
+ }
+
+ property("tuple type (2)") = test {
val tq"(..$ts)" = tq"(t1, t2)"
assert(ts ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (3)") = test {
val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
- assert(head ≈ tq"t0" && tail ≈ List(tq"t1", tq"t2"))
+ assert(head ≈ tq"t0")
+ assert(tail ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (4)") = test {
+ val tq"(..$init, $last)" = tq"(t0, t1, t2)"
+ assert(init ≈ List(tq"t0", tq"t1"))
+ assert(last ≈ tq"t2")
}
property("refined type") = test {
val tq"T { ..$stats }" = tq"T { def foo; val x: Int; type Y = String }"
- assert(stats ≈ (q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil))
+ assert(stats ≈ List(q"def foo", q"val x: Int", q"type Y = String"))
}
- property("function type") = test {
+ property("function type (1)") = test {
val tq"..$argtpes => $restpe" = tq"(A, B) => C"
- assert(argtpes ≈ (tq"A" :: tq"B" :: Nil))
+ assert(argtpes ≈ List(tq"A", tq"B"))
assert(restpe ≈ tq"C")
}
-}
\ No newline at end of file
+
+ property("function type (2)") = test {
+ val tq"(..$argtpes, $arglast) => $restpe" = tq"(A, B, C) => D"
+ assert(argtpes ≈ List(tq"A", tq"B"))
+ assert(arglast ≈ tq"C")
+ assert(restpe ≈ tq"D")
+ }
+
+ property("match empty type tree") = test {
+ val tq"" = TypeTree()
+ // matches because type tree isn't syntactic without original
+ val tq"" = tq"${typeOf[Int]}"
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
index 2f501435e3..3afb47952c 100644
--- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
@@ -75,4 +75,11 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked") {
assert(f.original ≈ pq"Test.this.Cell")
assert(args ≈ List(pq"v"))
}
-}
\ No newline at end of file
+
+ property("extract inferred val type") = test {
+ val typechecked = typecheck(q"val x = 42")
+ val q"val x = 42" = typechecked
+ val q"val x: ${tq""} = 42" = typechecked
+ val q"val x: ${t: Type} = 42" = typechecked
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index c62b074483..0d6f43db06 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -476,4 +476,35 @@ class SetMapConsistencyTest {
}
assert(test)
}
+
+ @Test
+ def testSI8213() {
+ val am = new scala.collection.mutable.AnyRefMap[String, Int]
+ for (i <- 0 until 1024) am += i.toString -> i
+ am.getOrElseUpdate("1024", { am.clear; -1 })
+ assert(am == scala.collection.mutable.AnyRefMap("1024" -> -1))
+ val lm = new scala.collection.mutable.LongMap[Int]
+ for (i <- 0 until 1024) lm += i.toLong -> i
+ lm.getOrElseUpdate(1024, { lm.clear; -1 })
+ assert(lm == scala.collection.mutable.LongMap(1024L -> -1))
+ }
+
+ // Mutating when an iterator is in the wild shouldn't produce random junk in the iterator
+ // Todo: test all sets/maps this way
+ @Test
+ def testSI8154() {
+ def f() = {
+ val xs = scala.collection.mutable.AnyRefMap[String, Int]("a" -> 1)
+ val it = xs.iterator
+ it.hasNext
+ xs.clear()
+
+ if (it.hasNext) Some(it.next)
+ else None
+ }
+ assert(f() match {
+ case Some((a,b)) if (a==null || b==null) => false
+ case _ => true
+ })
+ }
}
diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
new file mode 100644
index 0000000000..903e705ba2
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
@@ -0,0 +1,55 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class SourceFileTest {
+ def lineContentOf(code: String, offset: Int) =
+ Position.offset(new BatchSourceFile("", code), offset).lineContent
+
+ @Test
+ def si8205_overflow(): Unit = {
+ val file = new BatchSourceFile("", "code no newline")
+ // the bug in lineToString counted until MaxValue, and the AIOOBE came from here
+ assertFalse(file.isEndOfLine(Int.MaxValue))
+ }
+
+ @Test
+ def si8205_lineToString(): Unit = {
+ assertEquals("", lineContentOf("", 0))
+ assertEquals("abc", lineContentOf("abc", 0))
+ assertEquals("abc", lineContentOf("abc", 3))
+ assertEquals("code no newline", lineContentOf("code no newline", 1))
+ assertEquals("", lineContentOf("\n", 0))
+ assertEquals("abc", lineContentOf("abc\ndef", 0))
+ assertEquals("abc", lineContentOf("abc\ndef", 3))
+ assertEquals("def", lineContentOf("abc\ndef", 4))
+ assertEquals("def", lineContentOf("abc\ndef", 6))
+ assertEquals("def", lineContentOf("abc\ndef\n", 7))
+ }
+
+ @Test
+ def CRisEOL(): Unit = {
+ assertEquals("", lineContentOf("\r", 0))
+ assertEquals("abc", lineContentOf("abc\rdef", 0))
+ assertEquals("abc", lineContentOf("abc\rdef", 3))
+ assertEquals("def", lineContentOf("abc\rdef", 4))
+ assertEquals("def", lineContentOf("abc\rdef", 6))
+ assertEquals("def", lineContentOf("abc\rdef\r", 7))
+ }
+
+ @Test
+ def CRNLisEOL(): Unit = {
+ assertEquals("", lineContentOf("\r\n", 0))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 0))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 3))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 4))
+ assertEquals("def", lineContentOf("abc\r\ndef", 5))
+ assertEquals("def", lineContentOf("abc\r\ndef", 7))
+ assertEquals("def", lineContentOf("abc\r\ndef", 8))
+ assertEquals("def", lineContentOf("abc\r\ndef\r\n", 9))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index a3699a4eeb..b42e9a07cb 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -69,6 +69,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
// Members declared in scala.reflect.internal.Required
def picklerPhase: scala.reflect.internal.Phase = SomePhase
+ def erasurePhase: scala.reflect.internal.Phase = SomePhase
// Members declared in scala.reflect.internal.SymbolTable
def currentRunId: Int = 1
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
index 68fedb7c83..d601f04f89 100644
--- a/test/osgi/src/BasicReflection.scala
+++ b/test/osgi/src/BasicReflection.scala
@@ -2,6 +2,8 @@ package tools.test.osgi
package reflection
package basic
+import scala.language.higherKinds
+
import org.junit.Assert._
import org.ops4j.pax.exam.CoreOptions._
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
index 7b14cf20e8..084afe8643 100644
--- a/test/osgi/src/ScalaOsgiHelper.scala
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -1,5 +1,5 @@
package tools.test.osgi
-
+
import org.ops4j.pax.exam.CoreOptions._
import org.ops4j.pax.exam
import java.io.File
@@ -12,7 +12,7 @@ trait ScalaOsgiHelper {
}
private def filteredBundleFiles(names: String*): Array[exam.Option] =
- for(bundle <- allBundleFiles; if names exists (bundle.getName contains))
+ for(bundle <- allBundleFiles; if names exists (bundle.getName contains _))
yield makeBundle(bundle)
private def makeBundle(file: File): exam.Option =
@@ -34,5 +34,5 @@ trait ScalaOsgiHelper {
val bundles = filteredBundleFiles("scala-library")
bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
}
-
+
}
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
index e673da5a29..b26c442599 100644
--- a/test/pending/run/idempotency-partial-functions.scala
+++ b/test/pending/run/idempotency-partial-functions.scala
@@ -22,7 +22,7 @@ object Test extends App {
val tb = cm.mkToolBox()
val tpartials = tb.typecheck(partials.tree)
println(tpartials)
- val rtpartials = tb.resetAllAttrs(tpartials)
+ val rtpartials = tb.untypecheck(tpartials)
println(tb.eval(rtpartials))
}
Test.main(null) \ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_0.scala b/test/scaladoc/resources/SI-4014_0.scala
new file mode 100644
index 0000000000..c398fcc1e0
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_0.scala
@@ -0,0 +1,4 @@
+/** A template without authors.
+ *
+ */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_1.scala b/test/scaladoc/resources/SI-4014_1.scala
new file mode 100644
index 0000000000..34386b515e
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_1.scala
@@ -0,0 +1,5 @@
+/** A template with one author.
+ *
+ * @author The Only Author
+ */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_2.scala b/test/scaladoc/resources/SI-4014_2.scala
new file mode 100644
index 0000000000..514f7a1e4c
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_2.scala
@@ -0,0 +1,6 @@
+/** A template with more than one author.
+ *
+ * @author The First Author
+ * @author The Second Author
+ */
+trait Foo
\ No newline at end of file
diff --git a/test/scaladoc/run/t7124.check b/test/scaladoc/run/t7124.check
new file mode 100644
index 0000000000..96b627a322
--- /dev/null
+++ b/test/scaladoc/run/t7124.check
@@ -0,0 +1,3 @@
+List()
+List(Paragraph(Text(macro)))
+Done.
diff --git a/test/scaladoc/run/t7124.scala b/test/scaladoc/run/t7124.scala
new file mode 100644
index 0000000000..e8272000d2
--- /dev/null
+++ b/test/scaladoc/run/t7124.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.language.experimental.macros
+ class Test {
+ def print(): Unit = macro ???
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val p = root._class("Test")._method("print")
+
+ println(p.annotations) // no annotations
+ println(p.flags) // a 'macro' flag
+ }
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 03348b81d2..56328ea875 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -47,6 +47,7 @@ object Test extends Properties("HtmlFactory") {
settings.scaladocQuietRun = true
settings.nowarn.value = true
settings.classpath.value = getClasspath
+ settings.docAuthor.value = true
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
new DocFactory(reporter, settings)
@@ -563,12 +564,13 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for override") =
checkText("explicit-inheritance-override.scala")(
(Some("InheritDocDerived"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
Starting line
Starting line
The base comment. And another sentence...
The base comment. And another sentence...
Ending line
+ Author: StartAuthor a Scala developer EndAuthor
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
@@ -589,12 +591,13 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for usecase") =
checkText("explicit-inheritance-usecase.scala")(
(Some("UseCaseInheritDoc"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
[use case] Starting line
[use case] Starting line
The base comment. And another sentence...
The base comment. And another sentence...
Ending line
+ Author: StartAuthor a Scala developer EndAuthor
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
@@ -663,6 +666,45 @@ object Test extends Properties("HtmlFactory") {
}
}
+ property("SI-4014: Scaladoc omits @author: no authors") = {
+ val noAuthors = createTemplates("SI-4014_0.scala")("Foo.html")
+
+ noAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("Author")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: one author") = {
+ val oneAuthor = createTemplates("SI-4014_1.scala")("Foo.html")
+
+ oneAuthor match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Author:</h6>")
+ s.contains("<p>The Only Author\n</p>")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: two authors") = {
+ val twoAuthors = createTemplates("SI-4014_2.scala")("Foo.html")
+
+ twoAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Authors:</h6>")
+ s.contains("<p>The First Author\n</p>")
+ s.contains("<p>The Second Author\n</p>")
+ }
+ case _ => false
+ }
+ }
+
{
val files = createTemplates("basic.scala")
//println(files)
diff --git a/versions.properties b/versions.properties
index 62ba7ac379..2ea8e713f1 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,15 +1,16 @@
-starr.version=2.11.0-M7
+#Sun, 19 Jan 2014 23:57:57 +0100
+starr.version=2.11.0-M8
starr.use.released=1
# These are the versions of the modules that go with this release.
# These properties are used during PR validation and in dbuild builds.
-scala.binary.version=2.11.0-M7
+scala.binary.version=2.11.0-M8
# external modules shipped with distribution:
scala-xml.version.number=1.0.0-RC7
scala-parser-combinators.version.number=1.0.0-RC5
-scala-continuations-plugin.version.number=1.0.0-RC2
-scala-continuations-library.version.number=1.0.0-RC2
+scala-continuations-plugin.version.number=1.0.0-RC3
+scala-continuations-library.version.number=1.0.0-RC3
scala-swing.version.number=1.0.0-RC2
# these ship with distribution (and scala-library-all depends on them)
@@ -18,7 +19,7 @@ actors-migration.version.number=1.0.0
# external modules, used internally (not shipped)
partest.version.number=1.0.0-RC8
-scalacheck.version.number=1.11.1
+scalacheck.version.number=1.11.3
# TODO: modularize the compiler
#scala-compiler-doc.version.number=1.0.0-RC1