-rw-r--r--  .mailmap | 2
-rwxr-xr-x  build.xml | 2
-rw-r--r--  src/build/maven/scala-actors-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-doc-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-interactive-pom.xml | 11
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 13
-rw-r--r--  src/build/maven/scala-dist-pom.xml | 11
-rw-r--r--  src/build/maven/scala-library-all-pom.xml | 11
-rw-r--r--  src/build/maven/scala-library-pom.xml | 11
-rw-r--r--  src/build/maven/scala-reflect-pom.xml | 11
-rw-r--r--  src/build/maven/scalap-pom.xml | 11
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Typers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 14
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 33
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 4
-rw-r--r--  src/compiler/scala/tools/reflect/FastTrack.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/FormatInterpolator.scala | 329
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 171
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBox.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 12
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala | 37
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala | 6
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala | 44
-rw-r--r--  src/eclipse/partest/.classpath | 2
-rw-r--r--  src/eclipse/test-junit/.classpath | 1
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ContextTrees.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 63
-rw-r--r--  src/library/scala/collection/immutable/Set.scala | 46
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 1
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 27
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 40
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 40
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala | 17
-rw-r--r--  src/reflect/scala/reflect/api/Importers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 181
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 19
-rw-r--r--  src/reflect/scala/reflect/internal/FreshNames.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/Importers.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 35
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 87
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolPairs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 157
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 145
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala | 8
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/Collections.scala | 34
-rw-r--r--  src/reflect/scala/reflect/macros/Attachments.scala | 5
-rw-r--r--  src/reflect/scala/reflect/macros/Evals.scala | 6
-rw-r--r--  src/reflect/scala/reflect/macros/Typers.scala | 6
-rw-r--r--  src/reflect/scala/reflect/macros/Universe.scala | 5
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 44
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 3
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolLoaders.scala | 5
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolTable.scala | 15
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala | 117
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedTypes.scala | 7
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Settings.scala | 7
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 4
-rw-r--r--  test/files/jvm/t6941/Analyzed_1.scala | 2
-rw-r--r--  test/files/neg/accesses.check | 6
-rw-r--r--  test/files/neg/accesses2.check | 10
-rw-r--r--  test/files/neg/missing-param-type-tuple.check | 6
-rw-r--r--  test/files/neg/names-defaults-neg.check | 2
-rw-r--r--  test/files/neg/stringinterpolation_macro-neg.check | 134
-rw-r--r--  test/files/neg/stringinterpolation_macro-neg.scala | 45
-rw-r--r--  test/files/neg/t7325.check | 8
-rw-r--r--  test/files/neg/t8143a.check | 5
-rw-r--r--  test/files/neg/t8143a.scala | 15
-rw-r--r--  test/files/neg/t8228.check | 4
-rw-r--r--  test/files/neg/t8228.scala | 7
-rw-r--r--  test/files/neg/t8237-default.check | 13
-rw-r--r--  test/files/neg/t8237-default.scala | 29
-rw-r--r--  test/files/pos/annotated-original/M_1.scala | 2
-rw-r--r--  test/files/pos/annotated-treecopy/Impls_Macros_1.scala | 2
-rw-r--r--  test/files/pos/t6169/Exist.java | 4
-rw-r--r--  test/files/pos/t6169/ExistF.java | 4
-rw-r--r--  test/files/pos/t6169/ExistIndir.java | 4
-rw-r--r--  test/files/pos/t6169/OP.java | 1
-rw-r--r--  test/files/pos/t6169/Skin.java | 1
-rw-r--r--  test/files/pos/t6169/Skinnable.java | 3
-rw-r--r--  test/files/pos/t6169/skinnable.scala | 14
-rw-r--r--  test/files/pos/t6169/t6169.scala | 7
-rw-r--r--  test/files/pos/t7322.scala | 11
-rw-r--r--  test/files/pos/t7377/Macro_1.scala | 2
-rw-r--r--  test/files/pos/t7516/A_1.scala | 2
-rw-r--r--  test/files/pos/t8064/Macro_1.scala | 2
-rw-r--r--  test/files/pos/t8170.scala | 27
-rw-r--r--  test/files/pos/t8170b.scala | 25
-rw-r--r--  test/files/pos/t8237.scala | 29
-rw-r--r--  test/files/pos/t8237b.scala | 10
-rw-r--r--  test/files/run/global-showdef.scala | 2
-rw-r--r--  test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala | 2
-rw-r--r--  test/files/run/private-override.scala | 17
-rw-r--r--  test/files/run/reflection-sync-potpourri.scala (renamed from test/pending/run/reflection-sync-potpourri.scala) | 2
-rw-r--r--  test/files/run/stringinterpolation_macro-run.check | 5
-rw-r--r--  test/files/run/stringinterpolation_macro-run.scala | 15
-rw-r--r--  test/files/run/t7240/Macros_1.scala | 2
-rw-r--r--  test/files/run/t7319.check | 2
-rw-r--r--  test/files/run/t7700.check | 2
-rw-r--r--  test/files/run/t7700.scala | 17
-rw-r--r--  test/files/run/t8233-bcode.flags | 1
-rw-r--r--  test/files/run/t8233-bcode.scala | 18
-rw-r--r--  test/files/run/t8233.scala | 18
-rw-r--r--  test/files/run/t8245.scala | 14
-rw-r--r--  test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala | 52
-rw-r--r--  test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala | 75
-rw-r--r--  test/files/scalacheck/quasiquotes/ErrorProps.scala | 12
-rw-r--r--  test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala | 12
-rw-r--r--  test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala | 5
-rw-r--r--  test/files/scalacheck/quasiquotes/TermConstructionProps.scala | 18
-rw-r--r--  test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala | 53
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeConstructionProps.scala | 2
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala | 38
-rw-r--r--  test/files/scalacheck/quasiquotes/TypecheckedProps.scala | 9
-rw-r--r--  test/junit/scala/collection/SetMapConsistencyTest.scala | 21
-rw-r--r--  test/osgi/src/BasicReflection.scala | 2
-rw-r--r--  test/osgi/src/ScalaOsgiHelper.scala | 6
-rw-r--r--  test/pending/run/idempotency-partial-functions.scala | 2
-rw-r--r--  test/scaladoc/resources/SI-4014_0.scala | 4
-rw-r--r--  test/scaladoc/resources/SI-4014_1.scala | 5
-rw-r--r--  test/scaladoc/resources/SI-4014_2.scala | 6
-rw-r--r--  test/scaladoc/run/t7124.check | 3
-rw-r--r--  test/scaladoc/run/t7124.scala | 22
-rw-r--r--  test/scaladoc/scalacheck/HtmlFactoryTest.scala | 46
151 files changed, 2279 insertions(+), 923 deletions(-)
diff --git a/.mailmap b/.mailmap
index 810bda030c..7cab5ed019 100644
--- a/.mailmap
+++ b/.mailmap
@@ -10,6 +10,7 @@ Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@lampmac14.epfl.ch>
Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <axel22@gmail.com>
Alex Cruise <alex@cluonflux.com>
Alex Cruise <alex@cluonflux.com> <alex@metaforsoftware.com>
+A. P. Marki <som.snytt@gmail.com>
Antonio Cunei <antonio.cunei@typesafe.com>
Antonio Cunei <antonio.cunei@typesafe.com> <antonio.cunei@epfl.ch>
Buraq Emir <buraq@epfl.ch>
@@ -71,5 +72,6 @@ Unknown Committer <lost.soul@typesafe.com> <USER@epfl.ch>
Unknown Committer <lost.soul@typesafe.com> <noreply@epfl.ch>
Viktor Klang <viktor.klang@gmail.com>
Vincent Cremet <cremet@epfl.ch>
+Vladimir Nikolaev <vladimir.nikolaev9@gmail.com>
Vojin Jovanovic <vojin.jovanovic@epfl.ch>
Vojin Jovanovic <vojin.jovanovic@epfl.ch> <gvojin@gmail.com>
diff --git a/build.xml b/build.xml
index 0d057e280a..37c544fcdc 100755
--- a/build.xml
+++ b/build.xml
@@ -1677,7 +1677,7 @@ TODO:
<fileset dir="${build-docs.dir}/library"/>
</copy>
- <copy toDir="${dist.dir}/doc/api" overwrite="true" flatten="true">
+ <copy toDir="${dist.dir}/api" overwrite="true" flatten="true">
<file file="${scala-xml-javadoc}"/>
<file file="${scala-parser-combinators-javadoc}"/>
<file file="${scala-continuations-plugin-javadoc}"/>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
index 424ac2898c..a0ebcecad1 100644
--- a/src/build/maven/scala-actors-pom.xml
+++ b/src/build/maven/scala-actors-pom.xml
@@ -38,17 +38,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-doc-pom.xml b/src/build/maven/scala-compiler-doc-pom.xml
index 30161d2fea..8572e55b42 100644
--- a/src/build/maven/scala-compiler-doc-pom.xml
+++ b/src/build/maven/scala-compiler-doc-pom.xml
@@ -45,17 +45,6 @@
<version>@PARSER_COMBINATORS_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-interactive-pom.xml b/src/build/maven/scala-compiler-interactive-pom.xml
index d59f305a9f..ad8192b694 100644
--- a/src/build/maven/scala-compiler-interactive-pom.xml
+++ b/src/build/maven/scala-compiler-interactive-pom.xml
@@ -35,17 +35,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index 4a000b27a1..8ca18f6f14 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -39,7 +39,7 @@
<artifactId>scala-reflect</artifactId>
<version>@VERSION@</version>
</dependency>
- <!-- TODO modularize compiler: these dependencies will disappear then the compiler is modularized -->
+ <!-- TODO modularize compiler: these dependencies will disappear when the compiler is modularized -->
<dependency> <!-- for scala-compiler-doc -->
<groupId>org.scala-lang.modules</groupId>
<artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
@@ -57,17 +57,6 @@
<optional>true</optional>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
index 413da928bb..9a566d231b 100644
--- a/src/build/maven/scala-dist-pom.xml
+++ b/src/build/maven/scala-dist-pom.xml
@@ -51,17 +51,6 @@
<version>@JLINE_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml
index f34a28e79a..b649c8c525 100644
--- a/src/build/maven/scala-library-all-pom.xml
+++ b/src/build/maven/scala-library-all-pom.xml
@@ -75,17 +75,6 @@
<version>@ACTORS_MIGRATION_VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index d40cee2656..78fc05a7c3 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -33,17 +33,6 @@
</properties>
<dependencies>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
index d0a9c0e274..c21caefcf2 100644
--- a/src/build/maven/scala-reflect-pom.xml
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -38,17 +38,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index 88cfce08d8..236ac999fc 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -35,17 +35,6 @@
<version>@VERSION@</version>
</dependency>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index cd3db74016..c1ab17027f 100644
--- a/src/compiler/scala/reflect/macros/contexts/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -46,9 +46,7 @@ trait Typers {
universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
}
- def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(universe.duplicateAndKeepPositions(tree))
+ def resetLocalAttrs(tree: Tree): Tree = universe.resetAttrs(universe.duplicateAndKeepPositions(tree))
- def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree))
-
- def untypecheck(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree))
+ def untypecheck(tree: Tree): Tree = resetLocalAttrs(tree)
}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index ad0632f93e..6b0a0ee8d7 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -86,7 +86,7 @@ abstract class Reifier extends States
throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString))
}
- // todo. why do we resetAllAttrs?
+ // todo. why do we reset attrs?
//
// typically we do some preprocessing before reification and
// the code emitted/moved around during preprocessing is very hard to typecheck, so we leave it as it is
@@ -109,19 +109,11 @@ abstract class Reifier extends States
//
// todo. this is a common problem with non-trivial macros in our current macro system
// needs to be solved some day
- // maybe try `resetLocalAttrs` once the dust settles
- var importantSymbols = Set[Symbol](
- NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
- ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, runDefinitions.ReflectRuntimeCurrentMirror)
- importantSymbols ++= importantSymbols map (_.companionSymbol)
- importantSymbols ++= importantSymbols map (_.moduleClass)
- importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
- def isImportantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym)
- val untyped = resetAllAttrs(result, leaveAlone = {
+ // upd. a new hope: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ
+ val untyped = resetAttrs(result, leaveAlone = {
case ValDef(_, u, _, _) if u == nme.UNIVERSE_SHORT => true
case ValDef(_, m, _, _) if m == nme.MIRROR_SHORT => true
case tree if symtab.syms contains tree.symbol => true
- case tree if isImportantSymbol(tree.symbol) => true
case _ => false
})
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index f6b3c42ca9..b082796757 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -140,7 +140,7 @@ trait GenTrees {
if (sym == NoSymbol) {
// this sometimes happens, e.g. for binds that don't have a body
// or for untyped code generated during previous phases
- // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?")
+ // (see a comment in Reifiers about the latter, starting with "why do we reset attrs?")
mirrorCall(nme.Ident, reify(name))
}
else if (!sym.isLocalToReifee) {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 24bce0636d..7a7d4ac0b2 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -176,13 +176,24 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
}
- /** resets symbol and tpe fields in a tree, @see ResetAttrs
- */
-// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
-// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
- def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
+ // Finally, no one uses resetAllAttrs anymore, so I'm removing it from the compiler.
+ // Even though I'm removing it with great pleasure, I'll leave its body here to warn future generations about what happened in the past.
+ //
+ // So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs.
+ // resetAllAttrs destroyed all symbols and types in the tree in order to reset its state to something suitable for retypechecking
+ // and/or embedding into bigger trees / different lexical scopes. (Btw here's some background on why people would want to use
+ // reset attrs in the first place: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ).
+ //
+ // However, resetAllAttrs was more of a poison than a cure, because along with locally defined symbols, which are the cause
+ // for almost every or maybe even every case of tree corruption, it erased external bindings that sometimes could not be restored.
+ // This is how we came up with resetLocalAttrs that left external bindings alone, and that was a big step forward.
+ // Then slowly but steadily we've evicted all usages of resetAllAttrs from our codebase in favor of resetLocalAttrs
+ // and have been living happily ever after.
+ //
+ // def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(localOnly = false, leaveAlone).transform(x)
+
+ /** @see ResetAttrs */
+ def resetAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(leaveAlone).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
@@ -193,7 +204,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*
* (bq:) This transformer has mutable state and should be discarded after use
*/
- private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
+ private class ResetAttrs(leaveAlone: Tree => Boolean = null) {
// this used to be based on -Ydebug, but the need for logging in this code is so situational
// that I've reverted to a hard-coded constant here.
val debug = false
@@ -277,29 +288,18 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
//
// The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
- // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // Here we take care of references to package classes (SI-5705).
// There are other non-idempotencies, but they are not worked around yet.
//
- // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
- //
- // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around.
- // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
- // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
- // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
- // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
- //
- // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
- // in one lexical scope, it can get resolved to something else.
- //
- // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
- // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
- // That's what localOnly and vetoScope are for.
+ // The second reason has to do with the fact that resetAttrs needs to be less destructive.
+ // Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not,
+ // therefore we want to retain those bindings, especially given that restoring them can be impossible
+ // if we move these trees into lexical contexts different from their original locations.
if (dupl.hasSymbol) {
val sym = dupl.symbol
- val vetoScope = localOnly && !(locals contains sym)
- val vetoLabel = keepLabels && sym.isLabel
+ val vetoScope = !(locals contains sym)
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
- if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ if (!(vetoScope || vetoThis)) dupl.symbol = NoSymbol
}
dupl.clearType()
}
@@ -308,10 +308,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
def transform(x: Tree): Tree = {
- if (localOnly)
new MarkLocals().traverse(x)
- if (localOnly && debug) {
+ if (debug) {
assert(locals.size == orderedLocals.size)
val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL
trace("locals (%d total): %n".format(orderedLocals.size))(msg)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 23a2e0b37f..e3d2bf14a0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1225,7 +1225,7 @@ self =>
// Like Swiss cheese, with holes
def stringCheese: Tree = atPos(in.offset) {
val start = in.offset
- val interpolator = in.name
+ val interpolator = in.name.encoded // ident() for INTERPOLATIONID
val partsBuf = new ListBuffer[Tree]
val exprBuf = new ListBuffer[Tree]
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 1332d01dbd..b650cdfa09 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -46,8 +46,10 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = NoCompilationUnit
override def run() {
- scalaPrimitives.init()
- classes.clear()
+ if (!settings.isBCodeActive) {
+ scalaPrimitives.init()
+ classes.clear()
+ }
super.run()
}
@@ -1007,8 +1009,15 @@ abstract class GenICode extends SubComponent {
}
// emit conversion
- if (generatedType != expectedType)
- adapt(generatedType, expectedType, resCtx, tree.pos)
+ if (generatedType != expectedType) {
+ tree match {
+ case Literal(Constant(null)) if generatedType == NullReference =>
+ // literal null on the stack (as opposed to a boxed null, see SI-8233),
+ // we can bypass `adapt` which would otherwise emit a redundant [DROP, CONSTANT(null)]
+ case _ =>
+ adapt(generatedType, expectedType, resCtx, tree.pos)
+ }
+ }
resCtx
}
@@ -1058,6 +1067,9 @@ abstract class GenICode extends SubComponent {
case (NothingReference, _) =>
ctx.bb.emit(THROW(ThrowableClass))
ctx.bb.enterIgnoreMode()
+ case (NullReference, REFERENCE(_)) =>
+ // SI-8233 we can't assume that the stack contains a `null`; it might contain a Null$
+ ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
case _ if from isAssignabledTo to =>
()
case (_, UNIT) =>
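A minimal sketch of the kind of source the two hunks above are concerned with; the shape is assumed from the SI-8233 reference (the commit's own regression tests are t8233 and t8233-bcode):

    object NullAdaptDemo {
      def str(s: String): String = s

      // a bare null literal where a String is expected: generatedType is
      // NullReference, and the literal-null special case above skips the
      // now-redundant [DROP, CONSTANT(null)] adaptation
      def direct(): String = str(null)

      // an expression of type Null that is not the literal null: the backend
      // cannot assume a plain null sits on the stack, so adapt drops whatever
      // is there and pushes a constant null instead
      def nul: Null = null
      def indirect(): String = str(nul)
    }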
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 267fa15312..64146585e5 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -21,7 +21,7 @@ trait Members {
import global._
- object NoCode extends Code(null, "NoCode") {
+ object NoCode extends Code(null, TermName("NoCode")) {
override def blocksList: List[BasicBlock] = Nil
}
@@ -29,8 +29,8 @@ trait Members {
* This class represents the intermediate code of a method or
* other multi-block piece of code, like exception handlers.
*/
- class Code(method: IMethod, name: String) {
- def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
+ class Code(method: IMethod, name: Name) {
+ def this(method: IMethod) = this(method, method.symbol.name)
/** The set of all blocks */
val blocks = mutable.ListBuffer[BasicBlock]()
@@ -82,7 +82,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'"
+ override def toString = "ICode '" + name.decoded + "'"
/* Compute a unique new label */
def nextLabel: Int = {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 90c15bca61..55f45257dc 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -55,23 +55,26 @@ abstract class Pickler extends SubComponent {
case _ =>
}
}
- // If there are any erroneous types in the tree, then we will crash
- // when we pickle it: so let's report an error instead. We know next
- // to nothing about what happened, but our supposition is a lot better
- // than "bad type: <error>" in terms of explanatory power.
- for (t <- unit.body) {
- if (t.isErroneous) {
- unit.error(t.pos, "erroneous or inaccessible type")
- return
- }
- if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
- return
- }
+ try {
+ pickle(unit.body)
+ } catch {
+ case e: FatalError =>
+ for (t <- unit.body) {
+ // If there are any erroneous types in the tree, then we will crash
+ // when we pickle it: so let's report an error instead. We know next
+ // to nothing about what happened, but our supposition is a lot better
+ // than "bad type: <error>" in terms of explanatory power.
+ //
+ // OPT: do this only as a recovery after fatal error. Checking in advance was expensive.
+ if (t.isErroneous) {
+ if (settings.debug) e.printStackTrace()
+ unit.error(t.pos, "erroneous or inaccessible type")
+ return
+ }
+ }
+ throw e
}
-
- pickle(unit.body)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 4bbfc945f6..ccfddab94a 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -92,11 +92,22 @@ abstract class Erasure extends AddInterfaces
// more rigorous way up front rather than catching it after the fact,
// but that will be more involved.
private def dotCleanup(sig: String): String = {
+ // OPT 50% of time in generic signatures (~1% of compile time) was in this method, hence the imperative rewrite.
var last: Char = '\u0000'
- sig map {
- case '.' if last != '>' => last = '.' ; '$'
- case ch => last = ch ; ch
+ var i = 0
+ val len = sig.length
+ val copy: Array[Char] = sig.toCharArray
+ var changed = false
+ while (i < sig.length) {
+ val ch = copy(i)
+ if (ch == '.' && last != '>') {
+ copy(i) = '$'
+ changed = true
+ }
+ last = ch
+ i += 1
}
+ if (changed) new String(copy) else sig
}
/** This object is only used for sanity testing when -check:genjvm is set.
@@ -522,6 +533,8 @@ abstract class Erasure extends AddInterfaces
class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
val typer = this.asInstanceOf[analyzer.Typer]
+ override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
+
/** Replace member references as follows:
*
* - `x == y` for == in class Any becomes `x equals y` with equals in class Object.
@@ -760,7 +773,7 @@ abstract class Erasure extends AddInterfaces
|| super.exclude(sym)
|| !sym.hasTypeAt(currentRun.refchecksPhase.id)
)
- override def matches(sym1: Symbol, sym2: Symbol) = true
+ override def matches(lo: Symbol, high: Symbol) = true
}
def isErasureDoubleDef(pair: SymbolPair) = {
import pair._
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 4222c4d8c8..870eafbf20 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -24,15 +24,16 @@ abstract class OverridingPairs extends SymbolPairs {
/** Symbols to exclude: Here these are constructors and private/artifact symbols,
* including bridges. But it may be refined in subclasses.
*/
- override protected def exclude(sym: Symbol) = (sym hasFlag PRIVATE | ARTIFACT) || sym.isConstructor
+ override protected def exclude(sym: Symbol) = sym.isPrivateLocal || sym.isArtifact || sym.isConstructor
/** Types always match. Term symbols match if their member types
* relative to `self` match.
*/
- override protected def matches(sym1: Symbol, sym2: Symbol) = sym1.isType || (
- (sym1.owner != sym2.owner)
- && !exclude(sym2)
- && relatively.matches(sym1, sym2)
- )
+ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
+ (lo.owner != high.owner) // don't try to form pairs from overloaded members
+ && !high.isPrivate // private or private[this] members are never overridden
+ && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
+ && relatively.matches(lo, high)
+ ) // TODO we don't call exclude(high), should we?
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 3791af1629..c59b726076 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -306,6 +306,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
+ *
+ * !!! Is this safe in the face of the following?
+ * scala> trait T { def foo[A] = 0}; object O extends T { override def foo[B] = 0 }
*/
private def specializedName(sym: Symbol, env: TypeEnv): TermName = {
val tvars = (
@@ -391,13 +394,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* enclosing member with the annotation.
*/
private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
- !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+ !hasUnspecializableAnnotation(sym) && (
specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
|| sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
|| isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
)
)
+ private def hasUnspecializableAnnotation(sym: Symbol): Boolean =
+ sym.ownerChain.exists(_ hasAnnotation UnspecializedClass)
+
def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
case NormalizedMember(_) => true
case _ => false
@@ -433,10 +439,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else
specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if tp.isSpecialized => arg })
- case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: tparams.map(_.info))
+ case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: mapList(tparams)(symInfo)) // OPT
// since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
case NullaryMethodType(resTpe) => specializedTypeVars(resTpe)
- case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
+ case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: mapList(argSyms)(symTpe)) // OPT
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
@@ -907,16 +913,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (sym.isMethod) {
- val stvars = specializedTypeVars(sym)
- if (stvars.nonEmpty)
- debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
+ if (hasUnspecializableAnnotation(sym)) {
+ List()
+ } else {
+ val stvars = specializedTypeVars(sym)
+ if (stvars.nonEmpty)
+ debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
- val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
- val tps2 = tps1 filter stvars
- if (!sym.isDeferred)
- addConcreteSpecMethod(sym)
+ val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
+ val tps2 = tps1 filter stvars
+ if (!sym.isDeferred)
+ addConcreteSpecMethod(sym)
- specializeOn(tps2)
+ specializeOn(tps2)
+ }
}
else Nil
}
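For reference, the annotation that hasUnspecializableAnnotation looks for along the owner chain is scala.annotation.unspecialized; a small illustrative use (the class itself is made up):

    import scala.annotation.unspecialized

    class Vec[@specialized(Int, Double) T](val data: Array[T]) {
      // specialized variants are generated for Int and Double here ...
      def head: T = data(0)

      // ... but not here: the owner-chain check above makes both
      // needsSpecialization and the specialized-overload computation skip
      // members marked @unspecialized
      @unspecialized def size: Int = data.length
    }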
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b471d16ddd..5973c70583 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -128,6 +128,7 @@ abstract class TailCalls extends Transform {
logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+
}
object EmptyTailContext extends TailContext {
@@ -185,6 +186,18 @@ abstract class TailCalls extends Transform {
private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
+ override def transformUnit(unit: CompilationUnit): Unit = {
+ try {
+ super.transformUnit(unit)
+ } finally {
+ // OPT clear these after each compilation unit
+ failPositions.clear()
+ failReasons.clear()
+ accessed.clear()
+ }
+ }
+
/** Rewrite this tree to contain no tail recursive calls */
def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
@@ -218,12 +231,12 @@ abstract class TailCalls extends Transform {
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
- failReasons(ctx) = reason
+ if (ctx.isMandatory) failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
/* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- failPositions(ctx) = fun.pos
+ if (ctx.isMandatory) failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
@@ -237,7 +250,8 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
+ if (ctx.isMandatory && receiverIsSuper) // OPT expensive check, avoid unless we will actually report the error
+ failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
@@ -245,6 +259,11 @@ abstract class TailCalls extends Transform {
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
+
+ def isEligible(tree: DefDef) = {
+ val sym = tree.symbol
+ !(sym.hasAccessorFlag || sym.isConstructor)
+ }
tree match {
case ValDef(_, _, _, _) =>
@@ -253,7 +272,7 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
val newCtx = new DefDefTailContext(dd)
if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
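A small sketch of the distinction the reporting changes above rely on: failure reasons and positions are now recorded only for mandatory contexts, i.e. methods that actually carry @tailrec (the annotation is real; the methods below are illustrative):

    import scala.annotation.tailrec

    object TailDemo {
      // no @tailrec: the transform simply leaves the non-tail call alone,
      // and with this change no failure bookkeeping is done for it
      def slowSum(n: Int): Int = if (n == 0) 0 else n + slowSum(n - 1)

      // @tailrec makes the context mandatory: this call is in tail position,
      // so it is rewritten into a loop (otherwise the recorded reason and
      // position would be reported as a compile error)
      @tailrec def sum(n: Int, acc: Int = 0): Int =
        if (n == 0) acc else sum(n - 1, acc + n)
    }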
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e193cf3de2..e7ea686bc8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -105,12 +105,11 @@ abstract class UnCurry extends InfoTransform
*/
def isByNameRef(tree: Tree) = (
tree.isTerm
- && !byNameArgs(tree)
&& (tree.symbol ne null)
&& (isByName(tree.symbol))
+ && !byNameArgs(tree)
)
-
// ------- Handling non-local returns -------------------------------------------------
/** The type of a non-local return expression with given argument type */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 94f8f509fc..b899cd8994 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -186,7 +186,7 @@ trait Checkable {
* additional conditions holds:
* - either A or B is effectively final
* - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin)
- * - both A and B are sealed, and every possible pairing of their children is irreconcilable
+ * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable
*
* TODO: the last two conditions of the last possibility (that the symbols are not of
* classes being compiled in the current run) are because this currently runs too early,
@@ -198,8 +198,9 @@ trait Checkable {
isEffectivelyFinal(sym1) // initialization important
|| isEffectivelyFinal(sym2)
|| !sym1.isTrait && !sym2.isTrait
- || sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+ || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
)
+ private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal
private def isEffectivelyFinal(sym: Symbol): Boolean = (
// initialization important
sym.initialize.isEffectivelyFinal || (
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4d0eda2377..2043eb5d5d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -418,7 +418,7 @@ trait ContextErrors {
case TypeRef(_, _, arg :: _) if arg.typeSymbol == TupleClass(funArity) && funArity > 1 =>
sm"""|
|Note: The expected type requires a one-argument function accepting a $funArity-Tuple.
- | Consider a pattern matching anoynmous function, `{ case $example => ... }`"""
+ | Consider a pattern matching anonymous function, `{ case $example => ... }`"""
case _ => ""
}
case _ => ""
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 598b12b00d..5b7956a757 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -457,7 +457,9 @@ trait Contexts { self: Analyzer =>
c.prefix = prefixInChild
c.enclClass = if (isTemplateOrPackage) c else enclClass
c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
- c.enclMethod = if (isDefDef) c else enclMethod
+
+ // SI-8245 `isLazy` is needed to skip lazy getters, to ensure `return` binds to the right place
+ c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
@@ -1303,7 +1305,7 @@ trait Contexts { self: Analyzer =>
var renamed = false
var selectors = tree.selectors
def current = selectors.head
- while (selectors.nonEmpty && result == NoSymbol) {
+ while ((selectors ne Nil) && result == NoSymbol) {
if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
if (name.isTypeName) current.name.toTypeName else current.name)
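A minimal sketch of why enclMethod has to skip lazy getters; the shape is assumed from the SI-8245 note above and the t8245 test name. The initializer of a lazy val is compiled into a synthetic getter, and a `return` written inside it must still bind to the enclosing method:

    object LazyReturnDemo {
      def find(xs: List[Int]): Int = {
        // the block below becomes the body of a synthetic lazy getter; with
        // the fix above, enclMethod skips that getter, so `return` targets
        // `find` (as written) rather than the getter itself
        lazy val first: Int = {
          if (xs.isEmpty) return -1
          xs.head
        }
        first + 1
      }
    }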
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 847211945c..776920ed42 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -788,7 +788,7 @@ trait Implicits {
final class LocalShadower extends Shadower {
val shadowed = util.HashSet[Name](512)
def addInfos(infos: Infos) {
- shadowed addEntries infos.map(_.name)
+ infos.foreach(i => shadowed.addEntry(i.name))
}
def isShadowed(name: Name) = shadowed(name)
}
@@ -805,7 +805,6 @@ trait Implicits {
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && (info.sym eq Predef_conforms)
- || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -814,6 +813,7 @@ trait Implicits {
def survives(info: ImplicitInfo) = (
!isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded
&& isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt
+ && !shadower.isShadowed(info.name) // OPT rare, only check for plausible candidates
&& matchesPt(info) // stable and matches expected type
)
/** The implicits that are not valid because they come later in the source and
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index dd0923a696..997fd6fc65 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -595,6 +595,7 @@ trait Infer extends Checkable {
}
private[typechecker] def followApply(tp: Type): Type = tp match {
+ case _ if tp.isError => tp // SI-8228, `ErrorType nonPrivateMember nme.apply` returns a member with an erroneous type!
case NullaryMethodType(restp) =>
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 8c29c8d242..886dbed6d6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1069,8 +1069,9 @@ trait Namers extends MethodSynthesis {
}
def overriddenSymbol(resTp: Type) = {
+ lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala
intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
+ sym != NoSymbol && (site.memberType(sym) matches schema)
}
}
// TODO: see whether this or something similar would work instead:
@@ -1204,7 +1205,7 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetLocalAttrs(ddef.duplicate)
+ val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate)
// having defs here is important to make sure that there's no sneaky tree sharing
// in methods with multiple default parameters
def rtparams = rtparams0.map(_.duplicate)
@@ -1291,7 +1292,7 @@ trait Namers extends MethodSynthesis {
return // fix #3649 (prevent crash in erroneous source code)
}
}
- val ClassDef(_, _, rtparams, _) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, rtparams, _) = resetAttrs(cdef.duplicate)
defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT)))
nmr
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 46ff98875f..6a4df415ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -173,7 +173,7 @@ trait NamesDefaults { self: Analyzer =>
// setSymbol below is important because the 'selected' function might be overloaded. by
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
- val selectPos =
+ val selectPos =
if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end))
else baseFun.pos
val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos))
@@ -287,7 +287,7 @@ trait NamesDefaults { self: Analyzer =>
}
else {
// TODO In 83c9c764b, we tried to a stable type here to fix SI-7234. But the resulting TypeTree over a
- // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (SI-7516),
// which is important for (at least) macros.
arg.tpe
}
@@ -310,7 +310,7 @@ trait NamesDefaults { self: Analyzer =>
new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
if (repeated) arg match {
case WildcardStarArg(expr) => expr
- case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg))
+ case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg))
} else arg
}
Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 68d724b6fc..2125e281f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1414,7 +1414,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeRef(pre, sym, args) =>
tree match {
case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
- // FIXME: reconcile this check with one in resetAllAttrs
+ // FIXME: reconcile this check with one in resetAttrs
case _ => checkUndesiredProperties(sym, tree.pos)
}
if(sym.isJavaDefined)
@@ -1596,6 +1596,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkUnexpandedMacro(t: Tree) =
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro)
+ unit.error(t.pos, "macro has not been expanded")
+
override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
@@ -1755,6 +1759,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
varianceValidator.traverse(tt.original) // See SI-7872
case _ =>
}
+
+ checkUnexpandedMacro(result)
+
result
} catch {
case ex: TypeError =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 995f98cc2c..57f27a05fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -46,7 +46,7 @@ trait StdAttachments {
* The parameter is of type `Any`, because macros can expand both into trees and into annotations.
*/
def hasMacroExpansionAttachment(any: Any): Boolean = any match {
- case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case tree: Tree => tree.hasAttachment[MacroExpansionAttachment]
case _ => false
}
@@ -96,7 +96,7 @@ trait StdAttachments {
*/
def isMacroExpansionSuppressed(tree: Tree): Boolean =
( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
- || tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined
+ || tree.hasAttachment[SuppressMacroExpansionAttachment.type]
|| (tree match {
// we have to account for the fact that during typechecking an expandee might become wrapped,
// i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
@@ -150,7 +150,7 @@ trait StdAttachments {
/** Determines whether a tree should or should not be adapted,
* because someone has put MacroImplRefAttachment on it.
*/
- def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
+ def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type]
/** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected
* from typedNamedApply, the applyDynamicNamed argument rewriter, the latter
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 9776b1e80e..10fe530445 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -579,7 +579,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+
// Side effect time! Don't be an idiot like me and think you
// can move "val sym = tree.symbol" before this line, because
// inferExprAlternative side-effects the tree's symbol.
@@ -831,7 +832,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else tpr.typed(withImplicitArgs, mode, pt)
}
orElse { _ =>
- val resetTree = resetLocalAttrs(original)
+ val resetTree = resetAttrs(original)
debuglog(s"fallback on implicits: ${tree}/$resetTree")
val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
@@ -992,7 +993,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptMismatchedSkolems() = {
def canIgnoreMismatch = (
!context.reportErrors && isPastTyper
- || tree.attachments.get[MacroExpansionAttachment].isDefined
+ || tree.hasAttachment[MacroExpansionAttachment]
)
def bound = pt match {
case ExistentialType(qs, _) => qs
@@ -2232,14 +2233,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
DeprecatedParamNameError(p, n)
}
}
- }
- if (meth.isStructuralRefinementMember)
- checkMethodStructuralCompatible(ddef)
+ if (meth.isStructuralRefinementMember)
+ checkMethodStructuralCompatible(ddef)
- if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
- case List(param) :: _ if !param.isImplicit =>
- checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
- case _ =>
+ if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+ case List(param) :: _ if !param.isImplicit =>
+ checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+ case _ =>
+ }
}
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
@@ -2306,7 +2307,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
- val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
+ val LabelDef(_, _, rhs1) = resetAttrs(ldef)
+ val rhs2 = typed(rhs1, restpe)
ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
@@ -2588,7 +2590,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
//
// Well behaved trees satisfy the property:
//
- // typed(tree) == typed(resetLocalAttrs(typed(tree))
+ // typed(tree) == typed(resetAttrs(typed(tree)))
//
// Trees constructed without low-level symbol manipulation get this for free;
// references to local symbols are cleared by `ResetAttrs`, but bind to the
@@ -3411,7 +3413,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
// precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen stableTypeFor arg orElse arg.tpe))
+ val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
@@ -3831,7 +3833,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// lifted out of typed1 because it's needed in typedImplicit0
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
- inferPolyAlternatives(fun, args map (_.tpe))
+ inferPolyAlternatives(fun, mapList(args)(treeTpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
@@ -3851,7 +3853,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
- val targs = args map (_.tpe)
+ val targs = mapList(args)(treeTpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (isPredefClassOf(fun.symbol))
typedClassOf(tree, args.head, noGen = true)
@@ -4871,7 +4873,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedHigherKindedType(arg, mode, pt)
}
- val argtypes = args1 map (_.tpe)
+ val argtypes = mapList(args1)(treeTpe)
foreach2(args, tparams) { (arg, tparam) =>
// note: can't use args1 in selector, because Binds got replaced
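Editorial aside, not part of the patch: the "well behaved trees" property quoted above, typed(tree) == typed(resetAttrs(typed(tree))), has a user-level analogue through the public ToolBox API. A minimal sketch, assuming scala-compiler is on the classpath (ToolBox lives there); the object and value names are illustrative only:

import scala.reflect.runtime.{currentMirror => cm}
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

object UntypecheckDemo extends App {
  val tb = cm.mkToolBox()
  val tree  = q"{ val x = 1; x + 1 }"
  val once  = tb.typecheck(tree)
  // Round-trip through untypecheck (the public face of resetAttrs) and typecheck again.
  val twice = tb.typecheck(tb.untypecheck(once))
  println(twice.tpe =:= once.tpe) // true: both typecheck to Int
}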
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index ffac29b4b8..cc2d9141ce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -52,13 +52,13 @@ trait Unapplies extends ast.TreeDSL {
}
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
- val ClassDef(_, _, _, Template(_, _, body)) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, _, Template(_, _, body)) = resetAttrs(cdef.duplicate)
val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body
vparamss
}
private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = {
- val ClassDef(_, _, tparams, _) = resetLocalAttrs(cdef.duplicate)
+ val ClassDef(_, _, tparams, _) = resetAttrs(cdef.duplicate)
val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT)))
tparamsInvariant
}
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index bb0bbd79a3..8630ecf69e 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -20,8 +20,8 @@ trait FastTrack {
private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
- new { val c: c0.type = c0 } with MacroImplementations
+ private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } =
+ new { val c: c0.type = c0 } with FormatInterpolator
private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
new { val c: c0.type = c0 } with QuasiquoteImpls
private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
@@ -48,7 +48,7 @@ trait FastTrack {
makeBlackbox( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
makeBlackbox( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
makeBlackbox( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
- makeBlackbox( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ makeBlackbox( StringContext_f) { case _ => _.interpolate },
makeBlackbox(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree },
makeWhitebox( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote },
makeWhitebox(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote }
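Editorial aside, not part of the patch: StringContext_f, rerouted above from macro_StringInterpolation_f to FormatInterpolator.interpolate, is the macro behind f-interpolated strings. At the use site the expansion behaves roughly like assembling a format string and calling format on it:

object FDemo extends App {
  val name   = "James"
  val height = 1.9d
  // The f interpolator type-checks the conversions at compile time and expands,
  // roughly, to a String.format call on the assembled format string:
  println(f"$name%s is $height%2.2f meters tall")         // James is 1.90 meters tall
  println("%s is %2.2f meters tall".format(name, height)) // same output
}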
diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
new file mode 100644
index 0000000000..d5e674ebae
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -0,0 +1,329 @@
+package scala.tools.reflect
+
+import scala.reflect.macros.runtime.Context
+import scala.collection.mutable.{ ListBuffer, Stack }
+import scala.reflect.internal.util.Position
+import scala.PartialFunction.cond
+import scala.util.matching.Regex.Match
+
+import java.util.{ Formatter, Formattable, IllegalFormatException }
+
+abstract class FormatInterpolator {
+ val c: Context
+ val global: c.universe.type = c.universe
+
+ import c.universe.{ Match => _, _ }
+ import definitions._
+ import treeInfo.Applied
+
+ @inline private def truly(body: => Unit): Boolean = { body ; true }
+ @inline private def falsely(body: => Unit): Boolean = { body ; false }
+
+ private def fail(msg: String) = c.abort(c.enclosingPosition, msg)
+ private def bail(msg: String) = global.abort(msg)
+
+ def interpolate: Tree = c.macroApplication match {
+ //case q"$_(..$parts).f(..$args)" =>
+ case Applied(Select(Apply(_, parts), _), _, argss) =>
+ val args = argss.flatten
+ def badlyInvoked = (parts.length != args.length + 1) && truly {
+ def because(s: String) = s"too $s arguments for interpolated string"
+ val (p, msg) =
+ if (parts.length == 0) (c.prefix.tree.pos, "there are no parts")
+ else if (args.length + 1 < parts.length)
+ (if (args.isEmpty) c.enclosingPosition else args.last.pos, because("few"))
+ else (args(parts.length-1).pos, because("many"))
+ c.abort(p, msg)
+ }
+ if (badlyInvoked) c.macroApplication else interpolated(parts, args)
+ case other =>
+ bail(s"Unexpected application ${showRaw(other)}")
+ other
+ }
+
+ /** Every part except the first must begin with a conversion for
+ * the arg that preceded it. If the conversion is missing, "%s"
+ * is inserted.
+ *
+ * In any other position, the only permissible conversions are
+ * the literals (%% and %n) or an index reference (%1$ or %<).
+ *
+ * A conversion specifier has the form:
+ *
+ * [index$][flags][width][.precision]conversion
+ *
+ * 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ * 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ * 3) "...${smth}%" => error
+ * 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ * 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ * 6) "...${smth}[%legalJavaConversion]" => okay*
+ * 7) "...${smth}[%illegalJavaConversion]" => error
+ * *Legal according to [[http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html]]
+ */
+ def interpolated(parts: List[Tree], args: List[Tree]) = {
+ val fstring = new StringBuilder
+ val evals = ListBuffer[ValDef]()
+ val ids = ListBuffer[Ident]()
+ val argStack = Stack(args: _*)
+
+ // create a tmp val and add it to the ids passed to format
+ def defval(value: Tree, tpe: Type): Unit = {
+ val freshName = TermName(c.freshName("arg$"))
+ evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
+ ids += Ident(freshName)
+ }
+ // Append the nth part to the string builder, possibly prepending an omitted %s first.
+ // Sanity-check the % fields in this part.
+ def copyPart(part: Tree, n: Int): Unit = {
+ import SpecifierGroups.{ Spec, Index }
+ val s0 = part match {
+ case Literal(Constant(x: String)) => x
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val s = StringContext.treatEscapes(s0)
+ val ms = fpat findAllMatchIn s
+
+ def errorLeading(op: Conversion) = op.errorAt(Spec, s"conversions must follow a splice; ${Conversion.literalHelp}")
+
+ def first = n == 0
+ // a conversion for the arg is required
+ if (!first) {
+ val arg = argStack.pop()
+ def s_%() = {
+ fstring append "%s"
+ defval(arg, AnyTpe)
+ }
+ def accept(op: Conversion) = {
+ if (!op.isLeading) errorLeading(op)
+ op.accepts(arg) match {
+ case Some(tpe) => defval(arg, tpe)
+ case None =>
+ }
+ }
+ if (ms.hasNext) {
+ Conversion(ms.next, part.pos, args.size) match {
+ case Some(op) if op.isLiteral => s_%()
+ case Some(op) if op.indexed =>
+ if (op.index map (_ == n) getOrElse true) accept(op)
+ else {
+ // either some other arg num, or '<'
+ c.warning(op.groupPos(Index), "Index is not this arg")
+ s_%()
+ }
+ case Some(op) => accept(op)
+ case None =>
+ }
+ } else s_%()
+ }
+ // any remaining conversions must be either literals or indexed
+ while (ms.hasNext) {
+ Conversion(ms.next, part.pos, args.size) match {
+ case Some(op) if first && op.hasFlag('<') => op.badFlag('<', "No last arg")
+ case Some(op) if op.isLiteral || op.indexed => // OK
+ case Some(op) => errorLeading(op)
+ case None =>
+ }
+ }
+ fstring append s
+ }
+
+ parts.zipWithIndex foreach {
+ case (part, n) => copyPart(part, n)
+ }
+
+ //q"{..$evals; ${fstring.toString}.format(..$ids)}"
+ locally {
+ val expr =
+ Apply(
+ Select(
+ Literal(Constant(fstring.toString)),
+ newTermName("format")),
+ ids.toList
+ )
+ val p = c.macroApplication.pos
+ Block(evals.toList, atPos(p.focus)(expr)) setPos p.makeTransparent
+ }
+ }
+
+ val fpat = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r
+ object SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, Precision, CC = Value }
+
+ val stdContextTags = new { val tc: c.type = c } with StdContextTags
+ import stdContextTags._
+ val tagOfFormattable = typeTag[Formattable]
+
+ /** A conversion specifier matched by `m` in the string part at `pos`,
+ * with `argc` arguments to interpolate.
+ */
+ sealed trait Conversion {
+ def m: Match
+ def pos: Position
+ def argc: Int
+
+ import SpecifierGroups.{ Value => SpecGroup, _ }
+ private def maybeStr(g: SpecGroup) = Option(m group g.id)
+ private def maybeInt(g: SpecGroup) = maybeStr(g) map (_.toInt)
+ val index: Option[Int] = maybeInt(Index)
+ val flags: Option[String] = maybeStr(Flags)
+ val width: Option[Int] = maybeInt(Width)
+ val precision: Option[Int] = maybeStr(Precision) map (_.drop(1).toInt)
+ val op: String = maybeStr(CC) getOrElse ""
+
+ def cc: Char = if ("tT" contains op(0)) op(1) else op(0)
+
+ def indexed: Boolean = index.nonEmpty || hasFlag('<')
+ def isLiteral: Boolean = false
+ def isLeading: Boolean = m.start(0) == 0
+ def verify: Boolean = goodFlags && goodIndex
+ def accepts(arg: Tree): Option[Type]
+
+ val allFlags = "-#+ 0,(<"
+ def hasFlag(f: Char) = (flags getOrElse "") contains f
+ def hasAnyFlag(fs: String) = fs exists (hasFlag)
+
+ def badFlag(f: Char, msg: String) = {
+ val i = flags map (_.indexOf(f)) filter (_ >= 0) getOrElse 0
+ errorAtOffset(Flags, i, msg)
+ }
+ def groupPos(g: SpecGroup) = groupPosAt(g, 0)
+ def groupPosAt(g: SpecGroup, i: Int) = pos withPoint (pos.point + m.start(g.id) + i)
+ def errorAt(g: SpecGroup, msg: String) = c.error(groupPos(g), msg)
+ def errorAtOffset(g: SpecGroup, i: Int, msg: String) = c.error(groupPosAt(g, i), msg)
+
+ def noFlags = flags.isEmpty || falsely { errorAt(Flags, "flags not allowed") }
+ def noWidth = width.isEmpty || falsely { errorAt(Width, "width not allowed") }
+ def noPrecision = precision.isEmpty || falsely { errorAt(Precision, "precision not allowed") }
+ def only_-(msg: String) = {
+ val badFlags = (flags getOrElse "") filterNot { case '-' | '<' => true case _ => false }
+ badFlags.isEmpty || falsely { badFlag(badFlags(0), s"Only '-' allowed for $msg") }
+ }
+ protected def okFlags: String = allFlags
+ def goodFlags = {
+ val badFlags = flags map (_ filterNot (okFlags contains _))
+ for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'")
+ badFlags.getOrElse("").isEmpty
+ }
+ def goodIndex = {
+ if (index.nonEmpty && hasFlag('<'))
+ c.warning(groupPos(Index), "Argument index ignored if '<' flag is present")
+ val okRange = index map (i => i > 0 && i <= argc) getOrElse true
+ okRange || hasFlag('<') || falsely { errorAt(Index, "Argument index out of range") }
+ }
+ /** Pick the type of an arg to format from among the variants
+ * supported by a conversion. This is the type of the temporary,
+ * so failure results in an erroneous assignment to the first variant.
+ * A more complete message would be nice.
+ */
+ def pickAcceptable(arg: Tree, variants: Type*): Option[Type] =
+ variants find (arg.tpe <:< _) orElse (
+ variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree)
+ ) orElse Some(variants(0))
+ }
+ object Conversion {
+ import SpecifierGroups.{ Spec, CC, Width }
+ def apply(m: Match, p: Position, n: Int): Option[Conversion] = {
+ def badCC(msg: String) = {
+ val dk = new ErrorXn(m, p)
+ val at = if (dk.op.isEmpty) Spec else CC
+ dk.errorAt(at, msg)
+ }
+ def cv(cc: Char) = cc match {
+ case 'b' | 'B' | 'h' | 'H' | 's' | 'S' =>
+ new GeneralXn(m, p, n)
+ case 'c' | 'C' =>
+ new CharacterXn(m, p, n)
+ case 'd' | 'o' | 'x' | 'X' =>
+ new IntegralXn(m, p, n)
+ case 'e' | 'E' | 'f' | 'g' | 'G' | 'a' | 'A' =>
+ new FloatingPointXn(m, p, n)
+ case 't' | 'T' =>
+ new DateTimeXn(m, p, n)
+ case '%' | 'n' =>
+ new LiteralXn(m, p, n)
+ case _ =>
+ badCC(s"illegal conversion character '$cc'")
+ null
+ }
+ Option(m group CC.id) map (cc => cv(cc(0))) match {
+ case Some(x) => Option(x) filter (_.verify)
+ case None =>
+ badCC(s"Missing conversion operator in '${m.matched}'; $literalHelp")
+ None
+ }
+ }
+ val literalHelp = "use %% for literal %, %n for newline"
+ }
+ class GeneralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ def accepts(arg: Tree) = cc match {
+ case 's' | 'S' if hasFlag('#') => pickAcceptable(arg, tagOfFormattable.tpe)
+ case 'b' | 'B' => if (arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
+ case _ => Some(AnyTpe)
+ }
+ override protected def okFlags = cc match {
+ case 's' | 'S' => "-#<"
+ case _ => "-<"
+ }
+ }
+ class LiteralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ import SpecifierGroups.Width
+ override val isLiteral = true
+ override def verify = op match {
+ case "%" => super.verify && noPrecision && truly(width foreach (_ => c.warning(groupPos(Width), "width ignored on literal")))
+ case "n" => noFlags && noWidth && noPrecision
+ }
+ override protected val okFlags = "-"
+ def accepts(arg: Tree) = None
+ }
+ class CharacterXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = super.verify && noPrecision && only_-("c conversion")
+ def accepts(arg: Tree) = pickAcceptable(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
+ }
+ class IntegralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = {
+ def d_# = (cc == 'd' && hasFlag('#') &&
+ truly { badFlag('#', "# not allowed for d conversion") }
+ )
+ def x_comma = (cc != 'd' && hasFlag(',') &&
+ truly { badFlag(',', "',' only allowed for d conversion of integral types") }
+ )
+ super.verify && noPrecision && !d_# && !x_comma
+ }
+ override def accepts(arg: Tree) = {
+ def isBigInt = arg.tpe <:< tagOfBigInt.tpe
+ val maybeOK = "+ ("
+ def bad_+ = cond(cc) {
+ case 'o' | 'x' | 'X' if hasAnyFlag(maybeOK) && !isBigInt =>
+ maybeOK filter hasFlag foreach (badf =>
+ badFlag(badf, s"only use '$badf' for BigInt conversions to o, x, X"))
+ true
+ }
+ if (bad_+) None else pickAcceptable(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
+ }
+ }
+ class FloatingPointXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ override def verify = super.verify && (cc match {
+ case 'a' | 'A' =>
+ val badFlags = ",(" filter hasFlag
+ noPrecision && badFlags.isEmpty || falsely {
+ badFlags foreach (badf => badFlag(badf, s"'$badf' not allowed for a, A"))
+ }
+ case _ => true
+ })
+ def accepts(arg: Tree) = pickAcceptable(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
+ }
+ class DateTimeXn(val m: Match, val pos: Position, val argc: Int) extends Conversion {
+ import SpecifierGroups.CC
+ def hasCC = (op.length == 2 ||
+ falsely { errorAt(CC, "Date/time conversion must have two characters") })
+ def goodCC = ("HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc" contains cc) ||
+ falsely { errorAtOffset(CC, 1, s"'$cc' doesn't seem to be a date or time conversion") }
+ override def verify = super.verify && hasCC && goodCC && noPrecision && only_-("date/time conversions")
+ def accepts(arg: Tree) = pickAcceptable(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
+ }
+ class ErrorXn(val m: Match, val pos: Position) extends Conversion {
+ val argc = 0
+ override def verify = false
+ def accepts(arg: Tree) = None
+ }
+}
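Editorial aside, not part of the patch: the fpat regex above drives all of the Conversion parsing. A stand-alone sketch of how it splits a specifier such as "%2$-10.3f" into the SpecifierGroups; the regex is copied verbatim from the new file, the demo harness is illustrative:

import scala.util.matching.Regex

object FormatSpecDemo extends App {
  // Same shape as fpat above: [index$][flags][width][.precision]conversion
  val fpat: Regex = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r

  val m = fpat.findFirstMatchIn("%2$-10.3f").get
  // Capture groups line up with SpecifierGroups: Spec (id 0) is the whole match,
  // then 1 = Index, 2 = Flags, 3 = Width, 4 = Precision, 5 = CC.
  assert(m.group(1) == "2")  // argument index
  assert(m.group(2) == "-")  // flags
  assert(m.group(3) == "10") // width
  assert(m.group(4) == ".3") // precision, with the leading dot (dropped by the macro)
  assert(m.group(5) == "f")  // conversion character
  println("all groups parsed as expected")
}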
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
deleted file mode 100644
index a9ed419b1e..0000000000
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ /dev/null
@@ -1,171 +0,0 @@
-package scala.tools.reflect
-
-import scala.reflect.macros.contexts.Context
-import scala.collection.mutable.ListBuffer
-import scala.collection.mutable.Stack
-import scala.reflect.internal.util.Position
-
-abstract class MacroImplementations {
- val c: Context
-
- import c.universe._
- import definitions._
-
- def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = {
- // the parts all have the same position information (as the expression is generated by the compiler)
- // the args have correct position information
-
- // the following conditions can only be violated if invoked directly
- if (parts.length != args.length + 1) {
- if(parts.length == 0)
- c.abort(c.prefix.tree.pos, "too few parts")
- else if(args.length + 1 < parts.length)
- c.abort(if(args.length==0) c.enclosingPosition else args.last.pos,
- "too few arguments for interpolated string")
- else
- c.abort(args(parts.length-1).pos,
- "too many arguments for interpolated string")
- }
-
- val pi = parts.iterator
- val bldr = new java.lang.StringBuilder
- val evals = ListBuffer[ValDef]()
- val ids = ListBuffer[Ident]()
- val argStack = Stack(args : _*)
-
- def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.freshName("arg$"))
- evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
- ids += Ident(freshName)
- }
-
- def isFlag(ch: Char): Boolean = {
- ch match {
- case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
- case _ => false
- }
- }
-
- def checkType(arg: Tree, variants: Type*): Option[Type] = {
- variants.find(arg.tpe <:< _).orElse(
- variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse(
- Some(variants(0))
- )
- )
- }
-
- val stdContextTags = new { val tc: c.type = c } with StdContextTags
- import stdContextTags._
-
- def conversionType(ch: Char, arg: Tree): Option[Type] = {
- ch match {
- case 'b' | 'B' =>
- if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
- case 'h' | 'H' =>
- Some(AnyTpe)
- case 's' | 'S' =>
- Some(AnyTpe)
- case 'c' | 'C' =>
- checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
- case 'd' | 'o' | 'x' | 'X' =>
- checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
- case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' =>
- checkType(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
- case 't' | 'T' =>
- checkType(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
- case _ => None
- }
- }
-
- def copyString(first: Boolean): Unit = {
- val strTree = pi.next()
- val rawStr = strTree match {
- case Literal(Constant(str: String)) => str
- case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
- }
- val str = StringContext.treatEscapes(rawStr)
- val strLen = str.length
- val strIsEmpty = strLen == 0
- def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
- def isPercent(idx: Int) = charAtIndexIs(idx, '%')
- def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
- var idx = 0
-
- def errorAtIndex(idx: Int, msg: String) = c.error(Position.offset(strTree.pos.source, strTree.pos.point + idx), msg)
- def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
- def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
- def nonEscapedPercent(idx: Int) = errorAtIndex(idx,
- "conversions must follow a splice; use %% for literal %, %n for newline")
-
- // STEP 1: handle argument conversion
- // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
- // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
- // 3) "...${smth}%" => error
- // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
- // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
- // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
- // 7) "...${smth}[%illegalJavaConversion]" => error
- if (!first) {
- val arg = argStack.pop()
- if (isConversion(0)) {
- // PRE str is not empty and str(0) == '%'
- // argument index parameter is not allowed, thus parse
- // [flags][width][.precision]conversion
- var pos = 1
- while (pos < strLen && isFlag(str charAt pos)) pos += 1
- while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- if (pos < strLen && str.charAt(pos) == '.') {
- pos += 1
- while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- }
- if (pos < strLen) {
- conversionType(str charAt pos, arg) match {
- case Some(tpe) => defval(arg, tpe)
- case None => illegalConversionCharacter(pos)
- }
- } else {
- wrongConversionString(pos - 1)
- }
- idx = 1
- } else {
- bldr append "%s"
- defval(arg, AnyTpe)
- }
- }
-
- // STEP 2: handle the rest of the text
- // 1) %n tokens are left as is
- // 2) %% tokens are left as is
- // 3) other usages of percents are reported as errors
- if (!strIsEmpty) {
- while (idx < strLen) {
- if (isPercent(idx)) {
- if (isConversion(idx)) nonEscapedPercent(idx)
- else idx += 1 // skip n and % in %n and %%
- }
- idx += 1
- }
- bldr append (str take idx)
- }
- }
-
- copyString(first = true)
- while (pi.hasNext) {
- copyString(first = false)
- }
-
- val fstring = bldr.toString
-// val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*))
-// https://issues.scala-lang.org/browse/SI-5824, therefore
- val expr =
- Apply(
- Select(
- Literal(Constant(fstring)),
- newTermName("format")),
- List(ids: _* )
- )
-
- Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
- }
-
-}
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index 4c1bc794bc..02a458214f 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -71,12 +71,6 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
*/
def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
- /** Recursively resets symbols and types in a given tree.
- * WARNING: Don't use this API, go for [[untypecheck]] instead.
- */
- @deprecated("Use `tb.untypecheck` instead", "2.11.0")
- def resetAllAttrs(tree: u.Tree): u.Tree
-
/** Recursively resets locally defined symbols and types in a given tree.
* WARNING: Don't use this API, go for [[untypecheck]] instead.
*/
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 4a8c91bd1b..541a915adb 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -236,7 +236,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
- val cleanedUp = resetLocalAttrs(moduledef)
+ val cleanedUp = resetAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp.asInstanceOf[ModuleDef]
}
@@ -385,18 +385,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
uitree
}
- def resetAllAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
- import compilerApi._
- val ctree: compiler.Tree = importer.importTree(tree)
- val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
- val uttree = exporter.importTree(ttree)
- uttree
- }
-
def resetLocalAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi =>
import compilerApi._
val ctree: compiler.Tree = importer.importTree(tree)
- val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
+ val ttree: compiler.Tree = compiler.resetAttrs(ctree)
val uttree = exporter.importTree(ttree)
uttree
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index 5669ec731f..825d0c04f3 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -17,7 +17,6 @@ trait Placeholders { self: Quasiquotes =>
// Step 1: Transform Scala source with holes into vanilla Scala source
- lazy val holeMap = new HoleMap()
lazy val posMap = mutable.ListMap[Position, (Int, Int)]()
lazy val code = {
val sb = new StringBuilder()
@@ -58,25 +57,27 @@ trait Placeholders { self: Quasiquotes =>
sb.toString
}
- class HoleMap {
- private var underlying = immutable.SortedMap[String, Hole]()
- private val accessed = mutable.Set[String]()
+ object holeMap {
+ private val underlying = mutable.LinkedHashMap.empty[String, Hole]
+ private val accessed = mutable.Set.empty[String]
def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
- def contains(key: Name) = underlying.contains(key.toString)
- def apply(key: Name) = {
- val s = key.toString
- accessed += s
- underlying(s)
- }
- def update(key: Name, hole: Hole) = {
+ def contains(key: Name): Boolean = underlying.contains(key.toString)
+ def apply(key: Name): Hole = {
+ val skey = key.toString
+ val value = underlying(skey)
+ accessed += skey
+ value
+ }
+ def update(key: Name, hole: Hole) =
underlying += key.toString -> hole
+ def get(key: Name): Option[Hole] = {
+ val skey = key.toString
+ underlying.get(skey).map { v =>
+ accessed += skey
+ v
+ }
}
- def get(key: Name) = {
- val s = key.toString
- accessed += s
- underlying.get(s)
- }
- def toList = underlying.toList
+ def keysIterator: Iterator[TermName] = underlying.keysIterator.map(TermName(_))
}
// Step 2: Transform vanilla Scala AST into an AST with holes
@@ -179,4 +180,4 @@ trait Placeholders { self: Quasiquotes =>
case _ => None
}
}
-}
\ No newline at end of file
+}
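Editorial aside, not part of the patch: a stand-alone sketch of the access-tracking idea behind the rewritten holeMap above, with String keys standing in for Names and Int values for Holes. A LinkedHashMap keeps insertion order for keysIterator, while a separate accessed set lets unused report keys that were never looked up:

import scala.collection.mutable

object AccessTrackingMap {
  private val underlying = mutable.LinkedHashMap.empty[String, Int]
  private val accessed   = mutable.Set.empty[String]

  def update(key: String, value: Int): Unit = underlying += key -> value
  def get(key: String): Option[Int] = underlying.get(key).map { v => accessed += key; v }
  def unused: Set[String] = underlying.keySet.toSet -- accessed
  def keysIterator: Iterator[String] = underlying.keysIterator // insertion order
}

object AccessTrackingDemo extends App {
  AccessTrackingMap("a") = 1
  AccessTrackingMap("b") = 2
  AccessTrackingMap.get("a")
  println(AccessTrackingMap.unused)              // Set(b)
  println(AccessTrackingMap.keysIterator.toList) // List(a, b)
}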
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index 3e703924e8..396688c437 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -16,7 +16,7 @@ abstract class Quasiquotes extends Parsers
lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
case Apply(build.SyntacticTypeApplied(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), _), args0) =>
- debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
+ debug(s"parse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
val parts1 = parts0.map {
case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
@@ -43,8 +43,8 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
- debug(s"\nmacro application:\n${c.macroApplication}\n")
- debug(s"\ncode to parse:\n$code\n")
+ debug(s"macro application:\n${c.macroApplication}\n")
+ debug(s"code to parse:\n$code\n")
val tree = parse(code)
debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 273245f7bd..017e966f63 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -29,7 +29,7 @@ trait Reifiers { self: Quasiquotes =>
/** Map that stores freshly generated names linked to the corresponding names in the reified tree.
* This information is used to reify names created by calls to freshTermName and freshTypeName.
*/
- var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+ val nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
/** Wraps expressions into:
* a block which starts with a sequence of vals that correspond
@@ -71,7 +71,7 @@ trait Reifiers { self: Quasiquotes =>
// q"..$freshdefs; $tree"
SyntacticBlock(freshdefs :+ tree)
} else {
- val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val freevars = holeMap.keysIterator.map(Ident(_)).toList
val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
val cases =
if(isVarPattern) {
@@ -162,7 +162,7 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
case SyntacticDefDef(mods, name, tparams, build.ImplicitParams(vparamss, implparams), tpt, rhs) =>
if (implparams.nonEmpty)
- mirrorBuildCall(nme.SyntacticDefDef, reify(mods), reify(name), reify(tparams),
+ mirrorBuildCall(nme.SyntacticDefDef, reify(mods), reify(name), reify(tparams),
reifyBuildCall(nme.ImplicitParams, vparamss, implparams), reify(tpt), reify(rhs))
else
reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
@@ -190,6 +190,10 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticFunction, args, body)
case SyntacticIdent(name, isBackquoted) =>
reifyBuildCall(nme.SyntacticIdent, name, isBackquoted)
+ case SyntacticEmptyTypeTree() =>
+ reifyBuildCall(nme.SyntacticEmptyTypeTree)
+ case SyntacticImport(expr, selectors) =>
+ reifyBuildCall(nme.SyntacticImport, expr, selectors)
case Q(Placeholder(Hole(tree, DotDot))) =>
mirrorBuildCall(nme.SyntacticBlock, tree)
case Q(other) =>
@@ -305,7 +309,7 @@ trait Reifiers { self: Quasiquotes =>
* > reifyMultiCardinalityList(lst) { ... } { ... }
* q"List($foo, $bar) ++ ${holeMap(qq$f3948f9s$1).tree}"
*/
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree
/** Reifies arbitrary list filling ..$x and ...$y holeMap when they are put
* in the correct position. Fallbacks to regular reification for non-high cardinality
@@ -361,10 +365,10 @@ trait Reifiers { self: Quasiquotes =>
}
class ApplyReifier extends Reifier(isReifyingExpressions = true) {
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree =
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree =
if (xs.isEmpty) mkList(Nil)
else {
- def reifyGroup(group: List[T]): Tree = group match {
+ def reifyGroup(group: List[Any]): Tree = group match {
case List(elem) if fill.isDefinedAt(elem) => fill(elem)
case elems => mkList(elems.map(fallback))
}
@@ -403,14 +407,26 @@ trait Reifiers { self: Quasiquotes =>
}
class UnapplyReifier extends Reifier(isReifyingExpressions = false) {
- def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree = xs match {
- case init :+ last if fill.isDefinedAt(last) =>
- init.foldRight[Tree](fill(last)) { (el, rest) =>
- val cons = Select(Select(Select(Ident(nme.scala_), nme.collection), nme.immutable), nme.CONS)
- Apply(cons, List(fallback(el), rest))
- }
- case _ =>
- mkList(xs.map(fallback))
+ private def collection = ScalaDot(nme.collection)
+ private def collectionColonPlus = Select(collection, nme.COLONPLUS)
+ private def collectionCons = Select(Select(collection, nme.immutable), nme.CONS)
+ private def collectionNil = Select(Select(collection, nme.immutable), nme.Nil)
+ // pq"$lhs :+ $rhs"
+ private def append(lhs: Tree, rhs: Tree) = Apply(collectionColonPlus, lhs :: rhs :: Nil)
+ // pq"$lhs :: $rhs"
+ private def cons(lhs: Tree, rhs: Tree) = Apply(collectionCons, lhs :: rhs :: Nil)
+
+ def reifyMultiCardinalityList(xs: List[Any])(fill: PartialFunction[Any, Tree])(fallback: Any => Tree): Tree = {
+ val grouped = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+ def appended(lst: List[Any], init: Tree) = lst.foldLeft(init) { (l, r) => append(l, fallback(r)) }
+ def prepended(lst: List[Any], init: Tree) = lst.foldRight(init) { (l, r) => cons(fallback(l), r) }
+ grouped match {
+ case init :: List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, prepended(init, fill(hole)))
+ case init :: List(hole) :: Nil if fill.isDefinedAt(hole) => prepended(init, fill(hole))
+ case List(hole) :: last :: Nil if fill.isDefinedAt(hole) => appended(last, fill(hole))
+ case List(hole) :: Nil if fill.isDefinedAt(hole) => fill(hole)
+ case _ => prepended(xs, collectionNil)
+ }
}
override def reifyModifiers(m: Modifiers) =
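Editorial aside, not part of the patch: the grouped :+ / :: pattern construction above presumably enables unapply patterns in which a ..$ splice is surrounded by fixed elements on both sides. An illustrative sketch against the runtime universe (assuming a build with this change applied):

import scala.reflect.runtime.universe._

object SpliceDemo extends App {
  q"f(1, 2, 3, 4)" match {
    // fixed first and last elements around a ..$ splice in the middle
    case q"f($first, ..$middle, $last)" =>
      println(showCode(first))      // 1
      println(middle.map(showCode)) // List(2, 3)
      println(showCode(last))       // 4
  }
}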
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 35528a276d..c2aab19f18 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -4,7 +4,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index d028dcc21e..8a599bd8c7 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -7,5 +7,6 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index 4f67a22b8f..bf718c27cc 100644
--- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -64,9 +64,12 @@ trait ContextTrees { self: Global =>
def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
if (contexts.isEmpty) None
else {
+ // binary search on contexts, loop invar: lo <= hi, recursion metric: `hi - lo`
@tailrec
def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
- if (pos properlyPrecedes contexts(lo).pos)
+ // [SI-8239] enforce loop invariant & ensure recursion metric decreases monotonically on every recursion
+ if (lo > hi) previousSibling
+ else if (pos properlyPrecedes contexts(lo).pos)
previousSibling
else if (contexts(hi).pos properlyPrecedes pos)
Some(contexts(hi))
@@ -76,9 +79,18 @@ trait ContextTrees { self: Global =>
if (midpos includes pos)
Some(contexts(mid))
else if (midpos properlyPrecedes pos)
+ // recursion metric: (hi - ((lo + hi)/2 + 1)) < (hi - lo)
+ // since (hi - ((lo + hi)/2 + 1)) - (hi - lo) = lo - ((lo + hi)/2 + 1) < 0
+ // since 2*lo - lo - hi - 2 = lo - hi - 2 < 0
+ // since lo < hi + 2
+ // can violate lo <= hi, hence the lo > hi check at the top [SI-8239]
loop(mid + 1, hi, Some(contexts(mid)))
- else
+ else if (lo != hi) // avoid looping forever (lo == hi violates the recursion metric) [SI-8239]
+ // recursion metric: ((lo + hi)/2) - lo < (hi - lo)
+ // since ((lo + hi)/2) - lo - (hi - lo) = ((lo + hi)/2) - hi < 0
+ // since 2 * (((lo + hi)/2) - hi) = lo - hi < 0 since lo < hi
loop(lo, mid, previousSibling)
+ else previousSibling
}
}
loop(0, contexts.length - 1, None)
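Editorial aside, not part of the patch: the SI-8239 fix above rests on the standard binary-search termination argument (a lo <= hi invariant plus a strictly decreasing hi - lo metric). A stand-alone analogue with the same shape of guard; unlike the compiler code, it descends to mid - 1, so it does not need the extra lo != hi check:

import scala.annotation.tailrec

object BinarySearchDemo extends App {
  // Index of the last element <= key in a sorted array, or -1 if none exists.
  // Invariant: 0 <= lo and hi < xs.length; metric hi - lo strictly decreases.
  def lastLessOrEqual(xs: Array[Int], key: Int): Int = {
    @tailrec def loop(lo: Int, hi: Int, best: Int): Int =
      if (lo > hi) best // guard mirrors the `lo > hi` base case added above
      else {
        val mid = lo + (hi - lo) / 2
        if (xs(mid) <= key) loop(mid + 1, hi, mid) // hi - (mid + 1) < hi - lo
        else loop(lo, mid - 1, best)               // (mid - 1) - lo < hi - lo
      }
    loop(0, xs.length - 1, -1)
  }

  val xs = Array(1, 3, 5, 7)
  println(lastLessOrEqual(xs, 6)) // 2  (element 5)
  println(lastLessOrEqual(xs, 0)) // -1
}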
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 90aabc5a9a..c3728fa02a 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -325,36 +325,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
// Create a proxy for Java serialization that allows us to avoid mutation
// during de-serialization. This is the Serialization Proxy Pattern.
- protected final def writeReplace(): AnyRef = new SerializationProxy(this)
-}
-
-@SerialVersionUID(1L)
-private class SerializationProxy[B](@transient private var orig: List[B]) extends Serializable {
-
- private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = orig
- while (!xs.isEmpty) {
- out.writeObject(xs.head)
- xs = xs.tail
- }
- out.writeObject(ListSerializeEnd)
- }
-
- // Java serialization calls this before readResolve during de-serialization.
- // Read the whole list and store it in `orig`.
- private def readObject(in: ObjectInputStream) {
- val builder = List.newBuilder[B]
- while (true) in.readObject match {
- case ListSerializeEnd =>
- orig = builder.result()
- return
- case a =>
- builder += a.asInstanceOf[B]
- }
- }
-
- // Provide the result stored in `orig` for Java serialization
- private def readResolve(): AnyRef = orig
+ protected final def writeReplace(): AnyRef = new List.SerializationProxy(this)
}
/** The empty list.
@@ -385,8 +356,7 @@ case object Nil extends List[Nothing] {
* @version 1.0, 15/07/2003
* @since 2.8
*/
-final case class ::[B](private val hd: B, private[scala] var tl: List[B]) extends List[B] {
- override def head : B = hd
+final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
override def tail : List[B] = tl
override def isEmpty: Boolean = false
}
@@ -405,6 +375,35 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
+
+ @SerialVersionUID(1L)
+ private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
+
+ private def writeObject(out: ObjectOutputStream) {
+ var xs: List[A] = orig
+ while (!xs.isEmpty) {
+ out.writeObject(xs.head)
+ xs = xs.tail
+ }
+ out.writeObject(ListSerializeEnd)
+ }
+
+ // Java serialization calls this before readResolve during de-serialization.
+ // Read the whole list and store it in `orig`.
+ private def readObject(in: ObjectInputStream) {
+ val builder = List.newBuilder[A]
+ while (true) in.readObject match {
+ case ListSerializeEnd =>
+ orig = builder.result()
+ return
+ case a =>
+ builder += a.asInstanceOf[A]
+ }
+ }
+
+ // Provide the result stored in `orig` for Java serialization
+ private def readResolve(): AnyRef = orig
+ }
}
/** Only used for list serialization */
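Editorial aside, not part of the patch: the comment above names the Serialization Proxy Pattern, which this change moves into the List companion. An illustrative stand-alone sketch of the pattern for a simple immutable class (Box and its members are made-up names, not library code):

import java.io._

final class Box[A](val value: A) extends Serializable {
  // Java serialization writes the proxy instead of the Box itself.
  protected def writeReplace(): AnyRef = new Box.SerializationProxy(this)
}

object Box {
  @SerialVersionUID(1L)
  private class SerializationProxy[A](@transient private var orig: Box[A]) extends Serializable {
    private def writeObject(out: ObjectOutputStream): Unit =
      out.writeObject(orig.value)
    private def readObject(in: ObjectInputStream): Unit =
      orig = new Box(in.readObject().asInstanceOf[A]) // rebuild without mutating Box
    // After readObject, hand the reconstructed Box back to the serialization machinery.
    private def readResolve(): AnyRef = orig
  }
}

object BoxDemo extends App {
  val bos = new ByteArrayOutputStream()
  val oos = new ObjectOutputStream(bos)
  oos.writeObject(new Box("hello"))
  oos.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
  println(in.readObject().asInstanceOf[Box[String]].value) // hello
}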
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index e21a8dfa8a..0fbf7942d4 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -82,6 +82,16 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 2 */
@@ -102,6 +112,17 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 3 */
@@ -123,6 +144,18 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 4 */
@@ -145,6 +178,19 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3); f(elem4)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3) || f(elem4)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3) && f(elem4)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else if (f(elem4)) Some(elem4)
+ else None
+ }
}
}
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 49c3b4c3cd..60de147477 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1108,11 +1108,15 @@ object Stream extends SeqFactory[Stream] {
override def isEmpty = false
override def head = hd
@volatile private[this] var tlVal: Stream[A] = _
- def tailDefined: Boolean = tlVal ne null
+ @volatile private[this] var tlGen = tl _
+ def tailDefined: Boolean = tlGen eq null
override def tail: Stream[A] = {
if (!tailDefined)
synchronized {
- if (!tailDefined) tlVal = tl
+ if (!tailDefined) {
+ tlVal = tlGen()
+ tlGen = null
+ }
}
tlVal
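Editorial aside, not part of the patch: a stand-alone sketch of the memoize-then-release idea introduced above. The by-name value is captured as a Function0, evaluated at most once under double-checked locking, and the thunk is then nulled so the captured closure can be garbage collected; "evaluated" means "the generator has been cleared", mirroring the new tailDefined:

final class LazyCell[A](value: => A) {
  @volatile private[this] var thunk: () => A = () => value
  @volatile private[this] var cached: A = _

  def isEvaluated: Boolean = thunk eq null

  def get: A = {
    if (!isEvaluated)
      synchronized {
        if (!isEvaluated) {
          cached = thunk()
          thunk = null // release the closure, mirroring `tlGen = null`
        }
      }
    cached
  }
}

object LazyCellDemo extends App {
  var evaluations = 0
  val cell = new LazyCell({ evaluations += 1; 42 })
  println(s"${cell.get} ${cell.get} $evaluations") // 42 42 1
}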
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 43d46cf4d0..8e1d950d00 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -131,6 +131,7 @@ self =>
* end characters, i.e. apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
+ @deprecated("Use `lines` instead.","2.11.0")
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
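Editorial aside, not part of the patch: with linesIterator deprecated, the suggested replacement on a 2.11 StringLike is simply lines:

object LinesDemo extends App {
  println("first\nsecond".lines.toList) // List(first, second)
}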
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 29c92a111c..47fb66744e 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -22,9 +22,9 @@ import generic.CanBuildFrom
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
*
- * This map is not indended to contain more than 2^29 entries (approximately
- * 500 million). The maximum capacity is 2^30, but performance will degrade
- * rapidly as 2^30 is approached.
+ * This map is not intended to contain more than 2^29^ entries (approximately
+ * 500 million). The maximum capacity is 2^30^, but performance will degrade
+ * rapidly as 2^30^ is approached.
*
*/
final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean)
@@ -291,24 +291,21 @@ extends AbstractMap[K, V]
private[this] val vz = _values
private[this] var index = 0
- private[this] var found = false
- def hasNext = found || (index<hz.length && {
+ def hasNext: Boolean = index<hz.length && {
var h = hz(index)
- if (h+h != 0) found = true
- else {
+ while (h+h == 0) {
index += 1
- while (index < hz.length && { h = hz(index); h+h == 0 }) index += 1
- found = (index < hz.length)
+ if (index >= hz.length) return false
+ h = hz(index)
}
- found
- })
+ true
+ }
- def next = {
- if (found || hasNext) {
- val ans = (_keys(index).asInstanceOf[K], _values(index).asInstanceOf[V])
+ def next: (K, V) = {
+ if (hasNext) {
+ val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
index += 1
- found = false
ans
}
else throw new NoSuchElementException("next")
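Editorial aside, not part of the patch: the `h + h == 0` test in the rewritten iterator above is true for exactly two Int values, 0 and Int.MinValue (whose doubling overflows to 0), which the map evidently uses as markers for slots the iterator should skip. A small arithmetic check:

object MarkerDemo extends App {
  def live(h: Int): Boolean = h + h != 0 // same test as in the iterator above

  println(live(0))            // false: 0 + 0 == 0
  println(live(Int.MinValue)) // false: MinValue + MinValue overflows to 0
  println(live(1))            // true
  println(live(-1))           // true
  println(live(0x40000000))   // true: 2^30 doubles to MinValue, not 0
}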
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index d3c5a6b019..a55432fd71 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -14,9 +14,45 @@ import scala.annotation.implicitNotFound
import scala.util.Try
/**
- * An `ExecutionContext` is an abstraction over an entity that can execute program logic.
+ * An `ExecutionContext` can execute program logic, typically but not
+ * necessarily on a thread pool.
+ *
+ * APIs such as `Future.onComplete` require you to provide a callback
+ * and an implicit `ExecutionContext`. The implicit `ExecutionContext`
+ * will be used to execute the callback.
+ *
+ * It is possible to simply import
+ * `scala.concurrent.ExecutionContext.Implicits.global` to obtain an
+ * implicit `ExecutionContext`. This global context is a reasonable
+ * default thread pool.
+ *
+ * However, application developers should carefully consider where they
+ * want to set policy; ideally, one place per application (or per
+ * logically-related section of code) will make a decision about
+ * which `ExecutionContext` to use. That is, you might want to avoid
+ * hardcoding `scala.concurrent.ExecutionContext.Implicits.global` all
+ * over the place in your code.
+ * One approach is to add `(implicit ec: ExecutionContext)`
+ * to methods which need an `ExecutionContext`. Then import a specific
+ * context in one place for the entire application or module,
+ * passing it implicitly to individual methods.
+ *
+ * A custom `ExecutionContext` may be appropriate to execute code
+ * which blocks on IO or performs long-running computations.
+ * `ExecutionContext.fromExecutorService` and `ExecutionContext.fromExecutor`
+ * are good ways to create a custom `ExecutionContext`.
+ *
+ * The intent of `ExecutionContext` is to lexically scope code execution.
+ * That is, each method, class, file, package, or application determines
+ * how to run its own code. This avoids issues such as running
+ * application callbacks on a thread pool belonging to a networking library.
+ * The size of a networking library's thread pool can be safely configured,
+ * knowing that only that library's network operations will be affected.
+ * Application callback execution can be configured separately.
*/
-@implicitNotFound("Cannot find an implicit ExecutionContext, either import scala.concurrent.ExecutionContext.Implicits.global or use a custom one")
+@implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass
+an (implicit ec: ExecutionContext) parameter to your method
+or import scala.concurrent.ExecutionContext.Implicits.global.""")
trait ExecutionContext {
/** Runs a block of code on this execution context.
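Editorial aside, not part of the patch: a small usage sketch of the advice in the new scaladoc (the object and method names are illustrative). Code that needs a context declares an implicit parameter; the application picks the actual ExecutionContext in one place:

import scala.concurrent.{ExecutionContext, Future}

object UserService {
  // Library/module code states its requirement instead of hardcoding a pool.
  def loadUser(id: Int)(implicit ec: ExecutionContext): Future[String] =
    Future(s"user-$id")
}

object Main extends App {
  // The application decides, in one place, which context runs the callbacks.
  import scala.concurrent.ExecutionContext.Implicits.global

  UserService.loadUser(42).foreach(println)
  Thread.sleep(500) // crude wait so the demo JVM doesn't exit before the callback runs
}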
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 86132bb876..6743b9e42a 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -67,7 +67,21 @@ import java.util.regex.{ Pattern, Matcher }
* Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a
* pattern match.
*
- * Note, however, that when Regex is used as an extractor in a pattern match, it
+ * Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]]
+ * needs to be initialized (by calling `hasNext` or `next()`, or causing these to be
+ * called) before information about a match can be retrieved:
+ *
+ * {{{
+ * val msg = "I love Scala"
+ *
+ * // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException
+ *
+ * val matches = " ".r.findAllIn(msg)
+ * matches.hasNext // initializes the matcher
+ * val start = matches.start
+ * }}}
+ *
+ * When Regex is used as an extractor in a pattern match, note that it
* only succeeds if the whole text can be matched. For this reason, one usually
* calls a method to find the matching substrings, and then use it as an extractor
* to break match into subgroups.
@@ -267,6 +281,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* that can be queried for data such as the text that precedes the
* match, subgroups, etc.
*
+ * Attempting to retrieve information about a match before initializing
+ * the iterator can result in [[java.lang.IllegalStateException]]s. See
+ * [[scala.util.matching.Regex.MatchIterator]] for details.
+ *
* @param source The text to match against.
* @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
@@ -476,15 +494,7 @@ trait UnanchoredRegex extends Regex {
}
/** This object defines inner classes that describe
- * regex matches and helper objects. The class hierarchy
- * is as follows:
- *
- * {{{
- * MatchData
- * / \
- * MatchIterator Match
- * }}}
- *
+ * regex matches and helper objects.
*/
object Regex {
@@ -634,7 +644,15 @@ object Regex {
def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
}
- /** A class to step through a sequence of regex matches
+ /** A class to step through a sequence of regex matches.
+ *
+ * All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw
+ * a [[java.lang.IllegalStateException]] until the matcher is initialized. The
+ * matcher can be initialized by calling `hasNext` or `next()` or causing these
+ * methods to be called, such as by invoking `toString` or iterating through
+ * the iterator's elements.
+ *
+ * @see [[java.util.regex.Matcher]]
*/
class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 3bcf751ace..ec20a89a10 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -198,9 +198,9 @@ private[reflect] trait BuildUtils { self: Universe =>
val SyntacticFunction: SyntacticFunctionExtractor
trait SyntacticFunctionExtractor {
- def apply(params: List[Tree], body: Tree): Tree
+ def apply(params: List[Tree], body: Tree): Function
- def unapply(tree: Tree): Option[(List[ValDef], Tree)]
+ def unapply(tree: Function): Option[(List[ValDef], Tree)]
}
val SyntacticDefDef: SyntacticDefDefExtractor
@@ -248,6 +248,13 @@ private[reflect] trait BuildUtils { self: Universe =>
def unapply(tree: Tree): Option[(Tree)]
}
+ val SyntacticEmptyTypeTree: SyntacticEmptyTypeTreeExtractor
+
+ trait SyntacticEmptyTypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(tt: TypeTree): Boolean
+ }
+
val SyntacticFor: SyntacticForExtractor
val SyntacticForYield: SyntacticForExtractor
@@ -283,5 +290,11 @@ private[reflect] trait BuildUtils { self: Universe =>
def apply(name: Name, isBackquoted: Boolean = false): Ident
def unapply(tree: Ident): Option[(Name, Boolean)]
}
+
+ val SyntacticImport: SyntacticImportExtractor
+ trait SyntacticImportExtractor {
+ def apply(expr: Tree, selectors: List[Tree]): Import
+ def unapply(imp: Import): Some[(Tree, List[Tree])]
+ }
}
}
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
index 5667d93e29..6539137cee 100644
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -52,7 +52,7 @@ package api
* val imported = importer.importTree(tree)
*
* // after the tree is imported, it can be evaluated as usual
- * val tree = toolBox.resetAllAttrs(imported.duplicate)
+ * val tree = toolBox.untypecheck(imported.duplicate)
* val valueOfX = toolBox.eval(imported).asInstanceOf[T]
* ...
* }
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 83da5141b9..60e00ca5fd 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -1066,7 +1066,7 @@ trait Trees { self: Universe =>
* UnApply(
* // a dummy node that carries the type of unapplication to patmat
* // the <unapply-selector> here doesn't have an underlying symbol
- * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
* Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
* // arguments of the unapply => nothing synthetic here
* List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 4fde57ed02..d634034fe9 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -48,7 +48,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
- dropOtherAnnotations(annotations, cls).nonEmpty
+ dropOtherAnnotations(annotations, cls) ne Nil
def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
//OPT inlined from exists to save on #closures; was: annotations find (_ matches cls)
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 6106339324..c5581601de 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -33,19 +33,19 @@ trait BuildUtils { self: SymbolTable =>
}
def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
- newFreeTermSymbol(newTermName(name), value, flags, origin)
+ newFreeTermSymbol(newTermName(name), value, flags, origin).markFlagsCompleted(mask = AllFlags)
def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
- newFreeTypeSymbol(newTypeName(name), flags, origin)
+ newFreeTypeSymbol(newTypeName(name), flags, origin).markFlagsCompleted(mask = AllFlags)
def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
- owner.newNestedSymbol(name, pos, flags, isClass)
+ owner.newNestedSymbol(name, pos, flags, isClass).markFlagsCompleted(mask = AllFlags)
def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
sym.setAnnotations(annots)
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S =
- sym.setTypeSignature(tpe)
+ sym.setTypeSignature(tpe).markAllCompleted()
def This(sym: Symbol): Tree = self.This(sym)
@@ -143,7 +143,7 @@ trait BuildUtils { self: SymbolTable =>
def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
- def freshTermName(prefix: String): TermName = self.freshTermName(prefix)
+ def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX): TermName = self.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = self.freshTypeName(prefix)
@@ -435,16 +435,13 @@ trait BuildUtils { self: SymbolTable =>
}
object SyntacticFunction extends SyntacticFunctionExtractor {
- def apply(params: List[Tree], body: Tree): Tree = {
+ def apply(params: List[Tree], body: Tree): Function = {
val params0 :: Nil = mkParam(params :: Nil, PARAM)
require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values")
Function(params0, body)
}
- def unapply(tree: Tree): Option[(List[ValDef], Tree)] = tree match {
- case Function(params, body) => Some((params, body))
- case _ => None
- }
+ def unapply(tree: Function): Option[(List[ValDef], Tree)] = Function.unapply(tree)
}
object SyntacticNew extends SyntacticNewExtractor {
@@ -537,13 +534,15 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[Tree] = gen.Filter.unapply(tree)
}
- // abstract over possible alternative representations of no type in valdef
- protected object EmptyTypTree {
- def unapply(tree: Tree): Boolean = tree match {
- case EmptyTree => true
- case tt: TypeTree if (tt.original == null || tt.original.isEmpty) => true
- case _ => false
- }
+ // If a tree in type position isn't provided by the user (e.g. `tpt` fields of
+ // `ValDef` and `DefDef`, function params etc), then it's going to be parsed as
+ // TypeTree with empty original and empty tpe. This extractor matches such trees
+ // so that one can write q"val x = 2" to match typecheck(q"val x = 2"). Note that
+ // TypeTree() is the only possible representation for empty trees in type positions.
+ // We used to sometimes receive EmptyTree in such cases, but not anymore.
+ object SyntacticEmptyTypeTree extends SyntacticEmptyTypeTreeExtractor {
+ def apply(): TypeTree = self.TypeTree()
+ def unapply(tt: TypeTree): Boolean = tt.original == null || tt.original.isEmpty
}
// match a sequence of desugared `val $pat = $value`
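
For illustration of the comment above, a minimal sketch assuming a 2.11 runtime universe with quasiquotes in scope; the `tpt` of the quoted `val` is a `TypeTree()` with an empty original, which is exactly what `SyntacticEmptyTypeTree` matches:

    import scala.reflect.runtime.universe._
    q"val x = 2" match {
      case q"val x = 2" => "matches"   // tpt is TypeTree() with an empty original
    }
    // per the extractor above, the same pattern also matches a typechecked
    // q"val x = 2" (e.g. toolbox.typecheck(q"val x = 2")), whose inferred tpt is
    // likewise a TypeTree with an empty original
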
@@ -561,8 +560,8 @@ trait BuildUtils { self: SymbolTable =>
case ValDef(_, name1, _, Match(MaybeUnchecked(value), CaseDef(pat, EmptyTree, Ident(name2)) :: Nil)) :: UnPatSeq(rest)
if name1 == name2 =>
Some((pat, value) :: rest)
- // case q"${_} val $name: ${EmptyTypTree()} = $value" :: UnPatSeq(rest) =>
- case ValDef(_, name, EmptyTypTree(), value) :: UnPatSeq(rest) =>
+ // case q"${_} val $name: ${SyntacticEmptyTypeTree()} = $value" :: UnPatSeq(rest) =>
+ case ValDef(_, name, SyntacticEmptyTypeTree(), value) :: UnPatSeq(rest) =>
Some((Bind(name, self.Ident(nme.WILDCARD)), value) :: rest)
// case q"${_} val $name: $tpt = $value" :: UnPatSeq(rest) =>
case ValDef(_, name, tpt, value) :: UnPatSeq(rest) =>
@@ -604,8 +603,8 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[(Tree, Tree)] = tree match {
case Function(ValDef(Modifiers(PARAM, _, _), name, tpt, EmptyTree) :: Nil, body) =>
tpt match {
- case EmptyTypTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
- case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
+ case SyntacticEmptyTypeTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body))
+ case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body))
}
case UnVisitor(_, CaseDef(pat, EmptyTree, body) :: Nil) =>
Some((pat, body))
@@ -770,6 +769,146 @@ trait BuildUtils { self: SymbolTable =>
}
def unapply(tree: Ident): Some[(Name, Boolean)] = Some((tree.name, tree.hasAttachment[BackquotedIdentifierAttachment.type]))
}
+
+ /** Facade over Imports and ImportSelectors that lets one structurally
+ * deconstruct/reconstruct them.
+ *
+ * Selectors are represented in the following way:
+ * 1. q"import foo._" <==> q"import foo.${pq"_"}"
+ * 2. q"import foo.bar" <==> q"import foo.${pq"bar"}"
+ * 3. q"import foo.{bar => baz}" <==> q"import foo.${pq"bar -> baz"}"
+ * 4. q"import foo.{bar => _}" <==> q"import foo.${pq"bar -> _"}"
+ *
+ * All names in selectors are TermNames, despite the fact that ImportSelector
+ * can theoretically contain TypeNames too (they never do in practice).
+ */
+ object SyntacticImport extends SyntacticImportExtractor {
+ // construct/deconstruct {_} import selector
+ private object WildcardSelector {
+ def apply(offset: Int): ImportSelector = ImportSelector(nme.WILDCARD, offset, null, -1)
+ def unapply(sel: ImportSelector): Option[Int] = sel match {
+ case ImportSelector(nme.WILDCARD, offset, null, -1) => Some(offset)
+ case _ => None
+ }
+ }
+
+ // construct/deconstruct {foo} import selector
+ private object NameSelector {
+ def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, name, offset)
+ def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+ case ImportSelector(name1, offset1, name2, offset2) if name1 == name2 && offset1 == offset2 =>
+ Some((name1.toTermName, offset1))
+ case _ =>
+ None
+ }
+ }
+
+ // construct/deconstruct {foo => bar} import selector
+ private object RenameSelector {
+ def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector =
+ ImportSelector(name1, offset1, name2, offset2)
+ def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = sel match {
+ case ImportSelector(_, _, null | nme.WILDCARD, _) =>
+ None
+ case ImportSelector(name1, offset1, name2, offset2) if name1 != name2 =>
+ Some((name1.toTermName, offset1, name2.toTermName, offset2))
+ case _ =>
+ None
+ }
+ }
+
+ // construct/deconstruct {foo => _} import selector
+ private object UnimportSelector {
+ def apply(name: TermName, offset: Int): ImportSelector =
+ ImportSelector(name, offset, nme.WILDCARD, -1)
+ def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match {
+ case ImportSelector(name, offset, nme.WILDCARD, _) => Some((name.toTermName, offset))
+ case _ => None
+ }
+ }
+
+ // represent {_} import selector as pq"_"
+ private object WildcardSelectorRepr {
+ def apply(pos: Position): Tree = atPos(pos)(self.Ident(nme.WILDCARD))
+ def unapply(tree: Tree): Option[Position] = tree match {
+ case self.Ident(nme.WILDCARD) => Some(tree.pos)
+ case _ => None
+ }
+ }
+
+ // represent {foo} import selector as pq"foo"
+ private object NameSelectorRepr {
+ def apply(name: TermName, pos: Position): Tree = atPos(pos)(Bind(name, WildcardSelectorRepr(pos)))
+ def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+ case Bind(name, WildcardSelectorRepr(_)) => Some((name.toTermName, tree.pos))
+ case _ => None
+ }
+ }
+
+ // pq"left -> right"
+ private object Arrow {
+ def apply(left: Tree, right: Tree): Apply =
+ Apply(self.Ident(nme.MINGT), left :: right :: Nil)
+ def unapply(tree: Apply): Option[(Tree, Tree)] = tree match {
+ case Apply(self.Ident(nme.MINGT), left :: right :: Nil) => Some((left, right))
+ case _ => None
+ }
+ }
+
+ // represent {foo => bar} import selector as pq"foo -> bar"
+ private object RenameSelectorRepr {
+ def apply(name1: TermName, pos1: Position, name2: TermName, pos2: Position): Tree = {
+ val left = NameSelectorRepr(name1, pos1)
+ val right = NameSelectorRepr(name2, pos2)
+ atPos(wrappingPos(left :: right :: Nil))(Arrow(left, right))
+ }
+ def unapply(tree: Tree): Option[(TermName, Position, TermName, Position)] = tree match {
+ case Arrow(NameSelectorRepr(name1, pos1), NameSelectorRepr(name2, pos2)) =>
+ Some((name1.toTermName, pos1, name2.toTermName, pos2))
+ case _ =>
+ None
+ }
+ }
+
+ // represent {foo => _} import selector as pq"foo -> _"
+ private object UnimportSelectorRepr {
+ def apply(name: TermName, pos: Position): Tree =
+ atPos(pos)(Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(pos)))
+ def unapply(tree: Tree): Option[(TermName, Position)] = tree match {
+ case Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(_)) =>
+ Some((name, pos))
+ case _ =>
+ None
+ }
+ }
+
+ private def derivedPos(t: Tree, offset: Int): Position =
+ if (t.pos == NoPosition) NoPosition else t.pos.withPoint(offset)
+
+ private def derivedOffset(pos: Position): Int =
+ if (pos == NoPosition) -1 else pos.point
+
+ def apply(expr: Tree, selectors: List[Tree]): Import = {
+ val importSelectors = selectors.map {
+ case WildcardSelectorRepr(pos) => WildcardSelector(derivedOffset(pos))
+ case NameSelectorRepr(name, pos) => NameSelector(name, derivedOffset(pos))
+ case RenameSelectorRepr(name1, pos1, name2, pos2) => RenameSelector(name1, derivedOffset(pos1), name2, derivedOffset(pos2))
+ case UnimportSelectorRepr(name, pos) => UnimportSelector(name, derivedOffset(pos))
+ case tree => throw new IllegalArgumentException(s"${showRaw(tree)} doesn't correspond to import selector")
+ }
+ Import(expr, importSelectors)
+ }
+
+ def unapply(imp: Import): Some[(Tree, List[Tree])] = {
+ val selectors = imp.selectors.map {
+ case WildcardSelector(offset) => WildcardSelectorRepr(derivedPos(imp, offset))
+ case NameSelector(name, offset) => NameSelectorRepr(name, derivedPos(imp, offset))
+ case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2))
+ case UnimportSelector(name, offset) => UnimportSelectorRepr(name, derivedPos(imp, offset))
+ }
+ Some((imp.expr, selectors))
+ }
+ }
}
val build: BuildImpl = new BuildImpl
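
For illustration, the selector correspondence documented in the SyntacticImport comment above, written out as snippets (assuming a 2.11 runtime universe with quasiquotes in scope; `foo`, `bar`, `baz` are placeholder names):

    import scala.reflect.runtime.universe._
    val wild     = q"import foo.${pq"_"}"           // import foo._
    val plain    = q"import foo.${pq"bar"}"         // import foo.bar
    val renamed  = q"import foo.${pq"bar -> baz"}"  // import foo.{bar => baz}
    val unimport = q"import foo.${pq"bar -> _"}"    // import foo.{bar => _}
    // deconstruction works symmetrically, e.g.:
    val q"import $ref.{..$sels}" = q"import foo.{bar => baz, _}"
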
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 4d24f0b219..7a0c70caf6 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -30,12 +30,12 @@ trait Definitions extends api.StandardDefinitions {
private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
- clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
+ clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted
}
private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val params = msym.newSyntheticValueParams(formals)
- msym setInfo MethodType(params, restpe)
+ msym setInfo MethodType(params, restpe) markAllCompleted
}
private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol =
owner.info.decls enter newMethod(owner, name, formals, restpe, flags)
@@ -251,8 +251,8 @@ trait Definitions extends api.StandardDefinitions {
}
// top types
- lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)
+ lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted
lazy val ObjectClass = getRequiredClass(sn.Object.toString)
// Cached types for core monomorphic classes
@@ -275,7 +275,7 @@ trait Definitions extends api.StandardDefinitions {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
anyval.info.decls enter av_constr
- anyval
+ anyval markAllCompleted
}).asInstanceOf[ClassSymbol]
def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
@@ -287,8 +287,10 @@ trait Definitions extends api.StandardDefinitions {
locally {
this initFlags ABSTRACT | FINAL
this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
+ this markAllCompleted
}
final override def isBottomClass = true
+ final override def isThreadsafe(purpose: SymbolOps): Boolean = true
}
final object NothingClass extends BottomClassSymbol(tpnme.Nothing, AnyClass) {
override def isSubClass(that: Symbol) = true
@@ -357,7 +359,7 @@ trait Definitions extends api.StandardDefinitions {
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
- lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL)
+ lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) markAllCompleted
lazy val SerializableClass = requiredClass[scala.Serializable]
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
@@ -1127,6 +1129,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val AnnotationDefaultAttr: ClassSymbol = {
val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
+ markAllCompleted(sym)
RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
case existing :: _ =>
existing.asInstanceOf[ClassSymbol]
@@ -1226,7 +1229,7 @@ trait Definitions extends api.StandardDefinitions {
val tparam = clazz.newSyntheticTypeParam("T0", flags)
val parents = List(AnyRefTpe, parentFn(tparam))
- clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
+ clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) markAllCompleted
}
def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = {
@@ -1237,7 +1240,7 @@ trait Definitions extends api.StandardDefinitions {
case (_, restpe) => NullaryMethodType(restpe)
}
- msym setInfoAndEnter genPolyType(tparams, mtpe)
+ msym setInfoAndEnter genPolyType(tparams, mtpe) markAllCompleted
}
/** T1 means one type parameter.
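
A hedged reading of the pattern sprinkled through this file: symbols that Definitions wires up programmatically get their infos assigned eagerly and never acquire a completer, so marking them completed up front presumably tells runtime reflection's thread-safety tracking that nothing remains to initialize. A minimal sketch of the shape, assuming the surrounding SymbolTable cake (the helper name is illustrative):

    def enterNewClassSketch(owner: Symbol, name: TypeName, parents: List[Type]): ClassSymbol = {
      val clazz = owner.newClassSymbol(name, NoPosition, 0L)
      // info is assigned right here, so the symbol can be marked completed immediately
      clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted
    }
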
diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala
index 1de8d425ad..7e9a568266 100644
--- a/src/reflect/scala/reflect/internal/FreshNames.scala
+++ b/src/reflect/scala/reflect/internal/FreshNames.scala
@@ -8,7 +8,7 @@ package internal
import scala.reflect.internal.util.FreshNameCreator
-trait FreshNames { self: Names =>
+trait FreshNames { self: Names with StdNames =>
// SI-6879 Keeps track of counters that are supposed to be globally unique
// as opposed to traditional freshers that are unique to compilation units.
val globalFreshNameCreator = new FreshNameCreator
@@ -17,8 +17,8 @@ trait FreshNames { self: Names =>
def currentFreshNameCreator: FreshNameCreator
// create fresh term/type name using implicit fresh name creator
- def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
- def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
+ def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX)(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
+ def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
// Extractor that matches names which were generated by some
// FreshNameCreator with known prefix. Extracts user-specified
@@ -36,4 +36,4 @@ trait FreshNames { self: Names =>
else Some(NameTransformer.decode(sname.replaceFirst(quotedCreatorPrefix, "").replaceAll("\\d*$", "")))
}
}
-} \ No newline at end of file
+}
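
For illustration, a small sketch of what the shared default prefix yields, using the same FreshNameCreator API the code above calls (exact numbering depends on prior use of the creator):

    val creator = new scala.reflect.internal.util.FreshNameCreator
    creator.newName("x$")   // e.g. "x$1"
    creator.newName("x$")   // e.g. "x$2"
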
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 483d0dd656..ff91b08ea1 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -4,6 +4,7 @@ package internal
import scala.collection.mutable.WeakHashMap
import scala.ref.WeakReference
+import scala.reflect.internal.Flags._
// SI-6241: move importers to a mirror
trait Importers extends api.Importers { to: SymbolTable =>
@@ -87,6 +88,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
}
my setInfo GenPolyType(mytypeParams, importType(theirCore))
my setAnnotations (their.annotations map importAnnotationInfo)
+ markAllCompleted(my)
}
}
} finally {
@@ -142,6 +144,7 @@ trait Importers extends api.Importers { to: SymbolTable =>
myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
}
symMap.weakUpdate(their, my)
+ markFlagsCompleted(my)(mask = AllFlags)
my setInfo recreatedSymbolCompleter(my, their)
}
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 4075653674..73ce59feb2 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -345,6 +345,13 @@ trait Names extends api.Names {
i += 1
i == prefix.length
}
+ final def startsWith(prefix: String, start: Int): Boolean = {
+ var i = 0
+ while (i < prefix.length && start + i < len &&
+ chrs(index + start + i) == prefix.charAt(i))
+ i += 1
+ i == prefix.length
+ }
/** Does this name end with suffix? */
final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
@@ -357,6 +364,13 @@ trait Names extends api.Names {
i += 1
i > suffix.length
}
+ final def endsWith(suffix: String, end: Int): Boolean = {
+ var i = 1
+ while (i <= suffix.length && i <= end &&
+ chrs(index + end - i) == suffix.charAt(suffix.length - i))
+ i += 1
+ i > suffix.length
+ }
final def containsName(subname: String): Boolean = containsName(newTermName(subname))
final def containsName(subname: Name): Boolean = {
@@ -382,9 +396,9 @@ trait Names extends api.Names {
final def startChar: Char = this charAt 0
final def endChar: Char = this charAt len - 1
final def startsWith(char: Char): Boolean = len > 0 && startChar == char
- final def startsWith(name: String): Boolean = startsWith(newTermName(name))
+ final def startsWith(name: String): Boolean = startsWith(name, 0)
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
- final def endsWith(name: String): Boolean = endsWith(newTermName(name))
+ final def endsWith(name: String): Boolean = endsWith(name, len)
/** Rewrite the confusing failure indication via result == length to
* the normal failure indication via result == -1.
@@ -443,9 +457,10 @@ trait Names extends api.Names {
}
/** TODO - find some efficiency. */
- def append(ch: Char) = newName("" + this + ch)
- def append(suffix: String) = newName("" + this + suffix)
- def append(suffix: Name) = newName("" + this + suffix)
+ def append(ch: Char) = newName(toString + ch)
+ def append(suffix: String) = newName(toString + suffix)
+ def append(suffix: Name) = newName(toString + suffix)
+ def append(separator: Char, suffix: Name) = newName(toString + separator + suffix)
def prepend(prefix: String) = newName("" + prefix + this)
def decodedName: ThisNameType = newName(decode)
@@ -463,7 +478,7 @@ trait Names extends api.Names {
*/
final class NameOps[T <: Name](name: T) {
import NameTransformer._
- def stripSuffix(suffix: String): T = stripSuffix(suffix: TermName)
+ def stripSuffix(suffix: String): T = if (name endsWith suffix) dropRight(suffix.length) else name // OPT avoid creating a Name with `suffix`
def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
def take(n: Int): T = name.subName(0, n).asInstanceOf[T]
def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
@@ -500,21 +515,21 @@ trait Names extends api.Names {
/** TermName_S and TypeName_S have fields containing the string version of the name.
* TermName_R and TypeName_R recreate it each time toString is called.
*/
- private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+ private final class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
override def newName(str: String): TermName = newTermNameCached(str)
}
- private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+ private final class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
override def newName(str: String): TypeName = newTypeNameCached(str)
}
- private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+ private final class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
override def toString = new String(chrs, index, len)
}
- private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+ private final class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
override def toString = new String(chrs, index, len)
}
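
For illustration, a sketch of the allocation the new String overloads avoid (assuming we're inside the Names cake so `newTermName` and the NameOps implicits are in scope; the identifier is made up). Previously `name endsWith "$1"` went through `newTermName("$1")`, i.e. a name-table lookup or insertion, just to compare characters:

    val n = newTermName("foo$bar$1")
    n.endsWith("$1")        // now a direct character comparison against the name table
    n.stripSuffix("$1")     // drops the suffix without first interning TermName("$1")
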
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 28d799ea0c..679186f938 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -295,22 +295,23 @@ trait StdNames {
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Base strings from which synthetic names are derived. */
- val BITMAP_PREFIX = "bitmap$"
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
- val DO_WHILE_PREFIX = "doWhile$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val EXPAND_SEPARATOR_STRING = "$$"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
- val PROTECTED_PREFIX = "protected$"
- val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SUPER_PREFIX_STRING = "super$"
- val WHILE_PREFIX = "while$"
- val FRESH_PREFIX = "fresh"
- val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names
+ val BITMAP_PREFIX = "bitmap$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val FRESH_TERM_NAME_PREFIX = "x$"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
+ val PROTECTED_PREFIX = "protected$"
+ val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
+ val SUPER_PREFIX_STRING = "super$"
+ val WHILE_PREFIX = "while$"
+ val FRESH_PREFIX = "fresh"
+ val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names
// Compiler internal names
val ANYname: NameType = "<anyname>"
@@ -602,12 +603,14 @@ trait StdNames {
val SyntacticBlock: NameType = "SyntacticBlock"
val SyntacticClassDef: NameType = "SyntacticClassDef"
val SyntacticDefDef: NameType = "SyntacticDefDef"
+ val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree"
val SyntacticFilter: NameType = "SyntacticFilter"
val SyntacticFor: NameType = "SyntacticFor"
val SyntacticForYield: NameType = "SyntacticForYield"
val SyntacticFunction: NameType = "SyntacticFunction"
val SyntacticFunctionType: NameType = "SyntacticFunctionType"
val SyntacticIdent: NameType = "SyntacticIdent"
+ val SyntacticImport: NameType = "SyntacticImport"
val SyntacticMatch: NameType = "SyntacticMatch"
val SyntacticNew: NameType = "SyntacticNew"
val SyntacticObjectDef: NameType = "SyntacticObjectDef"
@@ -822,31 +825,33 @@ trait StdNames {
def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
// ASCII names for operators
- val ADD = encode("+")
- val AND = encode("&")
- val ASR = encode(">>")
- val CONS = encode("::")
- val DIV = encode("/")
- val EQ = encode("==")
- val EQL = encode("=")
- val GE = encode(">=")
- val GT = encode(">")
- val HASHHASH = encode("##")
- val LE = encode("<=")
- val LSL = encode("<<")
- val LSR = encode(">>>")
- val LT = encode("<")
- val MINUS = encode("-")
- val MOD = encode("%")
- val MUL = encode("*")
- val NE = encode("!=")
- val OR = encode("|")
- val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
- val PLUSPLUS = encode("++")
- val SUB = MINUS // ... as does SUB with PLUS
- val XOR = encode("^")
- val ZAND = encode("&&")
- val ZOR = encode("||")
+ val ADD = encode("+")
+ val AND = encode("&")
+ val ASR = encode(">>")
+ val CONS = encode("::")
+ val COLONPLUS = encode(":+")
+ val DIV = encode("/")
+ val EQ = encode("==")
+ val EQL = encode("=")
+ val GE = encode(">=")
+ val GT = encode(">")
+ val HASHHASH = encode("##")
+ val LE = encode("<=")
+ val LSL = encode("<<")
+ val LSR = encode(">>>")
+ val LT = encode("<")
+ val MINUS = encode("-")
+ val MINGT = encode("->")
+ val MOD = encode("%")
+ val MUL = encode("*")
+ val NE = encode("!=")
+ val OR = encode("|")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val PLUSPLUS = encode("++")
+ val SUB = MINUS // ... as does SUB with PLUS
+ val XOR = encode("^")
+ val ZAND = encode("&&")
+ val ZOR = encode("||")
// unary operators
val UNARY_~ = encode("unary_~")
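
For reference, what `encode` produces for the two operator names introduced above, via scala.reflect.NameTransformer:

    scala.reflect.NameTransformer.encode(":+")   // "$colon$plus"
    scala.reflect.NameTransformer.encode("->")   // "$minus$greater"
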
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index b538648b36..c088e8f57c 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -125,7 +125,7 @@ abstract class SymbolPairs {
* considered as a (lo, high) pair? Types always match. Term symbols
* match if their member types relative to `self` match.
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean
+ protected def matches(lo: Symbol, high: Symbol): Boolean
/** The parents and base classes of `base`. Can be refined in subclasses.
*/
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index f49ddaf6ca..2969bd92de 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -55,16 +55,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
new FreeTypeSymbol(name, origin) initFlags flags
- /** Determines whether the given information request should trigger the given symbol's completer.
- * See comments to `Symbol.needsInitialize` for details.
- */
- protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
- completer match {
- case null => false
- case _: FlagAgnosticCompleter => !isFlagRelated
- case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}")
- }
-
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
*/
@@ -106,18 +96,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def knownDirectSubclasses = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
children
}
def selfType = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
typeOfThis
}
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
+
+ // Automatic full initialization on access to info from the reflection API is a double-edged sword.
+ // On the one hand, it's convenient: users don't have to deal with initialization themselves before printing out stuff
+ // (e.g. printing out a method's signature without fully initializing it would result in <_>'s for parameters).
+ // On the other hand, this strategy can potentially cause unexpected effects due to being inconsistent with the compiler's behavior.
+ // So far I think user convenience outweighs the scariness, but we need to keep the tradeoff in mind.
+ // NOTE: if you end up removing the call to fullyInitializeSymbol, consider that it would affect both runtime reflection and macros
def typeSignature: Type = { fullyInitializeSymbol(this); info }
def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
@@ -140,6 +139,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
with Annotatable[Symbol]
with Attachable {
+ // makes sure that all symbols that runtime reflection deals with are synchronized
+ private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol]
+ private def isAprioriThreadsafe = isThreadsafe(AllOps)
+ assert(isCompilerUniverse || isSynchronized || isAprioriThreadsafe, s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe")
+
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
@@ -609,20 +613,55 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
&& isTopLevel
&& nme.isReplWrapperName(name)
)
+
+ /** In our current architecture, symbols for top-level classes and modules
+ * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
+ * consider their job done.
+ *
+ * In order for such a dummy to provide meaningful info (e.g. a list of its members),
+ * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
+ * from ScalaSignature annotations and assigning it to symbols and types.
+ *
+ * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
+ * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
+ * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
+ * classes/modules nested into those and so on).
+ *
+ * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
+ * This happens because package symbols assign completer thunks to the dummies they create.
+ * Therefore metadata loading happens lazily and transparently.
+ *
+ * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
+ * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
+ * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialized symbol
+ * produces incorrect results.
+ *
+ * One might think that the solution is simple: automatically call the completer
+ * whenever one needs flags, annotations and privateWithin - just like it's done for typeSignature.
+ * Unfortunately, this leads to weird crashes in scalac, and currently we can't attempt
+ * to fix the core of the compiler and risk stability a few weeks before the final release.
+ * upd. Haha, "a few weeks before the final release". This surely sounds familiar :)
+ *
+ * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something
+ * we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a
+ * runtime reflection universe, and if so and we haven't yet loaded the requested info, to commence initialization.
+ */
final def getFlag(mask: Long): Long = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
flags & mask
}
/** Does symbol have ANY flag in `mask` set? */
final def hasFlag(mask: Long): Boolean = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
(flags & mask) != 0
}
def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong)
/** Does symbol have ALL the flags in `mask` set? */
final def hasAllFlags(mask: Long): Boolean = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize
(flags & mask) == mask
}
@@ -789,7 +828,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
*/
- def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
/** Does this symbol denote the primary constructor of its enclosing class? */
@@ -943,12 +982,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
- /** Some completers call sym.setInfo when still in-flight and then proceed with initialization (e.g. see LazyPackageType)
- * setInfo sets _validTo to current period, which means that after a call to setInfo isInitialized will start returning true.
- * Unfortunately, this doesn't mean that info becomes ready to be used, because subsequent initialization might change the info.
- * Therefore we need this method to distinguish between initialized and really initialized symbol states.
+ /** We consider a symbol to be thread-safe when multiple concurrent threads can call its methods
+ * (either directly, or indirectly via public reflection or internal compiler infrastructure)
+ * without any locking, and everything works as it should.
+ *
+ * In its basic form, `isThreadsafe` always returns false. Runtime reflection augments the reflection infrastructure
+ * with a thread-safety-tracking mechanism implemented in `SynchronizedSymbol`, which communicates with the underlying completers
+ * and can sometimes return true if the symbol has been completed to the point of thread safety.
+ *
+ * The `purpose` parameter signifies whether we only want to check immutability of certain flags for the given mask.
+ * This is necessary to enable robust auto-initialization of `Symbol.flags` for runtime reflection, and is also quite handy
+ * in avoiding unnecessary initializations when requesting flags that have already been set.
*/
- final def isFullyInitialized: Boolean = _validTo != NoPeriod && (flags & LOCKED) == 0
+ def isThreadsafe(purpose: SymbolOps): Boolean = false
+ def markFlagsCompleted(mask: Long): this.type = this
+ def markAllCompleted(): this.type = this
/** Can this symbol be loaded by a reflective mirror?
*
@@ -1102,7 +1150,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def fullNameInternal(separator: Char): Name = (
if (isRoot || isRootPackage || this == NoSymbol) name
else if (owner.isEffectiveRoot) name
- else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
+ else effectiveOwner.enclClass.fullNameAsName(separator) append (separator, name)
)
def fullNameAsName(separator: Char): Name = fullNameInternal(separator).dropLocal
@@ -1225,7 +1273,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
private[this] var _privateWithin: Symbol = _
def privateWithin = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
_privateWithin
}
def privateWithin_=(sym: Symbol) { _privateWithin = sym }
@@ -1483,46 +1532,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
catch { case _: CyclicReference => debuglog("Hit cycle in maybeInitialize of $this") ; false }
}
- /** Called when the programmer requests information that might require initialization of the underlying symbol.
- *
- * `isFlagRelated` and `mask` describe the nature of this information.
- * isFlagRelated = true means that the programmer needs particular bits in flags.
- * isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin).
- *
- * In our current architecture, symbols for top-level classes and modules
- * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
- * consider their job done.
- *
- * In order for such a dummy to provide meaningful info (e.g. a list of its members),
- * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
- * from ScalaSignature annotations and assigning it to symbols and types.
- *
- * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
- * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
- * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
- * classes/modules nested into those and so on).
- *
- * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
- * This happens because package symbols assign completer thunks to the dummies they create.
- * Therefore metadata loading happens lazily and transparently.
- *
- * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
- * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
- * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialize symbol
- * produces incorrect results.
- *
- * One might think that the solution is simple: automatically call the completer whenever one needs
- * flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this
- * leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler
- * risk stability a few weeks before the final release.
- *
- * However we do need to fix this for runtime reflection, since it's not something we'd like to
- * expose to reflection users. Therefore a proposed solution is to check whether we're in a
- * runtime reflection universe and if yes then to commence initialization.
- */
- protected def needsInitialize(isFlagRelated: Boolean, mask: Long) =
- !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
-
/** Was symbol's type updated during given phase? */
final def hasTypeAt(pid: Phase#Id): Boolean = {
assert(isCompilerUniverse)
@@ -1681,7 +1690,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the annotations attached to member a definition (class, method, type, field).
*/
def annotations: List[AnnotationInfo] = {
- if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method.
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize
_annotations
}
@@ -3404,8 +3414,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the new list of info-adjusted symbols
*/
def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = {
- val syms1 = syms map symFn
- syms1 map (_ substInfo (syms, syms1))
+ val syms1 = mapList(syms)(symFn)
+ mapList(syms1)(_ substInfo (syms, syms1))
}
/** Derives a new Type by first deriving new symbols as in deriveSymbols,
@@ -3445,9 +3455,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the newly created, info-adjusted symbols
*/
def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
- cloneSymbols(syms) map (_ modifyInfo infoFn)
+ mapList(cloneSymbols(syms))(_ modifyInfo infoFn)
def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
- cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
+ mapList(cloneSymbolsAtOwner(syms, owner))(_ modifyInfo infoFn)
/** Functions which perform the standard clone/substituting on the given symbols and type,
* then call the creator function with the new symbols and type as arguments.
@@ -3510,6 +3520,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
Statistics.newView("#symbols")(ids)
+
+// -------------- Completion --------------------------------------------------------
+
+ // is used to differentiate levels of thread-safety in `Symbol.isThreadsafe`
+ case class SymbolOps(isFlagRelated: Boolean, mask: Long)
+ val AllOps = SymbolOps(isFlagRelated = false, mask = 0L)
+ def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask)
+
+ private def relevantSymbols(syms: Seq[Symbol]) = syms.flatMap(sym => List(sym, sym.moduleClass, sym.sourceModule))
+ def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = relevantSymbols(syms).foreach(_.markFlagsCompleted(mask))
+ def markAllCompleted(syms: Symbol*): Unit = relevantSymbols(syms).foreach(_.markAllCompleted)
}
object SymbolsStats {
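
A minimal sketch of how the purpose tokens above are meant to be consumed, mirroring the `getFlag`/`privateWithin` call sites in this diff (the helper name and the bare `sym` receiver are illustrative, assuming the surrounding SymbolTable cake):

    def flagsIfSafe(sym: Symbol, mask: Long): Long = {
      // a flag read only needs the bits in `mask` to be stable, so FlagOps(mask) suffices;
      // reads of annotations or privateWithin would ask for AllOps instead
      if (!isCompilerUniverse && !sym.isThreadsafe(purpose = FlagOps(mask))) sym.initialize
      sym.flags & mask
    }
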
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index e602a12175..29fdba2781 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -49,10 +49,10 @@ abstract class TreeGen extends macros.TreeBuilder {
mkMethodCall(Select(receiver, method), targs, args)
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
- Apply(mkTypeApply(target, targs map TypeTree), args)
+ Apply(mkTypeApply(target, mapList(targs)(TypeTree)), args)
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
- mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+ mkTypeApply(mkAttributedRef(method), mapList(targs)(TypeTree))
/** Builds a reference to value whose type is given stable prefix.
* The type must be suitable for this. For example, it
@@ -281,7 +281,7 @@ abstract class TreeGen extends macros.TreeBuilder {
case tree :: Nil if flattenUnary =>
tree
case _ =>
- Apply(scalaDot(TupleClass(elems.length).companionModule.name), elems)
+ Apply(scalaDot(TupleClass(elems.length).name.toTermName), elems)
}
def mkTupleType(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 497a7c91b1..7bab15b0f4 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -284,17 +284,17 @@ abstract class TreeInfo {
/** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
* same object?
*/
- def isSelfConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
- case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSelfConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Ident(nme.CONSTRUCTOR) => true
+ case Select(This(_), nme.CONSTRUCTOR) => true
+ case _ => false
}
/** Is tree a super constructor call?
*/
- def isSuperConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSuperConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Select(Super(_, _), nme.CONSTRUCTOR) => true
+ case _ => false
}
/**
@@ -848,8 +848,10 @@ abstract class TreeInfo {
case _ => false
})
- def isMacroApplication(tree: Tree): Boolean =
- !tree.isDef && tree.symbol != null && tree.symbol.isTermMacro && !tree.symbol.isErroneous
+ def isMacroApplication(tree: Tree): Boolean = !tree.isDef && {
+ val sym = tree.symbol
+ sym != null && sym.isTermMacro && !sym.isErroneous
+ }
def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
case Block(_, expr) => isMacroApplicationOrBlock(expr)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 9ddaea4c62..4fbac235f4 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -899,7 +899,7 @@ trait Types
-1
}
- /** If this is a poly- or methodtype, a copy with cloned type / value parameters
+ /** If this is a ExistentialType, PolyType or MethodType, a copy with cloned type / value parameters
* owned by `owner`. Identity for all other types.
*/
def cloneInfo(owner: Symbol) = this
@@ -1969,15 +1969,15 @@ trait Types
def apply(value: Constant) = unique(new UniqueConstantType(value))
}
- private var _volatileRecursions: Int = 0
- def volatileRecursions = _volatileRecursions
- def volatileRecursions_=(value: Int) = _volatileRecursions = value
-
- private val _pendingVolatiles = new mutable.HashSet[Symbol]
- def pendingVolatiles = _pendingVolatiles
+ /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
+ * with synchronized, because they are accessed only from isVolatile, which is called only from
+ * Typer.
+ */
+ private var volatileRecursions: Int = 0
+ private val pendingVolatiles = new mutable.HashSet[Symbol]
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
- require(args0.nonEmpty, this)
+ require(args0 ne Nil, this)
/** No unapplied type params size it has (should have) equally as many args. */
override def isHigherKinded = false
@@ -1988,12 +1988,39 @@ trait Types
// too little information is known to determine its kind, and
// it later turns out not to have kind *. See SI-4070. Only
// logging it for now.
- if (sym.typeParams.size != args.size)
+ val tparams = sym.typeParams
+ if (tparams.size != args.size)
devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
-
- val GenPolyType(tparams, result) = asSeenFromOwner(tp)
- assert((tparams eq Nil) || tparams == sym.typeParams, (tparams, sym.typeParams))
- result.instantiateTypeParams(sym.typeParams, args)
+ def asSeenFromInstantiated(tp: Type) =
+ asSeenFromOwner(tp).instantiateTypeParams(tparams, args)
+ // If we're called with a poly type, and we were to run the `asSeenFrom`, over the entire
+ // type, we can end up with new symbols for the type parameters (clones from TypeMap).
+ // The subsequent substitution of type arguments would fail. This problem showed up during
+ // the fix for SI-8046, however the solution taken there wasn't quite right, and led to
+ // SI-8170.
+ //
+ // Now, we detect the PolyType before both the ASF *and* the substitution, and just operate
+ // on the result type.
+ //
+ // TODO: Revisit this and explore the questions raised:
+ //
+ // AM: I like this better than the old code, but is there any way the tparams would need the ASF treatment as well?
+ // JZ: I think its largely irrelevant, as they are no longer referred to in the result type.
+ // In fact, you can get away with returning a type of kind * here and the sky doesn't fall:
+ // `case PolyType(`tparams`, result) => asSeenFromInstantiated(result)`
+ // But I thought it was better to retain the kind.
+ // AM: I've been experimenting with apply-type-args-then-ASF, but running into cycles.
+ // In general, it seems iffy the tparams can never occur in the result
+ // then we might as well represent the type as a no-arg typeref.
+ // AM: I've also been trying to track down uses of transform (pretty generic name for something that
+ // does not seem that widely applicable).
+ // It's kind of a helper for computing baseType (since it tries to propagate our type args to some
+ // other type, which has to be related to this type for that to make sense).
+ //
+ tp match {
+ case PolyType(`tparams`, result) => PolyType(tparams, asSeenFromInstantiated(result))
+ case _ => asSeenFromInstantiated(tp)
+ }
}
// note: does not go through typeRef. There's no need to because
@@ -2035,7 +2062,7 @@ trait Types
// to a java or scala symbol, but it does matter whether it occurs in java or scala code.
// TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
// represented as existential types.
- override def isHigherKinded = typeParams.nonEmpty
+ override def isHigherKinded = (typeParams ne Nil)
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
@@ -2221,7 +2248,7 @@ trait Types
//OPT specialize hashCode
override final def computeHashCode = {
import scala.util.hashing.MurmurHash3._
- val hasArgs = args.nonEmpty
+ val hasArgs = args ne Nil
var h = productSeed
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
@@ -2412,7 +2439,7 @@ trait Types
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
- if (args.nonEmpty) {
+ if (args ne Nil) {
if (sym.isAliasType) new AliasArgsTypeRef(pre, sym, args)
else if (sym.isAbstractType) new AbstractArgsTypeRef(pre, sym, args)
else new ClassArgsTypeRef(pre, sym, args)
@@ -2485,7 +2512,7 @@ trait Types
true
}
- def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isImplicit = (params ne Nil) && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
@@ -2493,7 +2520,7 @@ trait Types
override def paramss: List[List[Symbol]] = params :: resultType.paramss
- override def paramTypes = params map (_.tpe)
+ override def paramTypes = mapList(params)(symTpe) // OPT use mapList rather than .map
override def boundSyms = resultType.boundSyms ++ params
@@ -2656,8 +2683,55 @@ trait Types
override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
override def isHigherKinded = false
- override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
+ /** [SI-6169, SI-8197 -- companion to SI-1786]
+ *
+ * Approximation to improve the bounds of a Java-defined existential type,
+ * based on the bounds of the type parameters of the quantified type
+ * In Scala syntax, given a java-defined class C[T <: String], the existential type C[_]
+ * is improved to C[_ <: String] before skolemization, which captures (get it?) what Java does:
+ * enter the type parameters' bounds into the context when checking subtyping/type equality of existential types
+ *
+ * (Also tried doing this once during class file parsing or when creating the existential type,
+ * but that causes cyclic errors because it happens too early.)
+ */
+ private def sharpenQuantifierBounds(): Unit = {
+ /* Check that we're looking at rawToExistential's handiwork
+ * (`existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))`).
+ * We can't do this sharpening there because we'll run into cycles.
+ */
+ def rawToExistentialCreatedMe = (quantified corresponds underlying.typeArgs){ (q, a) => q.tpe =:= a }
+
+ if (underlying.typeSymbol.isJavaDefined && rawToExistentialCreatedMe) {
+ val tpars = underlying.typeSymbol.initialize.typeParams // TODO: is initialize needed?
+ debuglog(s"sharpen bounds: $this | ${underlying.typeArgs.map(_.typeSymbol)} <-- ${tpars.map(_.info)}")
+
+ foreach2(quantified, tpars) { (quant, tparam) =>
+ // TODO: check `tparam.info.substSym(tpars, quantified) <:< quant.info` instead (for some weird reason not working for test/t6169/ExistF)
+ // for now, crude approximation for the common case
+ if (quant.info.bounds.isEmptyBounds && !tparam.info.bounds.isEmptyBounds) {
+ // avoid creating cycles [pos/t2940] that consist of an existential quantifier's
+ // bounded by an existential type that unhygienically has that quantifier as its own quantifier
+ // (TODO: clone latter existential with fresh quantifiers -- not covering this case for now)
+ if ((existentialsInType(tparam.info) intersect quantified).isEmpty)
+ quant setInfo tparam.info.substSym(tpars, quantified)
+ }
+ }
+ }
+
+ _sharpenQuantifierBounds = false
+ }
+ private[this] var _sharpenQuantifierBounds = true
+
+ override def skolemizeExistential(owner: Symbol, origin: AnyRef) = {
+ // do this here because it's quite close to what Java does:
+ // when checking subtyping/type equality, enter constraints
+ // derived from the existentially quantified type into the typing environment
+ // (aka \Gamma, which tracks types for variables and constraints/kinds for types)
+ // as a nice bonus, delaying this until we need it avoids cyclic errors
+ if (_sharpenQuantifierBounds) sharpenQuantifierBounds
+
deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
+ }
private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
case TypeRef(_, sym, _) if (qset contains sym) =>
@@ -2878,6 +2952,8 @@ trait Types
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
+
+ override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]")
}
trait UntouchableTypeVar extends TypeVar {
@@ -2998,15 +3074,19 @@ trait Types
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
assert(tp != this, tp) // implies there is a cycle somewhere (?)
//println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addLoBound(tp, isNumericBound)
+ if (!sharesConstraints(tp)) {
+ undoLog record this
+ constr.addLoBound(tp, isNumericBound)
+ }
}
def addHiBound(tp: Type, isNumericBound: Boolean = false) {
// assert(tp != this)
//println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addHiBound(tp, isNumericBound)
+ if (!sharesConstraints(tp)) {
+ undoLog record this
+ constr.addHiBound(tp, isNumericBound)
+ }
}
// </region>
@@ -3018,6 +3098,16 @@ trait Types
case ConstantTrue => true
case tv: TypeVar => tv.suspended
}
+
+ /** An `AppliedTypeVar` shares the same `TypeConstraint` as the `HKTypeVar` it was spawned from,
+ * and a type inference session can have more than one ATV.
+ * If we don't detect that, we end up with a "cyclic constraint" error when we try to instantiate type parameters
+ * after solving (see pos/t8237).
+ */
+ protected final def sharesConstraints(other: Type): Boolean = other match {
+ case other: TypeVar => constr == other.constr // SI-8237 avoid cycles. Details in pos/t8237.scala
+ case _ => false
+ }
private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) ConstantTrue else ConstantFalse
// SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
@@ -4120,7 +4210,7 @@ trait Types
&& (variance.isCovariant || isSubType(t2, t1, depth))
)
- corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
+ corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
}
def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
@@ -4304,7 +4394,7 @@ trait Types
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
- tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
+ mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
@@ -4553,7 +4643,10 @@ trait Types
private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
- private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+ private[scala] val treeTpe = (t: Tree) => t.tpe
+ private[scala] val symTpe = (sym: Symbol) => sym.tpe
+ private[scala] val symInfo = (sym: Symbol) => sym.info
+ private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations ne Nil
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 2a19441476..42f794736a 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -275,6 +275,7 @@ abstract class UnPickler {
def pflags = flags & PickledFlags
def finishSym(sym: Symbol): Symbol = {
+ markFlagsCompleted(sym)(mask = AllFlags)
sym.privateWithin = privateWithin
sym.info = (
if (atEnd) {
@@ -663,7 +664,7 @@ abstract class UnPickler {
private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter {
private val definedAtRunId = currentRunId
private val p = phase
- override def complete(sym: Symbol) : Unit = try {
+ protected def completeInternal(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
if (p ne null)
slowButSafeEnteringPhase(p) (sym setInfo tp)
@@ -673,6 +674,10 @@ abstract class UnPickler {
catch {
case e: MissingRequirementError => throw toTypeError(e)
}
+ override def complete(sym: Symbol) : Unit = {
+ completeInternal(sym)
+ if (!isCompilerUniverse) markAllCompleted(sym)
+ }
override def load(sym: Symbol) { complete(sym) }
}
@@ -680,8 +685,9 @@ abstract class UnPickler {
* of completed symbol to symbol at index `j`.
*/
private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
- override def complete(sym: Symbol) = try {
- super.complete(sym)
+ override def completeInternal(sym: Symbol) = try {
+ super.completeInternal(sym)
+
var alias = at(j, readSymbol)
if (alias.isOverloaded)
alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
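A hedged sketch of the completer split above: subclasses now extend a protected `completeInternal`, while `complete` alone performs the markAllCompleted-style bookkeeping exactly once. The class and field names below are illustrative stand-ins, not the UnPickler's real types:

// Sketch: the overridable work lives in completeInternal; complete wraps it
// and is the single place that flips the "fully initialized" marker.
object CompleterSplitSketch {
  final class Sym { var info: String = ""; var allCompleted: Boolean = false }

  class LazyRef(i: Int) {
    protected def completeInternal(sym: Sym): Unit =
      sym.info = s"type-at-$i"
    final def complete(sym: Sym): Unit = {
      completeInternal(sym)
      sym.allCompleted = true                 // runs after all subclass work
    }
  }

  class LazyRefAndAlias(i: Int, j: Int) extends LazyRef(i) {
    override protected def completeInternal(sym: Sym): Unit = {
      super.completeInternal(sym)
      sym.info += s", alias at $j"            // extra step, still before the mark
    }
  }

  def main(args: Array[String]): Unit = {
    val sym = new Sym
    new LazyRefAndAlias(1, 2).complete(sym)
    println(s"${sym.info}; completed = ${sym.allCompleted}")
  }
}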
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index e2159d30f5..564cbb1ce3 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -257,6 +257,12 @@ private[internal] trait TypeConstraints {
// println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tv => tv.instWithinBounds || util.andFalse(log(s"Inferred type for ${tv.originString} does not conform to bounds: ${tv.constr}")))
+
+ def logBounds(tv: TypeVar) = log {
+ val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}"
+ s"Inferred type for ${tv.originString} (${tv.inst}) $what"
+ }
+
+ tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv)))
}
}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 09f4389b82..f427813c01 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -861,7 +861,7 @@ private[internal] trait TypeMaps {
class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
private val actuals = actuals0.toIndexedSeq
private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+ def existentialsNeeded: List[Symbol] = existentials.iterator.filter(_ ne null).toList
private object StableArg {
def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 7cc2952c96..d128521be8 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -47,6 +47,30 @@ trait Collections {
final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f)
final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f)
+ /** A version of List#map, specialized for List, and optimized to avoid allocation if `as` is empty */
+ final def mapList[A, B](as: List[A])(f: A => B): List[B] = if (as eq Nil) Nil else {
+ val head = new ::[B](f(as.head), Nil)
+ var tail: ::[B] = head
+ var rest = as.tail
+ while (rest ne Nil) {
+ val next = new ::(f(rest.head), Nil)
+ tail.tl = next
+ tail = next
+ rest = rest.tail
+ }
+ head
+ }
+
+ final def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = {
+ @tailrec
+ def loop(rest: List[A]): Option[B] = rest match {
+ case Nil => None
+ case a :: as if pf.isDefinedAt(a) => Some(pf(a))
+ case a :: as => loop(as)
+ }
+ loop(as)
+ }
+
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
var ys1 = xs1
@@ -99,15 +123,19 @@ trait Collections {
else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
}
final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
- val lb = new ListBuffer[C]
+ var lb: ListBuffer[C] = null
var ys1 = xs1
var ys2 = xs2
while (!ys1.isEmpty && !ys2.isEmpty) {
- lb ++= f(ys1.head, ys2.head)
+ val cs = f(ys1.head, ys2.head)
+ if (cs ne Nil) {
+ if (lb eq null) lb = new ListBuffer[C]
+ lb ++= cs
+ }
ys1 = ys1.tail
ys2 = ys2.tail
}
- lb.toList
+ if (lb eq null) Nil else lb.result
}
final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
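The `flatMap2` change above lifts out nicely as a standalone pattern: allocate the `ListBuffer` only when some `f(a, b)` is actually non-empty, so the all-empty case returns `Nil` without any intermediate allocation. (The `mapList` helper can't be reproduced outside the `scala` package because it mutates the `private[scala]` tail of `::`, so this sketch only covers the buffer half.)

// Sketch of the lazily-allocated buffer: no ListBuffer is created unless at
// least one chunk is non-empty, mirroring the patched flatMap2 above.
import scala.collection.mutable.ListBuffer

object LazyBufferSketch {
  def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
    var lb: ListBuffer[C] = null
    var ys1 = xs1
    var ys2 = xs2
    while (ys1.nonEmpty && ys2.nonEmpty) {
      val cs = f(ys1.head, ys2.head)
      if (cs.nonEmpty) {
        if (lb eq null) lb = new ListBuffer[C]
        lb ++= cs
      }
      ys1 = ys1.tail
      ys2 = ys2.tail
    }
    if (lb eq null) Nil else lb.result()
  }

  def main(args: Array[String]): Unit = {
    println(flatMap2(List(1, 2), List(3, 4))((_, _) => Nil))          // List()
    println(flatMap2(List(1, 2), List(3, 4))((a, b) => List(a + b)))  // List(4, 6)
  }
}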
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 039e75fbee..5ccdc15a03 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -43,7 +43,7 @@ abstract class Attachments { self =>
/** Check underlying payload contains an instance of type `T`. */
def contains[T: ClassTag]: Boolean =
- all exists matchesTag[T]
+ !isEmpty && (all exists matchesTag[T])
/** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
* Replaces an existing payload of the same type, if exists.
@@ -57,6 +57,8 @@ abstract class Attachments { self =>
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
else new NonemptyAttachments[Pos](this.pos, newAll)
}
+
+ def isEmpty: Boolean = true
}
// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
@@ -64,4 +66,5 @@ abstract class Attachments { self =>
private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
type Pos = P
def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+ override def isEmpty: Boolean = false
}
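A simplified stand-in for the `isEmpty` fast path above: the base (empty) case answers `isEmpty = true`, so `contains` can return before touching `all`, and only the non-empty subclass pays for the set traversal. Types and names here are invented for the example.

// Sketch: the empty base case short-circuits contains via isEmpty; only the
// non-empty subclass ever scans its payload set.
import scala.reflect.ClassTag

object AttachmentsSketch {
  abstract class Atts {
    def all: Set[Any] = Set.empty
    def isEmpty: Boolean = true
    def contains[T](implicit tag: ClassTag[T]): Boolean =
      !isEmpty && all.exists(a => tag.runtimeClass.isInstance(a.asInstanceOf[AnyRef]))
    def add(payload: Any): Atts = new NonemptyAtts(all + payload)
  }
  object EmptyAtts extends Atts
  final class NonemptyAtts(override val all: Set[Any]) extends Atts {
    override def isEmpty: Boolean = false
  }

  def main(args: Array[String]): Unit = {
    println(EmptyAtts.contains[String])                 // false, no set traversal
    println(EmptyAtts.add("payload").contains[String])  // true
  }
}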
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
index 222ae43d79..68e07dd319 100644
--- a/src/reflect/scala/reflect/macros/Evals.scala
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -17,13 +17,13 @@ trait Evals {
* permitted by the shape of the arguments.
*
* Known issues: because of [[https://issues.scala-lang.org/browse/SI-5748 https://issues.scala-lang.org/browse/SI-5748]]
- * trees being evaluated first need to undergo `resetAllAttrs`. Resetting symbols and types
+ * trees being evaluated first need to undergo `untypecheck`. Resetting symbols and types
* mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first.
*
* {{{
* scala> def impl(c: Context)(x: c.Expr[String]) = {
- * | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate))
- * | println(s"compile-time value is: \${c.eval(x1)}")
+ * | val x1 = c.Expr[String](c.untypecheck(x.tree.duplicate))
+ * | println(s"compile-time value is: ${c.eval(x1)}")
* | x
* | }
* impl: (c: Context)(x: c.Expr[String])c.Expr[String]
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index 87de442921..6c077de1d2 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -68,12 +68,6 @@ trait Typers {
*/
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
- /** Recursively resets symbols and types in a given tree.
- * WARNING: Don't use this API, go for [[untypecheck]] instead.
- */
- @deprecated("Use `c.untypecheck` instead", "2.11.0")
- def resetAllAttrs(tree: Tree): Tree
-
/** Recursively resets locally defined symbols and types in a given tree.
* WARNING: Don't use this API, go for [[untypecheck]] instead.
*/
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index d84e6aa737..bc5c8b2840 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -122,7 +122,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def setType(tp: Type): Tree
/** Like `setType`, but if this is a previously empty TypeTree that
- * fact is remembered so that resetAllAttrs will snap back.
+ * fact is remembered so that `untypecheck` will snap back.
*
* \@PP: Attempting to elaborate on the above, I find: If defineType
* is called on a TypeTree whose type field is null or NoType,
@@ -130,7 +130,8 @@ abstract class Universe extends scala.reflect.api.Universe {
* ResetAttrsTraverser, which nulls out the type field of TypeTrees
* for which wasEmpty is true, leaving the others alone.
*
- * resetAllAttrs is used in situations where some speculative
+ * `untypecheck` (or `resetAttrs` in compiler parlance) is used
+ * in situations where some speculative
* typing of a tree takes place, fails, and the tree needs to be
* returned to its former state to try again. So according to me:
* using `defineType` instead of `setType` is how you communicate
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 7cc5176507..68c67bb1f8 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -42,7 +42,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// overriden by ReflectGlobal
def rootClassLoader: ClassLoader = this.getClass.getClassLoader
- trait JavaClassCompleter extends FlagAssigningCompleter
+ trait JavaClassCompleter
def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized {
mirrors get cl match {
@@ -63,10 +63,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private[reflect] lazy val runDefinitions = new definitions.RunDefinitions // only one "run" in the reflection universe
import runDefinitions._
- override lazy val RootPackage = new RootPackage with SynchronizedTermSymbol
- override lazy val RootClass = new RootClass with SynchronizedModuleClassSymbol
- override lazy val EmptyPackage = new EmptyPackage with SynchronizedTermSymbol
- override lazy val EmptyPackageClass = new EmptyPackageClass with SynchronizedModuleClassSymbol
+ override lazy val RootPackage = (new RootPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val RootClass = (new RootClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val EmptyPackage = (new EmptyPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags)
+ override lazy val EmptyPackageClass = (new EmptyPackageClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags)
/** The lazy type for root.
*/
@@ -575,6 +575,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val bytes = ssig.getBytes
val len = ByteCodecs.decode(bytes)
unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
+ markAllCompleted(clazz, module)
case None =>
loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
case Some(slsig) =>
@@ -583,6 +584,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val len = ByteCodecs.decode(encoded)
val decoded = encoded.take(len)
unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
+ markAllCompleted(clazz, module)
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
@@ -605,6 +607,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName))
.setInfo(new TypeParamCompleter(jtvar))
+ markFlagsCompleted(tparam)(mask = AllFlags)
tparamCache enter (jtvar, tparam)
tparam
}
@@ -617,6 +620,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol) = complete(sym)
override def complete(sym: Symbol) = {
sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny))
+ markAllCompleted(sym)
}
}
@@ -655,7 +659,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* @param module The Scala companion object for which info is copied
* @param jclazz The Java class
*/
- private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter {
+ private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAgnosticCompleter {
+ // one doesn't need to do non-trivial computations to assign flags for Java-based reflection artifacts
+ // therefore I'm moving flag-assigning logic from completion to construction
+ val flags = jclazz.scalaFlags
+ clazz setFlag (flags | JAVA)
+ if (module != NoSymbol) {
+ module setFlag (flags & PRIVATE | JAVA)
+ module.moduleClass setFlag (flags & PRIVATE | JAVA)
+ }
+ markFlagsCompleted(clazz, module)(mask = AllFlags)
/** used to avoid cycles while initializing classes */
private var parentsLevel = 0
@@ -665,12 +678,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol): Unit = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
- val flags = jclazz.scalaFlags
- clazz setFlag (flags | JAVA)
- if (module != NoSymbol) {
- module setFlag (flags & PRIVATE | JAVA)
- module.moduleClass setFlag (flags & PRIVATE | JAVA)
- }
propagatePackageBoundary(jclazz, relatedSymbols: _*)
copyAnnotations(clazz, jclazz)
@@ -686,6 +693,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def complete(sym: Symbol): Unit = {
load(sym)
completeRest()
+ markAllCompleted(clazz, module)
}
def completeRest(): Unit = gilSynchronized {
@@ -738,6 +746,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
completeRest()
+ markAllCompleted(clazz, module)
}
}
}
@@ -892,6 +901,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val pkg = owner.newPackage(name)
pkg.moduleClass setInfo new LazyPackageType
pkg setInfoAndEnter pkg.moduleClass.tpe
+ markFlagsCompleted(pkg)(mask = AllFlags)
info("made Scala "+pkg)
pkg
} else
@@ -1074,6 +1084,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
fieldCache.enter(jfield, field)
propagatePackageBoundary(jfield, field)
copyAnnotations(field, jfield)
+ markAllCompleted(field)
field
}
@@ -1100,11 +1111,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
setMethType(meth, tparams, paramtpes, resulttpe)
propagatePackageBoundary(jmeth.javaFlags, meth)
copyAnnotations(meth, jmeth)
-
- if (jmeth.javaFlags.isVarargs)
- meth modifyInfo arrayToRepeated
- else
- meth
+ if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated
+ markAllCompleted(meth)
+ meth
}
/**
@@ -1127,6 +1136,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
+ markAllCompleted(constr)
constr
}
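A hedged sketch of the idea stated in the FromJavaClassCompleter comment above: flag assignment is cheap, so it moves from `complete` into the completer's constructor and the flags are marked as settled right away, while the expensive info computation still waits for completion. The types here are invented stand-ins:

// Sketch: cheap flag assignment happens at construction time and is marked
// complete immediately; only the expensive info computation waits for complete().
object EagerFlagsSketch {
  final class Sym {
    var flags: Long = 0L
    var info: String = ""
    @volatile var flagsCompleted = false
    @volatile var allCompleted = false
  }

  class FromJavaLikeCompleter(sym: Sym, javaFlags: Long) {
    sym.flags = javaFlags                     // trivial, done eagerly
    sym.flagsCompleted = true

    def complete(): Unit = {
      sym.info = "members read via Java reflection"  // the non-trivial part
      sym.allCompleted = true
    }
  }

  def main(args: Array[String]): Unit = {
    val sym = new Sym
    val completer = new FromJavaLikeCompleter(sym, javaFlags = 0x20L)
    println(s"flags settled before complete(): ${sym.flagsCompleted}")  // true
    completer.complete()
    println(s"fully completed: ${sym.allCompleted}")                    // true
  }
}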
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index 8811b5513e..fb893cbff1 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -32,8 +32,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
// inaccessible: this._skolemizationLevel
// inaccessible: this._undoLog
// inaccessible: this._intersectionWitness
- // inaccessible: this._volatileRecursions
- // inaccessible: this._pendingVolatiles
// inaccessible: this._subsametypeRecursions
// inaccessible: this._pendingSubTypes
// inaccessible: this._basetypeRecursions
@@ -206,6 +204,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.NoSymbol
this.CyclicReference
// inaccessible: this.TypeHistory
+ this.SymbolOps
this.TermName
this.TypeName
this.Liftable
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 30a3855d70..c56bc28d90 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -6,6 +6,7 @@ import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
+import scala.reflect.internal.Flags._
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -17,6 +18,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* is found, a package is created instead.
*/
class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
+ markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags)
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
@@ -24,6 +26,8 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
+ // NOTE: can't mark as thread-safe here, because unpickleClass might decide to delegate to FromJavaClassCompleter
+ // if (!isCompilerUniverse) markAllCompleted(clazz, module)
}
}
override def load(sym: Symbol) = complete(sym)
@@ -64,6 +68,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
// override def safeToString = pkgClass.toString
openPackageModule(sym)
+ markAllCompleted(sym)
}
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index ddbf3bd629..02155578f8 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -28,19 +28,4 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w
* in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
*/
override def isCompilerUniverse = false
-
- /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests.
- *
- * scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions
- * that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors.
- * We could probably fix those, but at the moment it's too risky.
- *
- * Reflective universes share codebase with scalac, but their surface is much smaller, which means less assumptions.
- * These assumptions are taken care of in this overriden `shouldTriggerCompleter` method.
- */
- override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
- completer match {
- case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0
- case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask)
- }
}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 1a232c8de1..f5e16c6640 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -4,6 +4,7 @@ package runtime
import scala.reflect.io.AbstractFile
import scala.collection.{ immutable, mutable }
+import scala.reflect.internal.Flags._
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -31,23 +32,105 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
trait SynchronizedSymbol extends Symbol {
- def gilSynchronizedIfNotInited[T](body: => T): T = {
- // TODO JZ desired, but prone to race conditions. We need the runtime reflection based
- // type completers to establish a memory barrier upon initialization. Maybe a volatile
- // write? We need to consult with the experts here. Until them, lock pessimistically.
- //
- // `run/reflection-sync-subtypes.scala` fails about 1/50 times otherwise.
- //
- // if (isFullyInitialized) body
- // else gilSynchronized { body }
+ /** (Things written in this comment apply only to runtime reflection. Compile-time reflection,
+ * especially across phases and runs, is somewhat more complicated, but we won't be touching it,
+ * because at the moment we only care about synchronizing runtime reflection).
+ *
+ * As it has been noted on multiple occasions, generally speaking, reflection artifacts aren't thread-safe.
+ * Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number
+ * of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make
+ * sure that things stay deterministic). However, in case of symbols there's hope, because it's only during
+ * initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable
+ * (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe.
+ *
+ * Note that by saying "symbols become immutable" I mean that literally. However, in the very common case of PackageClassSymbols,
+ * even when a symbol finishes its initialization and becomes immutable, its info forever remains mutable.
+ * Therefore even if we no longer need to synchronize a PackageClassSymbol after it's initialized, we still have to take
+ * care of its ClassInfoType (or, more precisely, of the underlying Scope), but that's done elsewhere, and
+ * here we don't need to worry about that.
+ *
+ * Okay, so now we simply check `Symbol.isInitialized` and if it's true, then everything's fine? Haha, nope!
+ * The thing is that some completers call sym.setInfo when still in-flight and then proceed with initialization
+ * (e.g. see LazyPackageType). Consequently, setInfo sets _validTo to current period, which means that after
+ * a call to setInfo isInitialized will start returning true. Unfortunately, this doesn't mean that info becomes
+ * ready to be used, because subsequent initialization might change the info.
+ *
+ * Therefore we need to somehow distinguish between initialized and really initialized symbol states.
+ * Okay, let's do it on a per-completer basis. We have seven kinds of completers to worry about:
+ * 1) LazyPackageType that initializes packages and their underlying package classes
+ * 2) TopClassCompleter that initializes top-level Scala-based class-module companion pairs of static definitions
+ * 3) LazyTypeRef and LazyTypeRefAndAlias set up by TopClassCompleter that initialize the (transitive) members of top-level classes/modules
+ * 4) FromJavaClassCompleter that does the same for both top-level and non-toplevel Java-based classes/modules
+ * 5) Fully-initialized signatures of non-class/module Java-based reflection artifacts
+ * 6) Importing completer that transfers metadata from one universe to another
+ * 7) Signatures of special symbols such as roots and symbolsNotPresentInBytecode
+ *
+ * The mechanisms underlying completion are quite complex, and it'd be only natural to suppose that over time we're going to overlook something.
+ * Wrt isThreadsafe we could have two wrong situations: false positives (isThreadsafe = true, but the symbol isn't actually threadsafe)
+ * and false negatives (isThreadsafe = false, but the symbol is actually threadsafe). However, even though both are wrong, only the former
+ * is actively malicious. Indeed, false positives might lead to races, inconsistent state and crashes, while false negatives would only cause
+ * `initialize` to be called and a gil to be taken on every potentially auto-initializable operation. Unpleasant yes, but still robust.
+ *
+ * What makes me hopeful is that:
+ * 1) By default (e.g. if some new completion mechanism gets introduced for a special flavor of symbols and we forget to call markCompleted)
+ * isThreadsafe is always in false negative state, which is unpleasant but safe.
+ * 2) Calls to `markCompleted`, which are the only potential source of erroneous behavior, are few and relatively easy to place:
+ * just put them right before your completer's `complete` returns, and you should be fine.
+ *
+ * upd. Actually, there's another problem: not keeping the initialization mask up-to-date. If we're not careful enough,
+ * then reading a flag that the compiler assumes to be definitively set may spuriously
+ * return isThreadsafe(purpose = FlagsOp(<flag>)) = false, and that will lead to spurious auto-initialization,
+ * which will cause a stack overflow, a cyclic reference or some other crash. I've done my best to go through all possible completers
+ * and call `markFlagsCompleted` where appropriate, but again over time something might be overlooked, so to guard against that
+ * I'm only considering TopLevelPickledFlags to be sources of potential initialization. This ensures that such system flags as
+ * isMethod, isModule or isPackage are never going to auto-initialize.
+ */
+ override def isThreadsafe(purpose: SymbolOps) = {
+ if (isCompilerUniverse) false
+ else if (_initialized) true
+ else purpose.isFlagRelated && (_initializationMask & purpose.mask & TopLevelPickledFlags) == 0
+ }
+
+ /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders
+ * about the status of initialization of the underlying symbol.
+ *
+ * Unfortunately, it's not as easy as just introducing the `markThreadsafe` method that would be called
+ * by the completers when they are really done (as opposed to `setInfo` that, as mentioned above, doesn't mean anything).
+ *
+ * Since we also want to auto-initialize symbols when certain methods are being called (`Symbol.hasFlag` for example),
+ * we need to track the identity of the initializer, so as to block until initialization is complete if the caller
+ * comes from a different thread, but to skip auto-initialization if we're the initializing thread.
+ *
+ * Just a volatile var is fine, because:
+ * 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized
+ * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts.
+ * 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone
+ * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet)
+ * or a no-op (if the symbol is already inited), and that is fine in both cases.
+ *
+ * upd. It looks like we also need to keep track of a mask of initialized flags to make sure
+ * that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag).
+ * Due to the same reasoning as above, a single volatile var is enough to store the mask.
+ */
+ @volatile private[this] var _initialized = false
+ @volatile private[this] var _initializationMask = TopLevelPickledFlags
+ override def markFlagsCompleted(mask: Long): this.type = { _initializationMask = _initializationMask & ~mask; this }
+ override def markAllCompleted(): this.type = { _initializationMask = 0L; _initialized = true; this }
+
+ def gilSynchronizedIfNotThreadsafe[T](body: => T): T = {
+ // TODO: debug and fix the race that doesn't allow us to uncomment this optimization
+ // if (isCompilerUniverse || isThreadsafe(purpose = AllOps)) body
+ // else gilSynchronized { body }
gilSynchronized { body }
}
- override def validTo = gilSynchronizedIfNotInited { super.validTo }
- override def info = gilSynchronizedIfNotInited { super.info }
- override def rawInfo: Type = gilSynchronizedIfNotInited { super.rawInfo }
+ override def validTo = gilSynchronizedIfNotThreadsafe { super.validTo }
+ override def info = gilSynchronizedIfNotThreadsafe { super.info }
+ override def rawInfo: Type = gilSynchronizedIfNotThreadsafe { super.rawInfo }
+ override def typeSignature: Type = gilSynchronizedIfNotThreadsafe { super.typeSignature }
+ override def typeSignatureIn(site: Type): Type = gilSynchronizedIfNotThreadsafe { super.typeSignatureIn(site) }
- override def typeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ override def typeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
if (isCompilerUniverse) super.typeParams
else {
if (isMonomorphicType) Nil
@@ -63,7 +146,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
}
}
}
- override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe {
if (isCompilerUniverse) super.unsafeTypeParams
else {
if (isMonomorphicType) Nil
@@ -71,8 +154,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
}
}
- override def isStable: Boolean = gilSynchronized { super.isStable }
-
// ------ creators -------------------------------------------------------------------
override protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol =
@@ -126,7 +207,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
// we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
// unfortunately we cannot elide this lock, because the cache depends on `pre`
private lazy val typeAsMemberOfLock = new Object
- override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotInited { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
+ override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
}
trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
@@ -135,7 +216,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
// unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on
// temporarily assigning NoType to tpeCache to detect cyclic reference errors
private lazy val tpeLock = new Object
- override def tpe_* : Type = gilSynchronizedIfNotInited { tpeLock.synchronized { super.tpe_* } }
+ override def tpe_* : Type = gilSynchronizedIfNotThreadsafe { tpeLock.synchronized { super.tpe_* } }
}
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol
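Pulling the mechanism described in the long comment above into one self-contained (and deliberately simplified) sketch: two volatile markers flipped by the completer, a flag mask that decides which flag reads may auto-initialize, and a lock wrapper whose fast path the TODO above would eventually enable. All names are illustrative; the real code also consults `isCompilerUniverse` and `purpose.isFlagRelated`.

// Sketch of the initialization tracking: markFlagsCompleted / markAllCompleted
// narrow what is still "unsafe", isThreadsafe reads only volatile state, and
// the wrapper can skip the global lock once a symbol is known to be safe.
object ThreadsafeSymbolSketch {
  private val gil = new Object
  final val TopLevelPickledFlags = 0xFFL      // stand-in for the real mask

  final class SymState {
    @volatile private[this] var _initialized = false
    @volatile private[this] var _initializationMask = TopLevelPickledFlags

    def markFlagsCompleted(mask: Long): this.type = { _initializationMask &= ~mask; this }
    def markAllCompleted(): this.type = { _initializationMask = 0L; _initialized = true; this }

    def isThreadsafe(flagMask: Long): Boolean =
      _initialized || (_initializationMask & flagMask & TopLevelPickledFlags) == 0

    def gilSynchronizedIfNotThreadsafe[T](flagMask: Long)(body: => T): T =
      if (isThreadsafe(flagMask)) body        // fast path: no global lock
      else gil.synchronized { body }
  }

  def main(args: Array[String]): Unit = {
    val state = new SymState
    println(state.isThreadsafe(0x1L))         // false: completer hasn't finished
    state.markAllCompleted()
    println(state.gilSynchronizedIfNotThreadsafe(0x1L)("lock-free read"))
  }
}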
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index 83d471f91e..9bcf85dd6f 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -50,13 +50,6 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
private lazy val _intersectionWitness = mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]())
override def intersectionWitness = _intersectionWitness.get
- private lazy val _volatileRecursions = mkThreadLocalStorage(0)
- override def volatileRecursions = _volatileRecursions.get
- override def volatileRecursions_=(value: Int) = _volatileRecursions.set(value)
-
- private lazy val _pendingVolatiles = mkThreadLocalStorage(new mutable.HashSet[Symbol])
- override def pendingVolatiles = _pendingVolatiles.get
-
private lazy val _subsametypeRecursions = mkThreadLocalStorage(0)
override def subsametypeRecursions = _subsametypeRecursions.get
override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value)
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index e5dbaa3fd5..5ea1443a19 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -122,6 +122,11 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
)
+ val docAuthor = BooleanSetting (
+ "-author",
+ "Include authors."
+ )
+
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -207,7 +212,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
- docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docAuthor, docDiagrams, docDiagramsDebug, docDiagramsDotPath,
docDiagramsDotTimeout, docDiagramsDotRestart,
docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 119d4e0143..26ee005d3e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -351,6 +351,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
+ val authorComment =
+ if (! s.docAuthor || mbr.comment.isEmpty ||
+ mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NodeSeq.Empty
+ else <div class="comment cmt">
+ {if (mbr.comment.get.authors.size > 1) <h6>Authors:</h6> else <h6>Author:</h6>}
+ { mbr.comment.get.authors map bodyToHtml}
+ </div>
+
val paramComments = {
val prs: List[ParameterEntity] = mbr match {
case cls: Class => cls.typeParams ::: cls.valueParams.flatten
@@ -681,7 +689,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
+ memberComment ++ authorComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 8f217e087c..ef84ac42ba 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -10,6 +10,7 @@ import diagram._
import scala.collection._
import scala.util.matching.Regex
+import scala.reflect.macros.internal.macroImpl
import symtab.Flags
import io._
@@ -80,7 +81,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- def annotations = sym.annotations.map(makeAnnotation)
+ def annotations = sym.annotations.filterNot(_.tpe =:= typeOf[macroImpl]).map(makeAnnotation)
def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
def isType = sym.name.isTypeName
}
@@ -145,6 +146,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
+ if (sym.isMacro) fgs += Paragraph(Text("macro"))
fgs.toList
}
def deprecation =
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
index 549abd5e64..b6951f71ee 100644
--- a/test/files/jvm/t6941/Analyzed_1.scala
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -6,6 +6,6 @@ class SameBytecode {
}
def b(xs: List[Int]) = xs match {
- case xs: ::[Int] => xs.hd$1
+ case xs: ::[Int] => xs.head
}
} \ No newline at end of file
diff --git a/test/files/neg/accesses.check b/test/files/neg/accesses.check
index 5a5e03233e..db58af12ce 100644
--- a/test/files/neg/accesses.check
+++ b/test/files/neg/accesses.check
@@ -1,3 +1,7 @@
+accesses.scala:23: error: overriding method f2 in class A of type ()Unit;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Unit = ()
+ ^
accesses.scala:24: error: overriding method f3 in class A of type ()Unit;
method f3 has weaker access privileges; it should be at least protected
private[p2] def f3(): Unit = ()
@@ -10,4 +14,4 @@ accesses.scala:26: error: overriding method f5 in class A of type ()Unit;
method f5 has weaker access privileges; it should be at least protected[p1]
protected[p2] def f5(): Unit
^
-three errors found
+four errors found
diff --git a/test/files/neg/accesses2.check b/test/files/neg/accesses2.check
index 554a7b4c81..66cf9a116e 100644
--- a/test/files/neg/accesses2.check
+++ b/test/files/neg/accesses2.check
@@ -1,4 +1,12 @@
+accesses2.scala:6: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Int = 1
+ ^
accesses2.scala:5: error: class B1 needs to be abstract, since method f2 in class A of type ()Int is not defined
class B1 extends A {
^
-one error found
+accesses2.scala:9: error: overriding method f2 in class A of type ()Int;
+ method f2 has weaker access privileges; it should not be private
+ private def f2(): Int = 1
+ ^
+three errors found
diff --git a/test/files/neg/missing-param-type-tuple.check b/test/files/neg/missing-param-type-tuple.check
index bc46ba1023..3a4258ff8c 100644
--- a/test/files/neg/missing-param-type-tuple.check
+++ b/test/files/neg/missing-param-type-tuple.check
@@ -1,6 +1,6 @@
missing-param-type-tuple.scala:3: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 2-Tuple.
- Consider a pattern matching anoynmous function, `{ case (a, b) => ... }`
+ Consider a pattern matching anonymous function, `{ case (a, b) => ... }`
val x: ((Int, Int)) => Int = (a, b) => 0
^
missing-param-type-tuple.scala:3: error: missing parameter type
@@ -8,7 +8,7 @@ missing-param-type-tuple.scala:3: error: missing parameter type
^
missing-param-type-tuple.scala:5: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 3-Tuple.
- Consider a pattern matching anoynmous function, `{ case (param1, ..., param3) => ... }`
+ Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... }`
val y: ((Int, Int, Int)) => Int = (a, b, !!) => 0
^
missing-param-type-tuple.scala:5: error: missing parameter type
@@ -19,7 +19,7 @@ missing-param-type-tuple.scala:5: error: missing parameter type
^
missing-param-type-tuple.scala:7: error: missing parameter type
Note: The expected type requires a one-argument function accepting a 3-Tuple.
- Consider a pattern matching anoynmous function, `{ case (param1, ..., param3) => ... }`
+ Consider a pattern matching anonymous function, `{ case (param1, ..., param3) => ... }`
val z: ((Int, Int, Int)) => Int = (a, NotAVariablePatternName, c) => 0
^
missing-param-type-tuple.scala:7: error: missing parameter type
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 880ddc4327..20ddd55f1f 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -88,7 +88,7 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
- required: ?T
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
Error occurred in an application involving default arguments.
test4()
^
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
index 457f497f2f..703846ad62 100644
--- a/test/files/neg/stringinterpolation_macro-neg.check
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -1,61 +1,61 @@
-stringinterpolation_macro-neg.scala:8: error: too few parts
+stringinterpolation_macro-neg.scala:13: error: there are no parts
new StringContext().f()
^
-stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:14: error: too few arguments for interpolated string
new StringContext("", " is ", "%2d years old").f(s)
^
-stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string
+stringinterpolation_macro-neg.scala:15: error: too many arguments for interpolated string
new StringContext("", " is ", "%2d years old").f(s, d, d)
^
-stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string
+stringinterpolation_macro-neg.scala:16: error: too few arguments for interpolated string
new StringContext("", "").f()
^
-stringinterpolation_macro-neg.scala:14: error: type mismatch;
+stringinterpolation_macro-neg.scala:19: error: type mismatch;
found : String
required: Boolean
f"$s%b"
^
-stringinterpolation_macro-neg.scala:15: error: type mismatch;
+stringinterpolation_macro-neg.scala:20: error: type mismatch;
found : String
required: Char
f"$s%c"
^
-stringinterpolation_macro-neg.scala:16: error: type mismatch;
+stringinterpolation_macro-neg.scala:21: error: type mismatch;
found : Double
required: Char
f"$f%c"
^
-stringinterpolation_macro-neg.scala:17: error: type mismatch;
+stringinterpolation_macro-neg.scala:22: error: type mismatch;
found : String
required: Int
f"$s%x"
^
-stringinterpolation_macro-neg.scala:18: error: type mismatch;
+stringinterpolation_macro-neg.scala:23: error: type mismatch;
found : Boolean
required: Int
f"$b%d"
^
-stringinterpolation_macro-neg.scala:19: error: type mismatch;
+stringinterpolation_macro-neg.scala:24: error: type mismatch;
found : String
required: Int
f"$s%d"
^
-stringinterpolation_macro-neg.scala:20: error: type mismatch;
+stringinterpolation_macro-neg.scala:25: error: type mismatch;
found : Double
required: Int
f"$f%o"
^
-stringinterpolation_macro-neg.scala:21: error: type mismatch;
+stringinterpolation_macro-neg.scala:26: error: type mismatch;
found : String
required: Double
f"$s%e"
^
-stringinterpolation_macro-neg.scala:22: error: type mismatch;
+stringinterpolation_macro-neg.scala:27: error: type mismatch;
found : Boolean
required: Double
f"$b%f"
^
-stringinterpolation_macro-neg.scala:27: error: type mismatch;
+stringinterpolation_macro-neg.scala:32: error: type mismatch;
found : String
required: Int
Note that implicit conversions are not applicable because they are ambiguous:
@@ -64,7 +64,109 @@ Note that implicit conversions are not applicable because they are ambiguous:
are possible conversion functions from String to Int
f"$s%d"
^
-stringinterpolation_macro-neg.scala:30: error: illegal conversion character
+stringinterpolation_macro-neg.scala:35: error: illegal conversion character 'i'
f"$s%i"
^
-15 errors found
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '+'
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ' '
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '0'
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ','
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:38: error: Illegal flag '('
+ f"$s%+ 0,(s"
+ ^
+stringinterpolation_macro-neg.scala:39: error: Only '-' allowed for c conversion
+ f"$c%#+ 0,(c"
+ ^
+stringinterpolation_macro-neg.scala:40: error: # not allowed for d conversion
+ f"$d%#d"
+ ^
+stringinterpolation_macro-neg.scala:41: error: ',' only allowed for d conversion of integral types
+ f"$d%,x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use '+' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use ' ' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:42: error: only use '(' for BigInt conversions to o, x, X
+ f"$d%+ (x"
+ ^
+stringinterpolation_macro-neg.scala:43: error: ',' not allowed for a, A
+ f"$f%,(a"
+ ^
+stringinterpolation_macro-neg.scala:43: error: '(' not allowed for a, A
+ f"$f%,(a"
+ ^
+stringinterpolation_macro-neg.scala:44: error: Only '-' allowed for date/time conversions
+ f"$t%#+ 0,(tT"
+ ^
+stringinterpolation_macro-neg.scala:47: error: precision not allowed
+ f"$c%.2c"
+ ^
+stringinterpolation_macro-neg.scala:48: error: precision not allowed
+ f"$d%.2d"
+ ^
+stringinterpolation_macro-neg.scala:49: error: precision not allowed
+ f"%.2%"
+ ^
+stringinterpolation_macro-neg.scala:50: error: precision not allowed
+ f"%.2n"
+ ^
+stringinterpolation_macro-neg.scala:51: error: precision not allowed
+ f"$f%.2a"
+ ^
+stringinterpolation_macro-neg.scala:52: error: precision not allowed
+ f"$t%.2tT"
+ ^
+stringinterpolation_macro-neg.scala:55: error: No last arg
+ f"%<s"
+ ^
+stringinterpolation_macro-neg.scala:56: error: No last arg
+ f"%<c"
+ ^
+stringinterpolation_macro-neg.scala:57: error: No last arg
+ f"%<tT"
+ ^
+stringinterpolation_macro-neg.scala:58: error: Argument index out of range
+ f"${8}%d ${9}%d%3$$d"
+ ^
+stringinterpolation_macro-neg.scala:59: error: Argument index out of range
+ f"${8}%d ${9}%d%0$$d"
+ ^
+stringinterpolation_macro-neg.scala:62: warning: Index is not this arg
+ f"${8}%d ${9}%1$$d"
+ ^
+stringinterpolation_macro-neg.scala:63: warning: Argument index ignored if '<' flag is present
+ f"$s%s $s%s %1$$<s"
+ ^
+stringinterpolation_macro-neg.scala:64: warning: Index is not this arg
+ f"$s%s $s%1$$s"
+ ^
+stringinterpolation_macro-neg.scala:67: error: type mismatch;
+ found : String
+ required: java.util.Formattable
+ f"$s%#s"
+ ^
+stringinterpolation_macro-neg.scala:70: error: 'G' doesn't seem to be a date or time conversion
+ f"$t%tG"
+ ^
+stringinterpolation_macro-neg.scala:71: error: Date/time conversion must have two characters
+ f"$t%t"
+ ^
+stringinterpolation_macro-neg.scala:72: error: Missing conversion operator in '%10.5'; use %% for literal %, %n for newline
+ f"$s%10.5"
+ ^
+stringinterpolation_macro-neg.scala:75: error: conversions must follow a splice; use %% for literal %, %n for newline
+ f"${d}random-leading-junk%d"
+ ^
+three warnings found
+45 errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala
index ac9d97d678..3869d42d66 100644
--- a/test/files/neg/stringinterpolation_macro-neg.scala
+++ b/test/files/neg/stringinterpolation_macro-neg.scala
@@ -3,6 +3,11 @@ object Test extends App {
val d = 8
val b = false
val f = 3.14159
+ val c = 'c'
+ val t = new java.util.Date
+ val x = new java.util.Formattable {
+ def formatTo(ff: java.util.Formatter, g: Int, w: Int, p: Int): Unit = ff format "xxx"
+ }
// 1) number of arguments
new StringContext().f()
@@ -28,4 +33,44 @@ object Test extends App {
}
f"$s%i"
+
+ // 3) flag mismatches
+ f"$s%+ 0,(s"
+ f"$c%#+ 0,(c"
+ f"$d%#d"
+ f"$d%,x"
+ f"$d%+ (x"
+ f"$f%,(a"
+ f"$t%#+ 0,(tT"
+
+ // 4) bad precisions
+ f"$c%.2c"
+ f"$d%.2d"
+ f"%.2%"
+ f"%.2n"
+ f"$f%.2a"
+ f"$t%.2tT"
+
+ // 5) bad indexes
+ f"%<s"
+ f"%<c"
+ f"%<tT"
+ f"${8}%d ${9}%d%3$$d"
+ f"${8}%d ${9}%d%0$$d"
+
+ // warnings
+ f"${8}%d ${9}%1$$d"
+ f"$s%s $s%s %1$$<s"
+ f"$s%s $s%1$$s"
+
+ // 6) bad arg types
+ f"$s%#s"
+
+ // 7) misunderstood conversions
+ f"$t%tG"
+ f"$t%t"
+ f"$s%10.5"
+
+ // 8) other brain failures
+ f"${d}random-leading-junk%d"
}
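For contrast with the negative cases added above, a few well-formed `f`-interpolator uses that pass the checks the new messages enforce (flags matching the conversion, precision only where `java.util.Formatter` allows it, `%%`/`%n` for literals). This is plain standard-library usage, shown only as a reference point:

// Well-formed counterparts to the errors exercised in the test above.
object FormatOk extends App {
  val s = "hello"
  val d = 8
  val f = 3.14159
  val t = new java.util.Date

  println(f"$d%,d")        // ',' is legal for decimal integer conversions
  println(f"$d%+d")        // '+' is legal for signed decimal conversions
  println(f"$f%.2f")       // precision is legal for floating-point conversions
  println(f"$s%10.5s")     // width and precision are legal for strings
  println(f"$t%tT")        // a two-character date/time conversion
  println(f"100%% done%n") // %% for a literal percent, %n for a newline
}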
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
index d2c40f4df8..61c33f99b1 100644
--- a/test/files/neg/t7325.check
+++ b/test/files/neg/t7325.check
@@ -1,13 +1,13 @@
-t7325.scala:2: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:2: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%")
^
-t7325.scala:4: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:4: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%%%")
^
-t7325.scala:6: error: conversions must follow a splice; use %% for literal %, %n for newline
+t7325.scala:6: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"%%%%%")
^
-t7325.scala:16: error: wrong conversion string
+t7325.scala:16: error: Missing conversion operator in '%'; use %% for literal %, %n for newline
println(f"${0}%")
^
t7325.scala:19: error: conversions must follow a splice; use %% for literal %, %n for newline
diff --git a/test/files/neg/t8143a.check b/test/files/neg/t8143a.check
new file mode 100644
index 0000000000..4e11000a2a
--- /dev/null
+++ b/test/files/neg/t8143a.check
@@ -0,0 +1,5 @@
+t8143a.scala:2: error: overriding method f in class Foo of type => Int;
+ method f has weaker access privileges; it should not be private
+class Bar extends Foo { private def f = 10 }
+ ^
+one error found
diff --git a/test/files/neg/t8143a.scala b/test/files/neg/t8143a.scala
new file mode 100644
index 0000000000..4ec539e671
--- /dev/null
+++ b/test/files/neg/t8143a.scala
@@ -0,0 +1,15 @@
+class Foo { def f = 5 }
+class Bar extends Foo { private def f = 10 }
+
+
+class Foo1 { private def f = 5 }
+class Bar1 extends Foo1 { def f = 10 } // okay
+
+class Foo2 { private def f = 5 }
+class Bar2 extends Foo2 { private def f = 10 } // okay
+
+class Foo3 { private[this] def f = 5 }
+class Bar3 extends Foo3 { private def f = 10 } // okay
+
+class Foo4 { private def f = 5 }
+class Bar4 extends Foo4 { private[this] def f = 10 } // okay \ No newline at end of file
diff --git a/test/files/neg/t8228.check b/test/files/neg/t8228.check
new file mode 100644
index 0000000000..02eff4b1b7
--- /dev/null
+++ b/test/files/neg/t8228.check
@@ -0,0 +1,4 @@
+t8228.scala:4: error: recursive value foo needs type
+ val foo = foo(null)
+ ^
+one error found
diff --git a/test/files/neg/t8228.scala b/test/files/neg/t8228.scala
new file mode 100644
index 0000000000..19d71aeab4
--- /dev/null
+++ b/test/files/neg/t8228.scala
@@ -0,0 +1,7 @@
+object X {
+ def bar = {
+ def foo(x: Any) = ""
+ val foo = foo(null)
+ foo(null) // cycle in isApplicableBasedOnArity
+ }
+}
diff --git a/test/files/neg/t8237-default.check b/test/files/neg/t8237-default.check
new file mode 100644
index 0000000000..59fe21ed03
--- /dev/null
+++ b/test/files/neg/t8237-default.check
@@ -0,0 +1,13 @@
+t8237-default.scala:5: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+ test4(test4$default$1)
+ ^
+t8237-default.scala:5: error: type mismatch;
+ found : List[Int]
+ required: T[T[List[T[X forSome { type X }]]]]
+ test4(test4$default$1)
+ ^
+two errors found
diff --git a/test/files/neg/t8237-default.scala b/test/files/neg/t8237-default.scala
new file mode 100644
index 0000000000..f695aa523f
--- /dev/null
+++ b/test/files/neg/t8237-default.scala
@@ -0,0 +1,29 @@
+// This test case was extracted from `names-defaults-neg.scala`.
+// It pinpoints an improvement to an error message that results from
+// a type inference failure.
+object Test extends App {
+ test4(test4$default$1)
+
+ def test4[T[P]](x: T[T[List[T[X forSome { type X }]]]]) = ???
+ def test4$default$1[T[P]]: List[Int] = ???
+}
+
+/*
+OLD:
+ no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T
+ test4(test4$default$1)
+ ^
+
+NEW:
+
+no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])Nothing exist so that it can be applied to arguments (List[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : List[Int]
+ required: ?T[?T[List[?T[X forSome { type X }]]]]
+ test4(test4$default$1)
+*/
diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala
index e312f9abbf..84a01bcce5 100644
--- a/test/files/pos/annotated-original/M_1.scala
+++ b/test/files/pos/annotated-original/M_1.scala
@@ -2,6 +2,6 @@ import language.experimental.macros
import scala.reflect.macros.blackbox.Context
object M {
- def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.resetLocalAttrs(a.tree))
+ def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree))
def m(a: Any) = macro impl
}
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index b02864b994..fdf9c72c31 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -44,7 +44,7 @@ object Macros {
val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice
def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice
})
- val untyped = c.resetLocalAttrs(template.tree)
+ val untyped = c.untypecheck(template.tree)
c.Expr[T => U](untyped)
case _ => sys.error("Bad function type")
diff --git a/test/files/pos/t6169/Exist.java b/test/files/pos/t6169/Exist.java
new file mode 100644
index 0000000000..dfc6b36b33
--- /dev/null
+++ b/test/files/pos/t6169/Exist.java
@@ -0,0 +1,4 @@
+public class Exist<T extends String> {
+ // java helpfully re-interprets Exist<?> as Exist<? extends String>
+ public Exist<?> foo() { throw new RuntimeException(); }
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/ExistF.java b/test/files/pos/t6169/ExistF.java
new file mode 100644
index 0000000000..70fabd74cf
--- /dev/null
+++ b/test/files/pos/t6169/ExistF.java
@@ -0,0 +1,4 @@
+public class ExistF<T extends ExistF<T>> {
+ // java helpfully re-interprets ExistF<?> as ExistF<?0 extends ExistF<?0>>
+ public ExistF<?> foo() { throw new RuntimeException(); }
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/ExistIndir.java b/test/files/pos/t6169/ExistIndir.java
new file mode 100644
index 0000000000..e66d1698c4
--- /dev/null
+++ b/test/files/pos/t6169/ExistIndir.java
@@ -0,0 +1,4 @@
+public class ExistIndir<T extends String, U extends T> {
+ // java helpfully re-interprets ExistIndir<?> as ExistIndir<? extends String>
+ public ExistIndir<?, ?> foo() { throw new RuntimeException(); }
+}
diff --git a/test/files/pos/t6169/OP.java b/test/files/pos/t6169/OP.java
new file mode 100644
index 0000000000..15e4c5640f
--- /dev/null
+++ b/test/files/pos/t6169/OP.java
@@ -0,0 +1 @@
+public abstract class OP<T> { }
diff --git a/test/files/pos/t6169/Skin.java b/test/files/pos/t6169/Skin.java
new file mode 100644
index 0000000000..780de1ee09
--- /dev/null
+++ b/test/files/pos/t6169/Skin.java
@@ -0,0 +1 @@
+public interface Skin<C extends Skinnable> { }
diff --git a/test/files/pos/t6169/Skinnable.java b/test/files/pos/t6169/Skinnable.java
new file mode 100644
index 0000000000..f91eaa30d8
--- /dev/null
+++ b/test/files/pos/t6169/Skinnable.java
@@ -0,0 +1,3 @@
+public interface Skinnable {
+ OP<Skin<?>> skinProperty();
+}
diff --git a/test/files/pos/t6169/skinnable.scala b/test/files/pos/t6169/skinnable.scala
new file mode 100644
index 0000000000..3ba2734526
--- /dev/null
+++ b/test/files/pos/t6169/skinnable.scala
@@ -0,0 +1,14 @@
+object ObjectProperty {
+ implicit def jfxObjectProperty2sfx[T](p: OP[T]) = new ObjectProperty[T](p)
+}
+
+class ObjectProperty[T](val delegate: OP[T])
+
+trait TestWildcardBoundInference {
+ def delegate: Skinnable
+ def skin: ObjectProperty[Skin[_ /* inferred: <: Skinnable */]] = ObjectProperty.jfxObjectProperty2sfx(delegate.skinProperty)
+ skin: ObjectProperty[Skin[_ <: Skinnable]]
+
+ def skinCheckInference = delegate.skinProperty
+ skinCheckInference: ObjectProperty[Skin[_ <: Skinnable]]
+} \ No newline at end of file
diff --git a/test/files/pos/t6169/t6169.scala b/test/files/pos/t6169/t6169.scala
new file mode 100644
index 0000000000..37f42619ca
--- /dev/null
+++ b/test/files/pos/t6169/t6169.scala
@@ -0,0 +1,7 @@
+class Test {
+ class MyExist extends ExistF[MyExist]
+ // SI-8197, SI-6169: java infers the bounds of existentials, so we have to as well now that SI-1786 is fixed...
+ def stringy: Exist[_ <: String] = (new Exist[String]).foo
+ def fbounded: (ExistF[t] forSome {type t <: ExistF[t] }) = (new MyExist).foo
+ def indir: ExistIndir[_ <: String, _ <: String] = (new ExistIndir[String, String]).foo
+} \ No newline at end of file
diff --git a/test/files/pos/t7322.scala b/test/files/pos/t7322.scala
new file mode 100644
index 0000000000..006bf89e9f
--- /dev/null
+++ b/test/files/pos/t7322.scala
@@ -0,0 +1,11 @@
+
+package object t7322 {
+ implicit class X(sc: StringContext) {
+ def x_?(args: Any*) = "hi there"
+ }
+}
+package t7322 {
+ trait Y {
+ x_?"junk" // assume that if it compiles, it works
+ }
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
index 9f51248095..b38687c8b3 100644
--- a/test/files/pos/t7377/Macro_1.scala
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -2,6 +2,6 @@ import language.experimental._
import scala.reflect.macros.blackbox.Context
object M {
- def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.resetLocalAttrs(expr.tree)))
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.untypecheck(expr.tree)))
def noop[A](expr: A): A = macro noopImpl[A]
}
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
index 3bba19966d..3bd477dcda 100644
--- a/test/files/pos/t7516/A_1.scala
+++ b/test/files/pos/t7516/A_1.scala
@@ -3,7 +3,7 @@ import scala.reflect._,macros._, scala.language.experimental.macros
object A {
def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
val r = c.universe.reify { List(t.splice) }
- c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+ c.Expr[List[T]]( c.untypecheck(r.tree) )
}
def demo[T](t: T): List[T] = macro impl[T]
}
diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala
index dd42950b34..9f1e6955b4 100644
--- a/test/files/pos/t8064/Macro_1.scala
+++ b/test/files/pos/t8064/Macro_1.scala
@@ -5,6 +5,6 @@ object Macro {
def apply(a: Any): Any = macro impl
def impl(c: Context)(a: c.Tree): c.Tree = {
- c.resetLocalAttrs(a)
+ c.untypecheck(a)
}
}
diff --git a/test/files/pos/t8170.scala b/test/files/pos/t8170.scala
new file mode 100644
index 0000000000..b65f4b8572
--- /dev/null
+++ b/test/files/pos/t8170.scala
@@ -0,0 +1,27 @@
+object O {
+ trait X
+ trait B extends A {
+ override type T[F1 <: X] = F1
+ }
+ trait A {
+ type T[F <: X]
+ }
+}
+
+object Test {
+ import O._
+ val a: B = ???
+ val b: a.T[X] = ???
+ b.ensuring(x => true) // trigger an implicit search
+}
+
+
+/*
+this = {AliasArgsTypeRef@3004}"Test#7680.a#14899.T#14823[O#7702.X#7793]"
+ sym = type T#14823
+ info = namer: [F#14824 <: O#7703.X#7793]F#14824
+result = {AbstractNoArgsTypeRef@3237}"F#24451"
+tp = {PolyType@3235}"[F#14824 <: O#7703.X#7793]F#14824"
+tparams =
+ (0) = {AbstractTypeSymbol@3247}"type F#24451"
+*/ \ No newline at end of file
diff --git a/test/files/pos/t8170b.scala b/test/files/pos/t8170b.scala
new file mode 100644
index 0000000000..53036f6c8a
--- /dev/null
+++ b/test/files/pos/t8170b.scala
@@ -0,0 +1,25 @@
+import language._
+
+object ScalaZeee {
+ trait HFold[M[_], U] {
+ type Apply[E, A <: U] <: U
+ }
+ trait GenericCons[M[_], H, +T <: GenericList[M]] extends GenericList[M] {
+ val tail: T
+ override type Folded[N[X] >: M[X], U, F <: HFold[N, U]] = F#Apply[H, tail.Folded[N, U, F]]
+ }
+ val KNil: GenericList[Nothing] = ???
+ sealed trait GenericList[+M[_]] {
+ type Folded[N[X] >: M[X], U, F <: HFold[N, U]] <: U
+ }
+}
+
+object TypelevelUsage {
+ import ScalaZeee._
+ type T = GenericCons[Some, String, KNil.type]
+ val klist1: T = ???
+ type T2 = klist1.Folded[Option, Int, HFold[Option, Int]]
+ val count2: T2 = ???
+
+ count2.ensuring(x => true).toChar // trigger an implicit search
+}
diff --git a/test/files/pos/t8237.scala b/test/files/pos/t8237.scala
new file mode 100644
index 0000000000..005089079e
--- /dev/null
+++ b/test/files/pos/t8237.scala
@@ -0,0 +1,29 @@
+import scala.language.higherKinds
+
+object TestExplicit {
+ trait TC[A]
+ def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+ implicit def tc[T]: TC[T] = ???
+
+ // Typechecking results in SOE in TypeVar.isGround
+ fTt(1)(tc)
+ // fun = TestExplicit.this.fTt[Int, E](1)
+ // args = TestExplicit.this.tc[E[Int]]
+ // argTpes.head.instantiateTypeParams = TC[?E#1[Int]]
+ // formals.head.instantiateTypeParams = TC[?E#2[Int]]
+ // (where ?E#1 and ?E#2 as distinct AppliedTypeVars that resulted
+ // from separate applications of type args to the same HKTypeVar, ?E)
+ //
+ // As we check if the argument conforms to the formal, we would have
+ // AppliedTypeVars sharing the same TypeConstraints on the LHS and RHS,
+ // which leads to a cyclic constraint.
+}
+
+object TestImplicit {
+ trait TC[A]
+ def fTt[A,E[X] <: List[X]](a: A)(implicit tt: TC[E[A]]) = a
+ implicit def tc[T]: TC[T] = ???
+
+ // Oddly enough, this one works.
+ fTt(1)
+}
diff --git a/test/files/pos/t8237b.scala b/test/files/pos/t8237b.scala
new file mode 100644
index 0000000000..52bb310e8b
--- /dev/null
+++ b/test/files/pos/t8237b.scala
@@ -0,0 +1,10 @@
+import scala.language.higherKinds
+import scala.reflect.runtime.universe._
+object Test {
+
+ def fTt[A,E[X]<:List[X]](a: A)(implicit tt: TypeTag[E[A]]) = a
+
+ trait TC[A]
+ implicit def TCListInt[A]: TC[A] = ???
+ fTt(1)
+}
diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala
index c3ace590ed..1d4891fd1f 100644
--- a/test/files/run/global-showdef.scala
+++ b/test/files/run/global-showdef.scala
@@ -54,7 +54,7 @@ object Bippy {
val run = new compiler.Run()
run.compileSources(List(src))
}
- output.linesIterator.toList
+ output.lines.toList
}
def showClass(name: String) = lines("-Yshow:typer", "-Xshow-class", name)
def showObject(name: String) = lines("-Yshow:typer", "-Xshow-object", name)
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
index 624479480d..f038d8714f 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.blackbox.Context
object Impls {
def foo(c: Context)(x: c.Expr[Int]) = {
import c.universe._
- val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
+ val x1 = c.Expr[Int](c.untypecheck(x.tree))
c.Expr[Int](Literal(Constant(c.eval(x1))))
}
}
diff --git a/test/files/run/private-override.scala b/test/files/run/private-override.scala
deleted file mode 100644
index 0a3f57f97c..0000000000
--- a/test/files/run/private-override.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package test.p1.p2 {
- abstract class A {
- private[p2] def f2(): Int = 1
- }
- abstract class Other extends A {
- // It's a private method - not a private[p2] method. Not a failed
- // "weaker access privileges" override, a different namespace.
- private def f2(): Int = super.f2() + 2
- def go() = f2()
- }
-}
-
-object Test extends test.p1.p2.Other {
- def main(args: Array[String]): Unit = {
- println(go())
- }
-}
diff --git a/test/pending/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala
index 0ad5f2ab66..0c96974df7 100644
--- a/test/pending/run/reflection-sync-potpourri.scala
+++ b/test/files/run/reflection-sync-potpourri.scala
@@ -24,7 +24,7 @@ object Test extends App {
override def run(): Unit = {
val s1 = foo("42")
val s2 = perms(diceRolls(i - 1)).map(x => force(x)).sorted.mkString(", ")
- assert(s1 == "java.lang.String")
+ assert(s1 == "String" || s1 == "java.lang.String")
assert(s2 == "java.lang.annotation.Annotation, java.lang.reflect.Method, scala.io.BufferedSource, scala.io.Codec")
}
})
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
index be62c5780b..ead61e76ac 100644
--- a/test/files/run/stringinterpolation_macro-run.check
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -46,6 +46,8 @@ S
120
120
120
+ 0X4
+She is 4 feet tall.
120
42
3.400000e+00
@@ -60,3 +62,6 @@ S
05/26/12
05/26/12
05/26/12
+%
+7 7 9
+7 9 9
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
index 1138cd0860..ff779dd1d3 100644
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -72,6 +72,14 @@ println(f"${120 : java.lang.Integer}%d")
println(f"${120 : java.lang.Long}%d")
println(f"${BigInt(120)}%d")
println(f"${new java.math.BigInteger("120")}%d")
+println(f"${4}%#10X")
+
+locally {
+ val fff = new java.util.Formattable {
+ def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4")
+ }
+ println(f"She is ${fff}%#s feet tall.")
+}
{
implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
@@ -103,4 +111,11 @@ println(f"${c.getTime.getTime}%TD")
implicit val strToDate = (x: String) => c
println(f"""${"1234"}%TD""")
+
+
+// literals and arg indexes
+println(f"%%")
+println(f"${7}%d %<d ${9}%d")
+println(f"${7}%d %2$$d ${9}%d")
+
}
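
The added lines exercise format-string features the f interpolator accepts here: a bare %% literal, the relative index %<, and an explicit argument index such as %2$d. A hedged sketch of the java.util.Formatter index semantics behind the expected "7 7 9" and "7 9 9" outputs (object name illustrative):

    object FormatIndexDemo extends App {
      println(f"%%")                   // "%"     : a literal percent, consumes no argument
      println(f"${7}%d %<d ${9}%d")    // "7 7 9" : %< re-uses the previous argument
      println(f"${7}%d %2$$d ${9}%d")  // "7 9 9" : %2$d picks the second argument; $$ escapes $
    }
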
diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala
index 019ddf7cd6..c6e976038d 100644
--- a/test/files/run/t7240/Macros_1.scala
+++ b/test/files/run/t7240/Macros_1.scala
@@ -41,7 +41,7 @@ object Bakery {
def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List())
c.eval(c.Expr[Any](
- c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor))))
+ c.untypecheck(Block(composeDSL(Literal(Constant(1))), constructor))))
c.Expr[Any](Literal(Constant(1)))
}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index d03ee3a6cf..b7443aa0c4 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -21,7 +21,7 @@ scala> convert(Some[Int](0))
--- because ---
argument expression's type is not compatible with formal parameter type;
found : Some[Int]
- required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } }
convert(Some[Int](0))
^
<console>:12: error: type mismatch;
diff --git a/test/files/run/t7700.check b/test/files/run/t7700.check
new file mode 100644
index 0000000000..ca8e686984
--- /dev/null
+++ b/test/files/run/t7700.check
@@ -0,0 +1,2 @@
+public abstract java.lang.Object C.bar(java.lang.Object)
+public abstract java.lang.Object C.foo(java.lang.Object)
diff --git a/test/files/run/t7700.scala b/test/files/run/t7700.scala
new file mode 100644
index 0000000000..76d16b808c
--- /dev/null
+++ b/test/files/run/t7700.scala
@@ -0,0 +1,17 @@
+import scala.annotation._
+
+trait C[@specialized U] {
+ @unspecialized
+ def foo(u: U): U
+ @unspecialized
+ def bar[A](u: U) = u
+}
+
+object Test extends App {
+ val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName)
+ println(declared.mkString("\n"))
+ object CInt extends C[Int] { def foo(i: Int) = i }
+ object CAny extends C[Any] { def foo(a: Any) = a }
+ assert(CInt.foo(1) == 1)
+ assert(CAny.foo("") == "")
+}
diff --git a/test/files/run/t8233-bcode.flags b/test/files/run/t8233-bcode.flags
new file mode 100644
index 0000000000..c30091d3de
--- /dev/null
+++ b/test/files/run/t8233-bcode.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode
diff --git a/test/files/run/t8233-bcode.scala b/test/files/run/t8233-bcode.scala
new file mode 100644
index 0000000000..fae1c2b702
--- /dev/null
+++ b/test/files/run/t8233-bcode.scala
@@ -0,0 +1,18 @@
+object Test {
+ def bar(s: String) = s;
+ val o: Option[Null] = None
+ def nullReference {
+ val a: Null = o.get
+ bar(a) // Was: VerifyError under GenICode
+ }
+
+ def literal {
+ val a: Null = null
+ bar(a)
+ }
+
+ def main(args: Array[String]) = {
+ try { nullReference } catch { case _: NoSuchElementException => }
+ literal
+ }
+}
diff --git a/test/files/run/t8233.scala b/test/files/run/t8233.scala
new file mode 100644
index 0000000000..fae1c2b702
--- /dev/null
+++ b/test/files/run/t8233.scala
@@ -0,0 +1,18 @@
+object Test {
+ def bar(s: String) = s;
+ val o: Option[Null] = None
+ def nullReference {
+ val a: Null = o.get
+ bar(a) // Was: VerifyError under GenICode
+ }
+
+ def literal {
+ val a: Null = null
+ bar(a)
+ }
+
+ def main(args: Array[String]) = {
+ try { nullReference } catch { case _: NoSuchElementException => }
+ literal
+ }
+}
diff --git a/test/files/run/t8245.scala b/test/files/run/t8245.scala
new file mode 100644
index 0000000000..d44defbb9e
--- /dev/null
+++ b/test/files/run/t8245.scala
@@ -0,0 +1,14 @@
+object Test {
+ def foo(o: Option[Int]): Int = {
+ lazy val i: Int = {
+ def local: Int = {if ("".isEmpty) return 42; -42}
+ assert(local == 42)
+ o.getOrElse(return -1)
+ }
+ i + 1
+ }
+
+ def main(args: Array[String]) {
+ assert(foo(None) == -1)
+ }
+}
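
t8245 exercises a non-local return evaluated inside a lazy val initializer, via the by-name argument to getOrElse. For context, a hedged sketch of the underlying mechanism in isolation: a `return` inside a closure unwinds, via NonLocalReturnControl, out of the enclosing method. Names are illustrative:

    object NonLocalReturnDemo extends App {
      def firstNegative(xs: List[Int]): Option[Int] = {
        // `return` here throws NonLocalReturnControl, cutting foreach short
        // and returning from firstNegative itself.
        xs.foreach(x => if (x < 0) return Some(x))
        None
      }

      println(firstNegative(List(1, 2, -3, 4)))  // Some(-3)
      println(firstNegative(List(1, 2, 3)))      // None
    }
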
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
index 3166eb7a99..dcd4f63a4d 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
@@ -8,7 +8,8 @@ object DefinitionConstructionProps
with TypeDefConstruction
with ValDefConstruction
with DefConstruction
- with PackageConstruction {
+ with PackageConstruction
+ with ImportConstruction {
property("SI-6842") = test {
val x: Tree = q"val x: Int"
assertEqAst(q"def f($x) = 0", "def f(x: Int) = 0")
@@ -229,13 +230,13 @@ trait MethodConstruction { self: QuasiquoteProperties =>
property("splice type name into annotation") = test {
val name = TypeName("annot")
- assertSameAnnots(q"@$name def foo", List(annot(name)))
+ assertSameAnnots(q"@$name def foo", List(q"new $name"))
}
property("splice ident into annotation") = test {
val name = TypeName("annot")
val ident = Ident(name)
- assertSameAnnots(q"@$ident def foo", List(annot(name)))
+ assertSameAnnots(q"@$ident def foo", List(q"new $name"))
}
property("splice idents into annotation") = test {
@@ -245,36 +246,36 @@ trait MethodConstruction { self: QuasiquoteProperties =>
}
property("splice constructor calls into annotation") = test {
- val ctorcalls = List(annot("a1"), annot("a2"))
+ val ctorcalls = List(q"new a1", q"new a2")
assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
}
property("splice multiple annotations (1)") = test {
- val annot1 = annot("a1")
- val annot2 = annot("a2")
+ val annot1 = q"new a1"
+ val annot2 = q"new a2"
val res = q"@$annot1 @$annot2 def foo"
assertSameAnnots(res, List(annot1, annot2))
}
property("splice multiple annotations (2)") = test {
- val annot1 = annot("a1")
- val annots = List(annot("a2"), annot("a3"))
+ val annot1 = q"new a1"
+ val annots = List(q"new a2", q"new a3")
val res = q"@$annot1 @..$annots def foo"
assertSameAnnots(res, annot1 :: annots)
}
property("splice annotations with arguments (1)") = test {
- val a = annot("a", List(q"x"))
+ val a = q"new a(x)"
assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
}
property("splice annotations with arguments (2)") = test {
- val a = newTypeName("a")
+ val a = TypeName("a")
assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
}
property("splice annotations with arguments (3") = test {
- val a = Ident(newTypeName("a"))
+ val a = Ident(TypeName("a"))
assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
}
@@ -286,7 +287,7 @@ trait MethodConstruction { self: QuasiquoteProperties =>
}
property("can't splice annotations with arguments specificed twice") = test {
- val a = annot("a", List(q"x"))
+ val a = q"new a(x)"
assertThrows[IllegalArgumentException] {
q"@$a(y) def foo"
}
@@ -363,3 +364,30 @@ trait DefConstruction { self: QuasiquoteProperties =>
assertEqAst(q"def foo(implicit ..$xs) = x1 + x2", "def foo(implicit x1: Int, x2: Long) = x1 + x2")
}
}
+
+trait ImportConstruction { self: QuasiquoteProperties =>
+ property("construct wildcard import") = test {
+ val sel = pq"_"
+ assert(q"import foo.$sel" ≈ q"import foo._")
+ }
+
+ property("construct named import") = test {
+ val sel = pq"bar"
+ assert(q"import foo.$sel" ≈ q"import foo.bar")
+ }
+
+ property("construct renaming import") = test {
+ val sel = pq"bar -> baz"
+ assert(q"import foo.$sel" ≈ q"import foo.{bar => baz}")
+ }
+
+ property("construct unimport import") = test {
+ val sels = pq"poison -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{poison => _, _}")
+ }
+
+ property("construct mixed import") = test {
+ val sels = pq"a -> b" :: pq"c -> _" :: pq"_" :: Nil
+ assert(q"import foo.{..$sels}" ≈ q"import foo.{a => b, c => _, _}")
+ }
+}
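
The new ImportConstruction trait relies on import selectors being spliced as pattern trees: pq"_" for a wildcard, pq"bar" for a plain name, pq"bar -> baz" for a rename, pq"bar -> _" for an unimport. A hedged sketch of the same idea outside the test harness, against the runtime universe (object name illustrative):

    import scala.reflect.runtime.universe._

    object ImportSplicingDemo extends App {
      // Build `import foo.{bar => baz, _}` from individually spliced selectors.
      val sels = List(pq"bar -> baz", pq"_")
      val imp  = q"import foo.{..$sels}"
      println(showCode(imp))   // roughly: import foo.{bar=>baz, _}
    }
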
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
index 209fe9bbeb..e2d1757d48 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
@@ -10,6 +10,7 @@ object DefinitionDeconstructionProps
with ValVarDeconstruction
with DefDeconstruction
with PackageDeconstruction
+ with ImportDeconstruction
trait TraitDeconstruction { self: QuasiquoteProperties =>
property("exhaustive trait matcher") = test {
@@ -125,18 +126,28 @@ trait ModsDeconstruction { self: QuasiquoteProperties =>
}
property("@..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
+ val a = q"new a"
+ val b = q"new b"
val q"@..$annots def foo" = q"@$a @$b def foo"
annots ≈ List(a, b)
}
property("@$annot @..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
- val c = annot("c")
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
- first ≈ a && rest ≈ List(b, c)
+ assert(first ≈ a)
+ assert(rest ≈ List(b, c))
+ }
+
+ property("@..$anots @$annot def foo") = test {
+ val a = q"new a"
+ val b = q"new b"
+ val c = q"new c"
+ val q"@..$init @$last def foo" = q"@$a @$b @$c def foo"
+ assert(init ≈ List(a, b))
+ assert(last ≈ c)
}
}
@@ -209,3 +220,55 @@ trait DefDeconstruction { self: QuasiquoteProperties =>
assert(impl.isEmpty)
}
}
+
+trait ImportDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive import matcher") = test {
+ def matches(line: String) = {
+ val q"import $ref.{..$sels}" = parse(line)
+ }
+ matches("import foo.bar")
+ matches("import foo.{bar, baz}")
+ matches("import foo.{a => b, c => d}")
+ matches("import foo.{poision => _, _}")
+ matches("import foo.bar.baz._")
+ }
+
+ property("extract import binding") = test {
+ val q"import $_.$sel" = q"import foo.bar"
+ val pq"bar" = sel
+ }
+
+ property("extract import wildcard") = test {
+ val q"import $_.$sel" = q"import foo._"
+ val pq"_" = sel
+ }
+
+ property("extract import rename") = test {
+ val q"import $_.$sel" = q"import foo.{bar => baz}"
+ val pq"bar -> baz" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"baz" = right
+ }
+
+ property("extract import unimport") = test {
+ val q"import $_.$sel" = q"import foo.{bar => _}"
+ val pq"bar -> _" = sel
+ val pq"$left -> $right" = sel
+ val pq"bar" = left
+ val pq"_" = right
+ }
+
+ property("splice names into import selector") = forAll {
+ (expr: Tree, plain: TermName, oldname: TermName, newname: TermName, discard: TermName) =>
+
+ val Import(expr1, List(
+ ImportSelector(plain11, _, plain12, _),
+ ImportSelector(oldname1, _, newname1, _),
+ ImportSelector(discard1, _, wildcard, _))) =
+ q"import $expr.{$plain, $oldname => $newname, $discard => _}"
+
+ expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
+ oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD
+ }
+}
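
ImportDeconstruction goes the other way: a selector extracted from an import is itself a pattern tree, so a second pq match can split it. A hedged sketch mirroring the rename case above (runtime universe, illustrative names):

    import scala.reflect.runtime.universe._

    object ImportExtractDemo extends App {
      val q"import $qual.$sel" = q"import foo.{bar => baz}"
      // The rename selector splits into its two sides.
      val pq"$from -> $to" = sel
      println(showCode(qual))  // foo
      println(show(from))      // the `bar` half of the rename
      println(show(to))        // the `baz` half
    }
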
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
index 3a66574c7d..1ba9bba381 100644
--- a/test/files/scalacheck/quasiquotes/ErrorProps.scala
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -32,12 +32,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
q"@...$annots def foo"
""")
- property("@..$first @$rest def foo") = fails(
- "Can't extract with .. here",
- """
- q"@a @b @c def foo" match { case q"@..$first @$rest def foo" => }
- """)
-
property("only literal string arguments") = fails(
"Quasiquotes can only be used with literal strings",
"""
@@ -140,12 +134,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
q"$m1 $m2 def foo"
""")
- property("can't extract with .. card here") = fails(
- "Can't extract with .. here",
- """
- val q"f(..$xs, $y)" = EmptyTree
- """)
-
property("can't extract mods with annots") = fails(
"Can't extract modifiers together with annotations, consider extracting just modifiers",
"""
diff --git a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
index cccf8095db..c8e66c7ef5 100644
--- a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
@@ -22,8 +22,18 @@ object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstr
pat0 ≈ pat && subpat0 ≈ subpat
}
+ property("extract apply many") = forAll { (pat: Tree, subpats: List[Tree]) =>
+ val pq"$pat0(..$subpats0)" = pq"$pat(..$subpats)"
+ pat0 ≈ pat && subpats0 ≈ subpats
+ }
+
+ property("extract apply last") = forAll { (pat: Tree, subpats: List[Tree], subpatlast: Tree) =>
+ val pq"$pat0(..$subpats0, $subpatlast0)" = pq"$pat(..$subpats, $subpatlast)"
+ pat0 ≈ pat && subpats0 ≈ subpats && subpatlast0 ≈ subpatlast
+ }
+
property("extract casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
val cq"$pat0 if $cond0 => $body0" = cq"$pat if $cond => $body"
pat0 ≈ pat && cond0 ≈ cond && body0 ≈ body
}
-} \ No newline at end of file
+}
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
index e4ee5dfcae..589b8d4d72 100644
--- a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
+++ b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
@@ -116,10 +116,5 @@ trait Helpers {
}
}
- def annot(name: String): Tree = annot(TypeName(name), Nil)
- def annot(name: TypeName): Tree = annot(name, Nil)
- def annot(name: String, args: List[Tree]): Tree = annot(TypeName(name), args)
- def annot(name: TypeName, args: List[Tree]): Tree = q"new $name(..$args)"
-
val scalapkg = build.setSymbol(Ident(TermName("scala")), definitions.ScalaPackage)
}
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
index 54187d68c2..058880a25c 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -85,19 +85,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun)
}
- property("splice names into import selector") = forAll {
- (expr: Tree, plain: Name, oldname: Name, newname: Name, discard: Name) =>
-
- val Import(expr1, List(
- ImportSelector(plain11, _, plain12, _),
- ImportSelector(oldname1, _, newname1, _),
- ImportSelector(discard1, _, wildcard, _))) =
- q"import $expr.{$plain, $oldname => $newname, $discard => _}"
-
- expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
- oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD
- }
-
property("splice trees into while loop") = forAll { (cond: Tree, body: Tree) =>
val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body"
body1 ≈ body && cond1 ≈ cond
@@ -291,4 +278,9 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
val stats2 = List.empty[Tree]
assert(q"{ ..$stats2 }" ≈ q"")
}
+
+ property("consistent variable order") = test {
+ val q"$a = $b = $c = $d = $e = $f = $g = $h = $k = $l" = q"a = b = c = d = e = f = g = h = k = l"
+ assert(a ≈ q"a" && b ≈ q"b" && c ≈ q"c" && d ≈ q"d" && e ≈ q"e" && g ≈ q"g" && h ≈ q"h" && k ≈ q"k" && l ≈ q"l")
+ }
}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
index 8d1ada342a..148bb383b0 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -29,14 +29,34 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
y1 ≈ x1 && y2 ≈ x2 && ys ≈ List(x3)
}
+ property("f(y1, ..ys, yn)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f($y1, ..$ys, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ y1 ≈ x1 && ys ≈ List(x2, x3) && yn ≈ x4
+ }
+
+ property("f(..ys, y_{n-1}, y_n)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(..$ys, $yn1, $yn)" = q"f($x1, $x2, $x3, $x4)"
+ ys ≈ List(x1, x2) && yn1 ≈ x3 && yn ≈ x4
+ }
+
property("f(...xss)") = forAll { (x1: Tree, x2: Tree) =>
- val q"f(...$argss)" = q"f($x1)($x2)"
- argss ≈ List(List(x1), List(x2))
+ val q"f(...$xss)" = q"f($x1)($x2)"
+ xss ≈ List(List(x1), List(x2))
+ }
+
+ property("f(...$xss)(..$last)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f(...$xss)(..$last)" = q"f($x1)($x2)($x3)"
+ xss ≈ List(List(x1), List(x2)) && last ≈ List(x3)
+ }
+
+ property("f(...$xss)(..$lastinit, $lastlast)") = forAll { (x1: Tree, x2: Tree, x3: Tree, x4: Tree) =>
+ val q"f(...$xss)(..$lastinit, $lastlast)" = q"f($x1)($x2, $x3, $x4)"
+ xss ≈ List(List(x1)) && lastinit ≈ List(x2, x3) && lastlast ≈ x4
}
property("f(...xss) = f") = forAll { (x1: Tree, x2: Tree) =>
- val q"f(...$argss)" = q"f"
- argss ≈ List()
+ val q"f(...$xss)" = q"f"
+ xss ≈ List()
}
property("deconstruct unit as tuple") = test {
@@ -51,12 +71,27 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
property("deconstruct tuple mixed") = test {
val q"($first, ..$rest)" = q"(a, b, c)"
- assert(first ≈ q"a" && rest ≈ List(q"b", q"c"))
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ }
+
+ property("deconstruct tuple last element") = test {
+ val q"($first, ..$rest, $last)" = q"(a, b, c, d)"
+ assert(first ≈ q"a")
+ assert(rest ≈ List(q"b", q"c"))
+ assert(last ≈ q"d")
}
property("deconstruct cases") = test {
val q"$x match { case ..$cases }" = q"x match { case 1 => case 2 => }"
- x ≈ q"x" && cases ≈ List(cq"1 =>", cq"2 =>")
+ assert(x ≈ q"x")
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ }
+
+ property("deconstruct splitting last case") = test {
+ val q"$_ match { case ..$cases case $last }" = q"x match { case 1 => case 2 => case 3 => }"
+ assert(cases ≈ List(cq"1 =>", cq"2 =>"))
+ assert(last ≈ cq"3 =>")
}
property("deconstruct block") = test {
@@ -64,6 +99,12 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
assert(xs ≈ List(q"x1", q"x2", q"x3"))
}
+ property("deconstruct last element of a block") = test {
+ val q"{ ..$xs; $x }" = q"x1; x2; x3; x4"
+ assert(xs ≈ List(q"x1", q"x2", q"x3"))
+ assert(x ≈ q"x4")
+ }
+
property("exhaustive function matcher") = test {
def matches(line: String) { val q"(..$args) => $body" = parse(line) }
matches("() => bippy")
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
index 0984032084..78b54a4e49 100644
--- a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
@@ -30,7 +30,7 @@ object TypeConstructionProps extends QuasiquoteProperties("type construction")
}
property("empty tq") = test {
- val tt: TypeTree = tq" "
+ val tt: TypeTree = tq""
assert(tt.tpe == null)
assert(tt.original == null)
}
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
index 499f5d6d8e..0fdcc19052 100644
--- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
@@ -13,23 +13,49 @@ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction
a ≈ Ident(name1) && b ≈ Ident(name2)
}
- property("tuple type") = test {
+ property("tuple type (1)") = test {
val tq"(..$empty)" = tq"_root_.scala.Unit"
assert(empty.isEmpty)
+ }
+
+ property("tuple type (2)") = test {
val tq"(..$ts)" = tq"(t1, t2)"
assert(ts ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (3)") = test {
val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
- assert(head ≈ tq"t0" && tail ≈ List(tq"t1", tq"t2"))
+ assert(head ≈ tq"t0")
+ assert(tail ≈ List(tq"t1", tq"t2"))
+ }
+
+ property("tuple type (4)") = test {
+ val tq"(..$init, $last)" = tq"(t0, t1, t2)"
+ assert(init ≈ List(tq"t0", tq"t1"))
+ assert(last ≈ tq"t2")
}
property("refined type") = test {
val tq"T { ..$stats }" = tq"T { def foo; val x: Int; type Y = String }"
- assert(stats ≈ (q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil))
+ assert(stats ≈ List(q"def foo", q"val x: Int", q"type Y = String"))
}
- property("function type") = test {
+ property("function type (1)") = test {
val tq"..$argtpes => $restpe" = tq"(A, B) => C"
- assert(argtpes ≈ (tq"A" :: tq"B" :: Nil))
+ assert(argtpes ≈ List(tq"A", tq"B"))
assert(restpe ≈ tq"C")
}
-} \ No newline at end of file
+
+ property("function type (2)") = test {
+ val tq"(..$argtpes, $arglast) => $restpe" = tq"(A, B, C) => D"
+ assert(argtpes ≈ List(tq"A", tq"B"))
+ assert(arglast ≈ tq"C")
+ assert(restpe ≈ tq"D")
+ }
+
+ property("match empty type tree") = test {
+ val tq"" = TypeTree()
+ // matches because type tree isn't syntactic without original
+ val tq"" = tq"${typeOf[Int]}"
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
index 2f501435e3..3afb47952c 100644
--- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
@@ -75,4 +75,11 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked") {
assert(f.original ≈ pq"Test.this.Cell")
assert(args ≈ List(pq"v"))
}
-} \ No newline at end of file
+
+ property("extract inferred val type") = test {
+ val typechecked = typecheck(q"val x = 42")
+ val q"val x = 42" = typechecked
+ val q"val x: ${tq""} = 42" = typechecked
+ val q"val x: ${t: Type} = 42" = typechecked
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index 7bb8ca958b..0d6f43db06 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -478,7 +478,7 @@ class SetMapConsistencyTest {
}
@Test
- def si8213() {
+ def testSI8213() {
val am = new scala.collection.mutable.AnyRefMap[String, Int]
for (i <- 0 until 1024) am += i.toString -> i
am.getOrElseUpdate("1024", { am.clear; -1 })
@@ -488,4 +488,23 @@ class SetMapConsistencyTest {
lm.getOrElseUpdate(1024, { lm.clear; -1 })
assert(lm == scala.collection.mutable.LongMap(1024L -> -1))
}
+
+ // Mutating when an iterator is in the wild shouldn't produce random junk in the iterator
+ // Todo: test all sets/maps this way
+ @Test
+ def testSI8154() {
+ def f() = {
+ val xs = scala.collection.mutable.AnyRefMap[String, Int]("a" -> 1)
+ val it = xs.iterator
+ it.hasNext
+ xs.clear()
+
+ if (it.hasNext) Some(it.next)
+ else None
+ }
+ assert(f() match {
+ case Some((a,b)) if (a==null || b==null) => false
+ case _ => true
+ })
+ }
}
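
testSI8154 pins down the contract behind the AnyRefMap change: once the map is cleared, a live iterator may stop early but must not surface null keys or values. As a usage note, the robust pattern when iteration and mutation must mix is to snapshot first; a hedged sketch, not taken from the patch:

    import scala.collection.mutable.AnyRefMap

    object SnapshotBeforeMutateDemo extends App {
      val xs = AnyRefMap("a" -> 1, "b" -> 2)
      // Materialise the entries before mutating, so clear() cannot disturb
      // what is being iterated.
      val snapshot = xs.toList
      xs.clear()
      snapshot.foreach { case (k, v) => println(s"$k -> $v") }
    }
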
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
index 68fedb7c83..d601f04f89 100644
--- a/test/osgi/src/BasicReflection.scala
+++ b/test/osgi/src/BasicReflection.scala
@@ -2,6 +2,8 @@ package tools.test.osgi
package reflection
package basic
+import scala.language.higherKinds
+
import org.junit.Assert._
import org.ops4j.pax.exam.CoreOptions._
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
index 7b14cf20e8..084afe8643 100644
--- a/test/osgi/src/ScalaOsgiHelper.scala
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -1,5 +1,5 @@
package tools.test.osgi
-
+
import org.ops4j.pax.exam.CoreOptions._
import org.ops4j.pax.exam
import java.io.File
@@ -12,7 +12,7 @@ trait ScalaOsgiHelper {
}
private def filteredBundleFiles(names: String*): Array[exam.Option] =
- for(bundle <- allBundleFiles; if names exists (bundle.getName contains))
+ for(bundle <- allBundleFiles; if names exists (bundle.getName contains _))
yield makeBundle(bundle)
private def makeBundle(file: File): exam.Option =
@@ -34,5 +34,5 @@ trait ScalaOsgiHelper {
val bundles = filteredBundleFiles("scala-library")
bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
}
-
+
}
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
index e673da5a29..b26c442599 100644
--- a/test/pending/run/idempotency-partial-functions.scala
+++ b/test/pending/run/idempotency-partial-functions.scala
@@ -22,7 +22,7 @@ object Test extends App {
val tb = cm.mkToolBox()
val tpartials = tb.typecheck(partials.tree)
println(tpartials)
- val rtpartials = tb.resetAllAttrs(tpartials)
+ val rtpartials = tb.untypecheck(tpartials)
println(tb.eval(rtpartials))
}
Test.main(null) \ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_0.scala b/test/scaladoc/resources/SI-4014_0.scala
new file mode 100644
index 0000000000..c398fcc1e0
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_0.scala
@@ -0,0 +1,4 @@
+/** A template without authors.
+ *
+ */
+trait Foo \ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_1.scala b/test/scaladoc/resources/SI-4014_1.scala
new file mode 100644
index 0000000000..34386b515e
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_1.scala
@@ -0,0 +1,5 @@
+/** A template with one author.
+ *
+ * @author The Only Author
+ */
+trait Foo \ No newline at end of file
diff --git a/test/scaladoc/resources/SI-4014_2.scala b/test/scaladoc/resources/SI-4014_2.scala
new file mode 100644
index 0000000000..514f7a1e4c
--- /dev/null
+++ b/test/scaladoc/resources/SI-4014_2.scala
@@ -0,0 +1,6 @@
+/** A template with more than one author.
+ *
+ * @author The First Author
+ * @author The Second Author
+ */
+trait Foo \ No newline at end of file
diff --git a/test/scaladoc/run/t7124.check b/test/scaladoc/run/t7124.check
new file mode 100644
index 0000000000..96b627a322
--- /dev/null
+++ b/test/scaladoc/run/t7124.check
@@ -0,0 +1,3 @@
+List()
+List(Paragraph(Text(macro)))
+Done.
diff --git a/test/scaladoc/run/t7124.scala b/test/scaladoc/run/t7124.scala
new file mode 100644
index 0000000000..e8272000d2
--- /dev/null
+++ b/test/scaladoc/run/t7124.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.language.experimental.macros
+ class Test {
+ def print(): Unit = macro ???
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val p = root._class("Test")._method("print")
+
+ println(p.annotations) // no annotations
+ println(p.flags) // a 'macro' flag
+ }
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 03348b81d2..56328ea875 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -47,6 +47,7 @@ object Test extends Properties("HtmlFactory") {
settings.scaladocQuietRun = true
settings.nowarn.value = true
settings.classpath.value = getClasspath
+ settings.docAuthor.value = true
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
new DocFactory(reporter, settings)
@@ -563,12 +564,13 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for override") =
checkText("explicit-inheritance-override.scala")(
(Some("InheritDocDerived"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
Starting line
Starting line
The base comment. And another sentence...
The base comment. And another sentence...
Ending line
+ Author: StartAuthor a Scala developer EndAuthor
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
@@ -589,12 +591,13 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for usecase") =
checkText("explicit-inheritance-usecase.scala")(
(Some("UseCaseInheritDoc"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
[use case] Starting line
[use case] Starting line
The base comment. And another sentence...
The base comment. And another sentence...
Ending line
+ Author: StartAuthor a Scala developer EndAuthor
T StartT the type of the first argument EndT
arg1 Start1 The T term comment End1
arg2 Start2 The string comment End2
@@ -663,6 +666,45 @@ object Test extends Properties("HtmlFactory") {
}
}
+ property("SI-4014: Scaladoc omits @author: no authors") = {
+ val noAuthors = createTemplates("SI-4014_0.scala")("Foo.html")
+
+ noAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("Author")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: one author") = {
+ val oneAuthor = createTemplates("SI-4014_1.scala")("Foo.html")
+
+ oneAuthor match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Author:</h6>")
+ s.contains("<p>The Only Author\n</p>")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4014: Scaladoc omits @author: two authors") = {
+ val twoAuthors = createTemplates("SI-4014_2.scala")("Foo.html")
+
+ twoAuthors match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<h6>Authors:</h6>")
+ s.contains("<p>The First Author\n</p>")
+ s.contains("<p>The Second Author\n</p>")
+ }
+ case _ => false
+ }
+ }
+
{
val files = createTemplates("basic.scala")
//println(files)