Diffstat (limited to 'src')
-rw-r--r--  src/build/maven/maven-deploy.xml | 21
-rw-r--r--  src/build/pack.xml | 26
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Names.scala | 4
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 238
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/util/FreshNameCreator.scala | 40
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 5
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Holes.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala | 32
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala | 21
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala | 14
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala | 113
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala | 35
-rw-r--r--  src/reflect/scala/reflect/api/Quasiquotes.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 80
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 35
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 48
-rw-r--r--  src/reflect/scala/reflect/internal/Scopes.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 21
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 13
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 49
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 30
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/GlbLubs.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala | 64
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala | 43
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/util/FreshNameCreator.scala | 27
-rw-r--r--  src/reflect/scala/reflect/runtime/Gil.scala | 25
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 52
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 74
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 496
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolLoaders.scala | 91
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedOps.scala | 53
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala | 129
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedTypes.scala | 85
-rw-r--r--  src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala | 28
-rw-r--r--  src/reflect/scala/reflect/runtime/TwoWayCaches.scala | 68
48 files changed, 1578 insertions(+), 621 deletions(-)
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 946b712b6c..fbd6d4cd51 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -74,12 +74,21 @@
<artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
</artifact:deploy>
</then><else>
- <artifact:install file="${path}.jar">
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{name}.pom" />
- <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
- </artifact:install>
+ <if><isset property="docs.skip"/><then>
+ <artifact:install file="${path}.jar">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ </artifact:install>
+ </then>
+ <else>
+ <artifact:install file="${path}.jar">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ </artifact:install>
+ </else>
+ </if>
</else></if>
</then><else>
<local name="repo"/>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index ed628726fb..4c5ba228fc 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -56,7 +56,7 @@ MAIN DISTRIBUTION PACKAGING
<checksum file="${dists.dir}/archives/${dist.name}.zip" fileext=".md5"/>
</target>
- <target name="pack-devel-docs.tar" depends="pack-archives.zip">
+ <target name="pack-devel-docs.tar" depends="pack-archives.zip" unless="docs.skip">
<tar destfile="${dists.dir}/archives/${dist.name}-devel-docs.tar"
compression="none" longfile="gnu">
<tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
@@ -119,11 +119,11 @@ MAIN DISTRIBUTION PACKAGING
`resource` is relative to directory of `link` -->
<symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
resource="scala-${version.number}-sources.tgz"
- overwrite="yes"/>
+ overwrite="true"/>
</target>
<target name="pack-archives.latest.win" depends="pack-archives.src" if="os.win">
- <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz">
+ <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz" overwrite="true">
<fileset dir="${dists.dir}/archives">
<include name="scala-${version.number}-sources.tgz"/>
</fileset>
@@ -141,7 +141,7 @@ MAIN DISTRIBUTION PACKAGING
<attribute name="mvn.artifact.name"/>
<sequential>
<mkdir dir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}"/>
- <copy todir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}">
+ <copy verbose="true" overwrite="true" todir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}">
<fileset dir="${dist.dir}/lib/">
<filename name="@{mvn.artifact.name}.jar"/>
</fileset>
@@ -167,7 +167,7 @@ MAIN DISTRIBUTION PACKAGING
<attribute name="mvn.artifact.name"/>
<sequential>
<mkdir dir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}"/>
- <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}">
+ <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}" overwrite="true">
<fileset dir="${dist.dir}/misc/scala-devel/plugins/">
<filename name="@{mvn.artifact.name}.jar"/>
</fileset>
@@ -189,7 +189,7 @@ MAIN DISTRIBUTION PACKAGING
</jar>
</target>
- <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
+ <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins" unless="docs.skip">
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
@@ -209,30 +209,30 @@ MAIN DISTRIBUTION PACKAGING
<!-- TODO - Scala swing and actors should maybe have thier own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar" overwrite="true"/>
<copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar" overwrite="true"/>
<copy tofile="${dists.dir}/maven/${version.number}/scala-reflect/scala-reflect-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar" overwrite="true"/>
</target>
<target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
<symlink link="${dists.dir}/maven/latest"
resource="${version.number}"
- overwrite="yes"/>
+ overwrite="true"/>
</target>
<target name="pack-maven.latest.win" depends="pack-maven.docs" if="os.win">
- <copy todir="${dists.dir}/maven/latest">
+ <copy todir="${dists.dir}/maven/latest" overwrite="true">
<fileset dir="${dists.dir}/maven/${version.number}"/>
</copy>
</target>
<target name="pack-maven.scripts" depends="pack-maven.latest.unix,pack-maven.latest.win,pack-maven.srcs">
<copy todir="${dists.dir}/maven/${version.number}"
- file="${lib-ant.dir}/ant-contrib.jar"/>
+ file="${lib-ant.dir}/ant-contrib.jar" overwrite="true"/>
<copy todir="${dists.dir}/maven/${version.number}"
- file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar"/>
+ file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" overwrite="true"/>
<copy tofile="${dists.dir}/maven/${version.number}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
<!-- export properties for use when deploying -->
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index e535754a98..c2f14cf0f1 100644
--- a/src/compiler/scala/reflect/macros/contexts/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -4,7 +4,7 @@ package contexts
trait Names {
self: Context =>
- lazy val freshNameCreator = callsiteTyper.context.unit.fresh
+ def freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
freshName()
@@ -16,7 +16,7 @@ trait Names {
freshName[NameType](name)
def freshName(): String =
- freshNameCreator.newName()
+ freshName("fresh$")
def freshName(name: String): String =
freshNameCreator.newName(name)
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index ae6488b5a8..88cfea8157 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -11,7 +11,7 @@ trait Parsers {
val sreporter = new StoreReporter()
val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
val parser = newUnitParser(unit)
- val tree = gen.mkTreeOrBlock(parser.parseStats())
+ val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
sreporter.infos.foreach {
case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 1de5c1f626..df5952a4cf 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,8 +5,7 @@
package scala.tools.nsc
-import util.FreshNameCreator
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
import scala.tools.nsc.reporters.Reporter
@@ -27,10 +26,9 @@ trait CompilationUnits { global: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- val fresh: FreshNameCreator = new FreshNameCreator.Default
-
- def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
+ implicit val fresh: FreshNameCreator = new FreshNameCreator
+ def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
+ def freshTypeName(prefix: String) = global.freshTypeName(prefix)
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
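
Note: the rewritten CompilationUnit above keeps only an implicit FreshNameCreator and forwards freshTermName/freshTypeName to global. The symbol-table side of that delegation is listed in the diffstat (SymbolTable.scala) but not shown in this excerpt; the following is a hedged sketch of the shape the call sites imply, with the "x$" default taken from this diff.

// Sketch only: assumed signatures for the helpers that CompilationUnit
// delegates to; not the actual SymbolTable.scala change from this commit.
import scala.reflect.internal.SymbolTable
import scala.reflect.internal.util.FreshNameCreator

trait FreshNameSketch { self: SymbolTable =>
  def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName =
    newTermName(creator.newName(prefix))
  def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName =
    newTypeName(creator.newName(prefix))
}
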
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 1cd3e0ec4b..1c5354502b 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -110,9 +110,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** A spare instance of TreeBuilder left for backwards compatibility. */
- lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new UnitTreeBuilder {
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new TreeBuilder {
val global: Global.this.type = Global.this;
- val unit = currentUnit
+ def unit = currentUnit
+ def source = currentUnit.source
}
/** Fold constants */
@@ -1049,6 +1050,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentRun: Run = curRun
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+ def currentFreshNameCreator = currentUnit.fresh
def isGlobalInitialized = (
definitions.isDefinitionsInitialized
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 34f3fcce9f..1d5f35b7d6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -13,9 +13,8 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, Position }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
import Tokens._
-import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -41,11 +40,8 @@ trait ParsersCommon extends ScannersCommon { self =>
*/
abstract class ParserCommon {
val in: ScannerCommon
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def deprecationWarning(off: Int, msg: String): Unit
- def accept(token: Int): Int
+ def deprecationWarning(off: Offset, msg: String): Unit
+ def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -164,21 +160,13 @@ self =>
val in = newScanner()
in.init()
- private val globalFresh = new FreshNameCreator.Default
-
def unit = global.currentUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
- def o2p(offset: Int): Position = Position.offset(source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
// suppress warnings; silent abort on errors
- def warning(offset: Int, msg: String) {}
- def deprecationWarning(offset: Int, msg: String) {}
+ def warning(offset: Offset, msg: String) {}
+ def deprecationWarning(offset: Offset, msg: String) {}
- def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
@@ -225,14 +213,11 @@ self =>
override def newScanner() = new UnitScanner(unit, patches)
- override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
- override def warning(offset: Int, msg: String) {
+ override def warning(offset: Offset, msg: String) {
unit.warning(o2p(offset), msg)
}
- override def deprecationWarning(offset: Int, msg: String) {
+ override def deprecationWarning(offset: Offset, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
@@ -250,7 +235,7 @@ self =>
for ((offset, msg) <- syntaxErrors)
unit.error(o2p(offset), msg)
- override def syntaxError(offset: Int, msg: String) {
+ override def syntaxError(offset: Offset, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
else unit.error(o2p(offset), msg)
}
@@ -274,9 +259,10 @@ self =>
}
}
- final val Local = 0
- final val InBlock = 1
- final val InTemplate = 2
+ type Location = Int
+ final val Local: Location = 0
+ final val InBlock: Location = 1
+ final val InTemplate: Location = 2
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
@@ -295,23 +281,26 @@ self =>
abstract class Parser extends ParserCommon { parser =>
val in: Scanner
-
def unit: CompilationUnit
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, mid: Int, end: Int): Position
+ def source: SourceFile
- /** whether a non-continuable syntax error has been seen */
- private var lastErrorOffset : Int = -1
-
- class ParserTreeBuilder extends UnitTreeBuilder {
+ class ParserTreeBuilder extends TreeBuilder {
val global: self.global.type = self.global
def unit = parser.unit
+ def source = parser.source
}
val treeBuilder = new ParserTreeBuilder
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+ implicit def fresh: FreshNameCreator = unit.fresh
+
+ def o2p(offset: Offset): Position = Position.offset(source, offset)
+ def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+ def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
+ def r2p(offset: Offset): Position = r2p(offset, offset)
+
+ /** whether a non-continuable syntax error has been seen */
+ private var lastErrorOffset : Int = -1
/** The types of the context bounds of type parameters of the surrounding class
*/
@@ -344,9 +333,10 @@ self =>
*/
def parse(): Tree = parseRule(_.parseStartRule())
- /** This is alternative entry point for repl, script runner, toolbox and quasiquotes.
+ /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
*/
def parseStats(): List[Tree] = parseRule(_.templateStats())
+ def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
@@ -507,7 +497,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Int) {
+ protected def skip(targetToken: Token) {
var nparens = 0
var nbraces = 0
while (true) {
@@ -535,17 +525,17 @@ self =>
in.nextToken()
}
}
- def warning(offset: Int, msg: String): Unit
+ def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
syntaxError(pos pointOrElse in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String): Unit
+ def syntaxError(offset: Offset, msg: String): Unit
def syntaxError(msg: String, skipIt: Boolean) {
syntaxError(in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
// no more errors on this token.
@@ -569,10 +559,10 @@ self =>
}
def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
- def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
+ def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
- def accept(token: Int): Int = {
+ def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
@@ -632,8 +622,6 @@ self =>
def isAnnotation: Boolean = in.token == AT
- def isCaseDefStart: Boolean = in.token == CASE
-
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -660,14 +648,14 @@ self =>
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
- def isLiteralToken(token: Int) = token match {
+ def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
- def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+ def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -676,7 +664,7 @@ self =>
def isExprIntro: Boolean = isExprIntroToken(in.token)
- def isTypeIntroToken(token: Int): Boolean = token match {
+ def isTypeIntroToken(token: Token): Boolean = token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
@@ -684,7 +672,9 @@ self =>
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
- def isStatSep(token: Int): Boolean =
+ def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+ def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
@@ -699,10 +689,10 @@ self =>
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T = atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T = atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T = atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
+ def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
@@ -740,7 +730,7 @@ self =>
}
/** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
- final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -783,7 +773,7 @@ self =>
}
}
- def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+ def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) =
if (treeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
@@ -823,7 +813,7 @@ self =>
def argType(): Tree
def functionArgType(): Tree
- private def tupleInfixType(start: Int) = {
+ private def tupleInfixType(start: Offset) = {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
@@ -1060,7 +1050,7 @@ self =>
t
}
- def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+ def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
if (typeOK && in.token == TYPE) {
in.nextToken()
atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
@@ -1118,7 +1108,7 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = atPos(start) {
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
@@ -1160,7 +1150,7 @@ self =>
/** Consume a USCORE and create a fresh synthetic placeholder param. */
private def freshPlaceholder(): Tree = {
val start = in.offset
- val pname = freshName("x$")
+ val pname = freshTermName()
in.nextToken()
val id = atPos(start)(Ident(pname))
val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
@@ -1215,12 +1205,12 @@ self =>
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Int) {
+ def newLineOptWhenFollowedBy(token: Offset) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Int => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1235,7 +1225,7 @@ self =>
if (in.token == COLON) { in.nextToken(); typ() }
else TypeTree()
- def typeOrInfixType(location: Int): Tree =
+ def typeOrInfixType(location: Location): Tree =
if (location == Local) typ()
else startInfixType()
@@ -1246,7 +1236,7 @@ self =>
* WildcardType ::= `_' TypeBounds
* }}}
*/
- def wildcardType(start: Int) = {
+ def wildcardType(start: Offset) = {
val pname = freshTypeName("_$")
val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
@@ -1272,7 +1262,7 @@ self =>
/* hook for IDE, unlike expression can be stubbed
* don't use for any tree that can be inspected in the parser!
*/
- def statement(location: Int): Tree = expr(location) // !!! still needed?
+ def statement(location: Location): Tree = expr(location) // !!! still needed?
/** {{{
* Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
@@ -1299,9 +1289,9 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = withPlaceholders(expr0(location), isAny = false)
+ def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
- def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+ def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
@@ -1326,7 +1316,7 @@ self =>
in.nextToken()
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
- if (isCaseDefStart) caseClauses()
+ if (in.token == CASE) caseClauses()
else catchFromExpr()
}
}
@@ -1459,7 +1449,7 @@ self =>
* }}}
*/
- def implicitClosure(start: Int, location: Int): Tree = {
+ def implicitClosure(start: Offset, location: Location): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
@@ -1637,7 +1627,7 @@ self =>
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
- if (isCaseDefStart) Match(EmptyTree, caseClauses())
+ if (in.token == CASE) Match(EmptyTree, caseClauses())
else block()
}
}
@@ -1723,7 +1713,7 @@ self =>
while (in.token == IF) enums += makeFilter(in.offset, guard())
}
- def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
+ def makeFilter(start: Offset, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
/* -------- PATTERNS ------------------------------------------- */
@@ -2245,7 +2235,7 @@ self =>
}
}
val nameOffset = in.offset
- // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+ // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
val pname: TypeName = wildcardOrIdent().toTypeName
val param = atPos(start, nameOffset) {
val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2287,7 +2277,7 @@ self =>
t setPos o2p(in.offset)
}
- def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
+ def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2406,7 +2396,7 @@ self =>
* | type [nl] TypeDcl
* }}}
*/
- def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+ def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
@@ -2457,7 +2447,6 @@ self =>
EmptyTree
}
def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
val trees =
makePatDef(newmods,
if (tp.isEmpty) p
@@ -2536,7 +2525,7 @@ self =>
}
}
- def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
var newmods = mods
// contextBoundBuf is for context bounded type parameters of the form
@@ -2619,7 +2608,7 @@ self =>
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
* }}}
*/
- def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
@@ -2652,7 +2641,7 @@ self =>
* | [override] trait TraitDef
* }}}
*/
- def tmplDef(pos: Int, mods: Modifiers): Tree = {
+ def tmplDef(pos: Offset, mods: Modifiers): Tree = {
if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
@@ -2676,7 +2665,7 @@ self =>
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
* }}}
*/
- def classDef(start: Int, mods: Modifiers): ClassDef = {
+ def classDef(start: Offset, mods: Modifiers): ClassDef = {
in.nextToken()
val nameOffset = in.offset
val name = identForType()
@@ -2716,7 +2705,7 @@ self =>
* ObjectDef ::= Id ClassTemplateOpt
* }}}
*/
- def objectDef(start: Int, mods: Modifiers): ModuleDef = {
+ def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
in.nextToken()
val nameOffset = in.offset
val name = ident()
@@ -2741,10 +2730,9 @@ self =>
*/
def packageObjectDef(start: Offset): PackageDef = {
val defn = objectDef(in.offset, NoMods)
- val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
- val pid = atPos(o2p(defn.pos.start))(Ident(defn.name))
-
- makePackaging(start, pid, module :: Nil)
+ val pidPos = o2p(defn.pos.startOrPoint)
+ val pkgPos = r2p(start, pidPos.point)
+ gen.mkPackageObject(defn, pidPos, pkgPos)
}
def packageOrPackageObject(start: Offset): Tree = (
if (in.token == OBJECT)
@@ -2756,7 +2744,7 @@ self =>
)
// TODO - eliminate this and use "def packageObjectDef" (see call site of this
// method for small elaboration.)
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
+ def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
case ModuleDef(mods, name, impl) =>
makePackaging(
start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
@@ -2831,7 +2819,7 @@ self =>
* TraitExtends ::= `extends' | `<:'
* }}}
*/
- def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
@@ -2894,14 +2882,26 @@ self =>
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging. */
- def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
- def makeEmptyPackage(start: Int, stats: List[Tree]): PackageDef = (
+ def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
)
+ def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ def default(tok: Token) =
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+ while (!isStatSeqEnd) {
+ stats ++= stat.applyOrElse(in.token, default)
+ acceptStatSepOpt()
+ }
+ stats.toList
+ }
+
/** {{{
* TopStatSeq ::= TopStat {semi TopStat}
* TopStat ::= Annotations Modifiers TmplDef
@@ -2911,24 +2911,15 @@ self =>
* |
* }}}
*/
- def topStatSeq(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- stats ++= (in.token match {
- case PACKAGE =>
- packageOrPackageObject(in.skipToken()) :: Nil
- case IMPORT =>
- in.flushDoc
- importClause()
- case x if isAnnotation || isTemplateIntro || isModifier =>
- joinComment(topLevelTmplDef :: Nil)
- case _ =>
- if (isStatSep) Nil
- else syntaxErrorOrIncompleteAnd("expected class or object definition", skipIt = true)(Nil)
- })
- acceptStatSepOpt()
- }
- stats.toList
+ def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+ def topStat: PartialFunction[Token, List[Tree]] = {
+ case PACKAGE =>
+ packageOrPackageObject(in.skipToken()) :: Nil
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isAnnotation || isTemplateIntro || isModifier =>
+ joinComment(topLevelTmplDef :: Nil)
}
/** {{{
@@ -2972,25 +2963,20 @@ self =>
* |
* }}}
*/
- def templateStats(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- if (in.token == IMPORT) {
- in.flushDoc
- stats ++= importClause()
- } else if (isDefIntro || isModifier || isAnnotation) {
- stats ++= joinComment(nonLocalDefOrDcl)
- } else if (isExprIntro) {
- in.flushDoc
- stats += statement(InTemplate)
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
- }
- acceptStatSepOpt()
- }
- stats.toList
+ def templateStats(): List[Tree] = statSeq(templateStat)
+ def templateStat: PartialFunction[Token, List[Tree]] = {
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isDefIntro || isModifier || isAnnotation =>
+ joinComment(nonLocalDefOrDcl)
+ case _ if isExprIntro =>
+ in.flushDoc
+ statement(InTemplate) :: Nil
}
+ def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
/** {{{
* RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
@@ -3057,14 +3043,14 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && !isCaseDefStart) {
+ while (!isStatSeqEnd && !isCaseDefEnd) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && !isCaseDefStart) acceptStatSep()
+ if (!isCaseDefEnd) acceptStatSep()
}
else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 6957f85689..b12be1a056 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -21,19 +21,24 @@ trait ScannersCommon {
val global : Global
import global._
+ /** Offset into source character array */
+ type Offset = Int
+
+ type Token = Int
+
trait CommonTokenData {
- def token: Int
+ def token: Token
def name: TermName
}
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def error (off: Int, msg: String): Unit
- def incompleteInputError(off: Int, msg: String): Unit
- def deprecationWarning(off: Int, msg: String): Unit
+ def error(off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
}
- def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+ def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
val high = names.last._1
@@ -48,13 +53,10 @@ trait Scanners extends ScannersCommon {
val global : Global
import global._
- /** Offset into source character array */
- type Offset = Int
-
trait TokenData extends CommonTokenData {
/** the next token */
- var token: Int = EMPTY
+ var token: Token = EMPTY
/** the offset of the first character of the current token */
var offset: Offset = 0
@@ -169,7 +171,7 @@ trait Scanners extends ScannersCommon {
def isAtEnd = charOffset >= buf.length
- def resume(lastCode: Int) = {
+ def resume(lastCode: Token) = {
token = lastCode
if (next.token != EMPTY && !reporter.hasErrors)
syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -194,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Int = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER) {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -225,7 +227,7 @@ trait Scanners extends ScannersCommon {
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
- var sepRegions: List[Int] = List()
+ var sepRegions: List[Token] = List()
// Get next token ------------------------------------------------------------
@@ -583,7 +585,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token start a statement? */
- def inFirstOfStat(token: Int) = token match {
+ def inFirstOfStat(token: Token) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -593,7 +595,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token end a statement? */
- def inLastOfStat(token: Int) = token match {
+ def inLastOfStat(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -1122,7 +1124,7 @@ trait Scanners extends ScannersCommon {
def applyBracePatch(): Boolean = false
/** overridden in UnitScanners */
- def parenBalance(token: Int) = 0
+ def parenBalance(token: Token) = 0
/** overridden in UnitScanners */
def healBraces(): List[BracePatch] = List()
@@ -1137,7 +1139,7 @@ trait Scanners extends ScannersCommon {
// ------------- keyword configuration -----------------------------------
- private val allKeywords = List[(Name, Int)](
+ private val allKeywords = List[(Name, Token)](
nme.ABSTRACTkw -> ABSTRACT,
nme.CASEkw -> CASE,
nme.CATCHkw -> CATCH,
@@ -1191,8 +1193,8 @@ trait Scanners extends ScannersCommon {
nme.MACROkw -> IDENTIFIER,
nme.THENkw -> IDENTIFIER)
- private var kwOffset: Int = -1
- private val kwArray: Array[Int] = {
+ private var kwOffset: Offset = -1
+ private val kwArray: Array[Token] = {
val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
kwOffset = offset
arr
@@ -1203,7 +1205,7 @@ trait Scanners extends ScannersCommon {
// Token representation ----------------------------------------------------
/** Returns the string representation of given token. */
- def token2string(token: Int): String = (token: @switch) match {
+ def token2string(token: Token): String = (token: @switch) match {
case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
case CHARLIT => "character literal"
case INTLIT => "integer literal"
@@ -1234,7 +1236,7 @@ trait Scanners extends ScannersCommon {
}
}
- class MalformedInput(val offset: Int, val msg: String) extends Exception
+ class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
* Useful for looking inside source files that aren not currently compiled to see what's there
@@ -1262,7 +1264,7 @@ trait Scanners extends ScannersCommon {
lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
- override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+ override def parenBalance(token: Token) = parensAnalyzer.balance(token)
override def healBraces(): List[BracePatch] = {
var patches: List[BracePatch] = List()
@@ -1412,7 +1414,7 @@ trait Scanners extends ScannersCommon {
var tabSeen = false
- def line(offset: Int): Int = {
+ def line(offset: Offset): Int = {
def findLine(lo: Int, hi: Int): Int = {
val mid = (lo + hi) / 2
if (offset < lineStart(mid)) findLine(lo, mid - 1)
@@ -1423,7 +1425,7 @@ trait Scanners extends ScannersCommon {
else findLine(0, lineStart.length - 1)
}
- def column(offset: Int): Int = {
+ def column(offset: Offset): Int = {
var col = 0
var i = offset - 1
while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1485,6 +1487,6 @@ trait Scanners extends ScannersCommon {
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Int, msg: String) {}
+ override def error(offset: Offset, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 59abf99844..28d5aefc2b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,24 +8,21 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
*/
abstract class TreeBuilder {
-
val global: Global
import global._
- def freshName(): Name = freshName("x$")
- def freshTermName(): TermName = freshTermName("x$")
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, point: Int, end: Int): Position
+ implicit def fresh: FreshNameCreator = unit.fresh
+ def o2p(offset: Int): Position = Position.offset(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
@@ -325,7 +322,7 @@ abstract class TreeBuilder {
/* If `pat` is not yet a `Bind` wrap it in one with a fresh name */
def makeBind(pat: Tree): Tree = pat match {
case Bind(_, _) => pat
- case _ => Bind(freshName(), pat) setPos pat.pos
+ case _ => Bind(freshTermName(), pat) setPos pat.pos
}
/* A reference to the name bound in Bind `pat`. */
@@ -416,7 +413,7 @@ abstract class TreeBuilder {
* }
*/
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
- val binder = freshTermName("x")
+ val binder = freshTermName()
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
@@ -520,13 +517,3 @@ abstract class TreeBuilder {
}
}
}
-
-abstract class UnitTreeBuilder extends TreeBuilder {
- import global._
- def unit: CompilationUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
- def o2p(offset: Int): Position = Position.offset(unit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
-}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index dea4c46e79..03aad71165 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -289,7 +289,7 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo {
+ val s = context.owner.newValue(unit.freshTermName(), arg.pos, newFlags = ARTIFACT) setInfo {
val tp = if (byName) functionType(Nil, argTpe) else argTpe
uncheckedBounds(tp)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index f3e8ac64f4..f69b8a9697 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -281,7 +281,7 @@ trait PatternTypers {
else TypeBounds.lower(tpSym.tpeHK)
)
// origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds)
skolemBuffer += skolem
logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*)
case tp1 => tp1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 97e9d6ef52..c385e7533a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3459,8 +3459,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
- if (unapply == QuasiquoteClass_api_unapply) macroExpandUnapply(this, tree, fun, unapply, args, mode, pt)
- else doTypedUnapply(tree, fun0, fun, args, mode, pt)
+ doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
deleted file mode 100644
index e877c990f0..0000000000
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.mutable
-
-trait FreshNameCreator {
- /** Do not call before after type checking ends.
- * PP: I think that directive needs to lose a word somewhere.
- */
- def newName(): String
- def newName(prefix: String): String
-}
-
-object FreshNameCreator {
- class Default extends FreshNameCreator {
- protected var counter = 0
- protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
-
- /**
- * Create a fresh name with the given prefix. It is guaranteed
- * that the returned name has never been returned by a previous
- * call to this function (provided the prefix does not end in a digit).
- */
- def newName(prefix: String): String = {
- val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
- counters(safePrefix) += 1
-
- safePrefix + counters(safePrefix)
- }
- def newName(): String = {
- counter += 1
- "$" + counter + "$"
- }
- }
-}
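
Note: the 40-line creator deleted above is superseded by src/reflect/scala/reflect/internal/util/FreshNameCreator.scala, which appears in the diffstat but not in this excerpt. As a minimal sketch, assuming it keeps the per-prefix counter scheme of the deleted Default class:

import scala.collection.mutable

// Sketch only: mirrors the deleted FreshNameCreator.Default above. The real
// replacement class (and any thread-safety it adds) is not shown in this diff.
class FreshNameCreatorSketch {
  protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0

  /** Fresh name for `prefix`, unique per creator instance provided the
   *  prefix does not itself end in a digit. */
  def newName(prefix: String): String = {
    val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
    counters(safePrefix) += 1
    safePrefix + counters(safePrefix)
  }
}
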
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index fdc2613810..e94b7725cd 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -280,7 +280,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def parse(code: String): Tree = {
reporter.reset()
- val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStats())
+ val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStatsOrPackages())
throwIfErrors()
tree
}
@@ -340,7 +340,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val exporter = importer.reverse
}
- def apply[T](f: CompilerApi => T): T = {
+ private val toolBoxLock = new Object
+ def apply[T](f: CompilerApi => T): T = toolBoxLock.synchronized {
try f(api)
catch { case ex: FatalError => throw ToolBoxError(s"fatal compiler error", ex) }
finally api.compiler.cleanupCaches()
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
index dd849f2bca..f92c9aa845 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -154,7 +154,7 @@ trait Holes { self: Quasiquotes =>
object Hole {
def apply(splicee: Tree, holeCard: Cardinality): Hole = {
- if (splicee.tpe == null) return new Hole(splicee, UnknownLocation, holeCard)
+ if (method == nme.unapply) return new Hole(splicee, UnknownLocation, holeCard)
val (spliceeCard, elementTpe) = parseCardinality(splicee.tpe)
def cantSplice() = {
val holeCardMsg = if (holeCard != NoDot) s" with $holeCard" else ""
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 5a1a25cfa1..0b5ade0b4c 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -4,15 +4,16 @@ package quasiquotes
import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
import scala.tools.nsc.ast.parser.Tokens._
import scala.compat.Platform.EOL
-import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
import scala.collection.mutable.ListBuffer
+import scala.util.Try
/** Builds upon the vanilla Scala parser and teams up together with Placeholders.scala to emulate holes.
* A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
* Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
*/
trait Parsers { self: Quasiquotes =>
- import global._
+ import global.{Try => _, _}
abstract class Parser extends {
val global: self.global.type = self.global
@@ -54,7 +55,13 @@ trait Parsers { self: Quasiquotes =>
def isHole(name: Name): Boolean = holeMap.contains(name)
+ override implicit def fresh: FreshNameCreator = new FreshNameCreator {
+ override def newName(prefix: String) = super.newName(nme.QUASIQUOTE_PREFIX + prefix)
+ }
+
override val treeBuilder = new ParserTreeBuilder {
+ override implicit def fresh: FreshNameCreator = parser.fresh
+
// q"(..$xs)"
override def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree =
Apply(Ident(nme.QUASIQUOTE_TUPLE), trees)
@@ -94,8 +101,6 @@ trait Parsers { self: Quasiquotes =>
override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
- override def isCaseDefStart: Boolean = super.isCaseDefStart || (in.token == EOF)
-
override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
@@ -140,11 +145,18 @@ trait Parsers { self: Quasiquotes =>
case Ident(name) if isHole(name) => true
case _ => false
})
+
+ override def topStat = super.topStat.orElse {
+ case _ if isHole =>
+ val stats = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ stats
+ }
}
}
object TermParser extends Parser {
- def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateStats()) }
+ def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateOrTopStatSeq()) }
}
object TypeParser extends Parser {
@@ -161,4 +173,14 @@ trait Parsers { self: Quasiquotes =>
parser.treeBuilder.patvarTransformer.transform(pat)
}
}
+
+ object FreshName {
+ def unapply(name: Name): Option[String] =
+ name.toString.split("\\$") match {
+ case Array(qq, left, right) if qq + "$" == nme.QUASIQUOTE_PREFIX && Try(right.toInt).isSuccess =>
+ Some(left + "$")
+ case _ =>
+ None
+ }
+ }
}
\ No newline at end of file
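
Note: the FreshName extractor above reverses the prefixing done by the overridden fresh creator earlier in this file (super.newName(nme.QUASIQUOTE_PREFIX + prefix)). Assuming the conventional "qq$" value for nme.QUASIQUOTE_PREFIX, a fresh name for prefix "x$" round-trips as follows (hypothetical walkthrough, not code from the commit):

// Assumes nme.QUASIQUOTE_PREFIX == "qq$"; all names here are illustrative.
object FreshNameRoundTrip extends App {
  val generated = "qq$" + "x$" + 1         // what the quasiquote creator would yield: "qq$x$1"
  val parts     = generated.split("\\$")   // Array("qq", "x", "1")
  // parts(0) + "$" matches the prefix and parts(2) parses as an Int,
  // so FreshName.unapply would recover the original user-facing prefix:
  val recovered = parts(1) + "$"           // "x$"
  println(recovered)
}
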
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index c2b219ee31..c31d1fcd12 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -32,10 +32,17 @@ trait Placeholders { self: Quasiquotes =>
def appendHole(tree: Tree, cardinality: Cardinality) = {
val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
sb.append(placeholderName)
- holeMap(placeholderName) = Hole(tree, cardinality)
+ val holeTree = if (method == nme.unapply) Bind(placeholderName, Ident(nme.WILDCARD)) else tree
+ holeMap(placeholderName) = Hole(holeTree, cardinality)
}
- foreach2(args, parts.init) { case (tree, (p, pos)) =>
+ val iargs = method match {
+ case nme.apply => args
+ case nme.unapply => List.fill(parts.length - 1)(EmptyTree)
+ case _ => global.abort("unreachable")
+ }
+
+ foreach2(iargs, parts.init) { case (tree, (p, pos)) =>
val (part, cardinality) = parseDots(p)
appendPart(part, pos)
appendHole(tree, cardinality)
@@ -47,7 +54,7 @@ trait Placeholders { self: Quasiquotes =>
}
class HoleMap {
- private val underlying = mutable.ListMap[String, Hole]()
+ private var underlying = immutable.SortedMap[String, Hole]()
private val accessed = mutable.Set[String]()
def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
def contains(key: Name) = underlying.contains(key.toString)
@@ -64,6 +71,7 @@ trait Placeholders { self: Quasiquotes =>
accessed += s
underlying.get(s)
}
+ def toList = underlying.toList
}
// Step 2: Transform vanilla Scala AST into an AST with holes
@@ -146,4 +154,11 @@ trait Placeholders { self: Quasiquotes =>
case _ => None
}
}
+
+ object PackageStatPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case ValDef(NoMods, Placeholder(tree, location, card), Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) => Some((tree, location, card))
+ case _ => None
+ }
+ }
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index 1305e25240..9e98dcbc8b 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -14,8 +14,9 @@ abstract class Quasiquotes extends Parsers
def debug(msg: String): Unit =
if (settings.Yquasiquotedebug.value) println(msg)
- lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
+ lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
+ debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
val parts1 = parts0.map {
case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
@@ -32,7 +33,7 @@ abstract class Quasiquotes extends Parsers
case nme.pq => PatternParser.parse(_)
case other => global.abort(s"Unknown quasiquote flavor: $other")
}
- (universe0, args0, parts1, parse0, reify0)
+ (universe0, args0, parts1, parse0, reify0, method0)
case _ =>
global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
}
@@ -41,11 +42,18 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
+ debug(s"\nmacro application:\n${c.macroApplication}\n")
debug(s"\ncode to parse:\n$code\n")
val tree = parse(code)
debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
- debug(s"reified tree:\n$reified\n")
+ val sreified =
+ reified
+ .toString
+ .replace("scala.reflect.runtime.`package`.universe.build.", "")
+ .replace("scala.reflect.runtime.`package`.universe.", "")
+ .replace("scala.collection.immutable.", "")
+ debug(s"reified tree:\n$sreified\n")
reified
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 18999e8267..3d1ecf95b2 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -7,10 +7,8 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
- import global.build.{SyntacticClassDef, SyntacticTraitDef, SyntacticModuleDef,
- SyntacticDefDef, SyntacticValDef, SyntacticVarDef,
- SyntacticBlock, SyntacticApplied, SyntacticTypeApplied,
- SyntacticFunction, SyntacticNew, SyntacticAssign}
+ import global.build.{Select => _, Ident => _, TypeTree => _, _}
+ import global.treeInfo._
import global.definitions._
import Cardinality._
import universeTypes._
@@ -29,12 +27,89 @@ trait Reifiers { self: Quasiquotes =>
def action = if (isReifyingExpressions) "splice" else "extract"
def holesHaveTypes = isReifyingExpressions
+ /** Map that stores freshly generated names linked to the corresponding names in the reified tree.
+ * This information is used to reify names created by calls to freshTermName and freshTypeName.
+ */
+ var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+
+ /** Wraps expressions into:
+ * a sequence of nested withFreshTermName/withFreshTypeName calls, which are required
+ * to force regeneration of randomly generated names on every evaluation of the quasiquote.
+ *
+ * Wraps patterns into:
+ * a call to an anonymous class' unapply method, as required by unapply macro expansion:
+ *
+ * new {
+ * def unapply(tree) = tree match {
+ * case pattern if guard => Some(result)
+ * case _ => None
+ * }
+ * }.unapply(<unapply-selector>)
+ *
+ * where pattern corresponds to the reified tree and guard is a conjunction of equalities
+ * checking that the names within each set in nameMap.values are equal to one another.
+ */
+ def wrap(tree: Tree) =
+ if (isReifyingExpressions) {
+ nameMap.foldLeft(tree) {
+ case (t, (origname, names)) =>
+ assert(names.size == 1)
+ val FreshName(prefix) = origname
+ val ctor = TermName("withFresh" + (if (origname.isTermName) "TermName" else "TypeName"))
+ // q"$u.build.$ctor($prefix) { ${names.head} => $t }"
+ Apply(Apply(Select(Select(u, nme.build), ctor), List(Literal(Constant(prefix)))),
+ List(Function(List(ValDef(Modifiers(PARAM), names.head, TypeTree(), EmptyTree)), t)))
+ }
+ } else {
+ val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
+ val cases =
+ if (isVarPattern) {
+ val Ident(name) :: Nil = freevars
+ // cq"$name: $treeType => $SomeModule($name)" :: Nil
+ CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))),
+ EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil
+ } else {
+ val (succ, fail) = freevars match {
+ case Nil =>
+ // (q"true", q"false")
+ (Literal(Constant(true)), Literal(Constant(false)))
+ case head :: Nil =>
+ // (q"$SomeModule($head)", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(head)), Ident(NoneModule))
+ case vars =>
+ // (q"$SomeModule((..$vars))", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(SyntacticTuple(vars))), Ident(NoneModule))
+ }
+ val guard =
+ nameMap.collect { case (_, nameset) if nameset.size >= 2 =>
+ nameset.toList.sliding(2).map { case List(n1, n2) =>
+ // q"$n1 == $n2"
+ Apply(Select(Ident(n1), nme.EQ), List(Ident(n2)))
+ }
+ }.flatten.reduceOption[Tree] { (l, r) =>
+ // q"$l && $r"
+ Apply(Select(l, nme.ZAND), List(r))
+ }.getOrElse { EmptyTree }
+ // cq"$tree if $guard => $succ" :: cq"_ => $fail" :: Nil
+ CaseDef(tree, guard, succ) :: CaseDef(Ident(nme.WILDCARD), EmptyTree, fail) :: Nil
+ }
+ // q"new { def unapply(tree: $AnyClass) = tree match { case ..$cases } }.unapply(..$args)"
+ Apply(
+ Select(
+ SyntacticNew(Nil, Nil, noSelfType, List(
+ DefDef(NoMods, nme.unapply, Nil, List(List(ValDef(NoMods, nme.tree, TypeTree(AnyClass.toType), EmptyTree))), TypeTree(),
+ Match(Ident(nme.tree), cases)))),
+ nme.unapply),
+ args)
+ }
+
def reifyFillingHoles(tree: Tree): Tree = {
val reified = reifyTree(tree)
holeMap.unused.foreach { hole =>
c.abort(holeMap(hole).tree.pos, s"Don't know how to $action here")
}
- reified
+ wrap(reified)
}
override def reifyTree(tree: Tree): Tree =
@@ -51,6 +126,7 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, location, _) => reifyCase(tree, location)
case RefineStatPlaceholder(tree, _, _) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, _) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, _) => reifyPackageStat(tree)
case _ => EmptyTree
}
@@ -60,18 +136,23 @@ trait Reifiers { self: Quasiquotes =>
case SyntacticClassDef(mods, name, tparams, constrmods, vparamss, earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, vparamss,
earlyDefs, parents, selfdef, body)
- case SyntacticModuleDef(mods, name, earlyDefs, parents, selfdef, body) =>
- reifyBuildCall(nme.SyntacticModuleDef, mods, name, earlyDefs, parents, selfdef, body)
+ case SyntacticPackageObjectDef(name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticPackageObjectDef, name, earlyDefs, parents, selfdef, body)
+ case SyntacticObjectDef(mods, name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
case SyntacticNew(earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
- case SyntacticValDef(mods, name, tpt, rhs) =>
+ case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
case SyntacticVarDef(mods, name, tpt, rhs) =>
reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
case SyntacticAssign(lhs, rhs) =>
reifyBuildCall(nme.SyntacticAssign, lhs, rhs)
+ case SyntacticApplied(fun, List(args))
+ if args.forall { case Placeholder(_, _, DotDotDot) => false case _ => true } =>
+ reifyBuildCall(nme.SyntacticApply, fun, args)
case SyntacticApplied(fun, argss) if argss.nonEmpty =>
reifyBuildCall(nme.SyntacticApplied, fun, argss)
case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
@@ -94,6 +175,12 @@ trait Reifiers { self: Quasiquotes =>
case Placeholder(tree, location, _) =>
if (holesHaveTypes && !(location.tpe <:< nameType)) c.abort(tree.pos, s"$nameType expected but ${location.tpe} found")
tree
+ case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
+ def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+ def introduceName() = { val n = fresh(); nameMap(name) += n; n }
+ def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
+ if (isReifyingPatterns) result(introduceName())
+ else result(nameMap.get(name).map { _.head }.getOrElse { introduceName() })
case _ =>
super.reifyName(name)
}
@@ -131,6 +218,8 @@ trait Reifiers { self: Quasiquotes =>
def reifyAnnotation(tree: Tree) = tree
+ def reifyPackageStat(tree: Tree) = tree
+
/** Splits list into a list of groups where subsequent elements are considered
* similar by the corresponding function.
*
@@ -185,6 +274,8 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, _, DotDot) => tree
case RefineStatPlaceholder(tree, _, DotDot) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, DotDot) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, DotDot) => reifyPackageStat(tree)
+
case List(Placeholder(tree, _, DotDotDot)) => tree
} {
reify(_)
@@ -201,13 +292,13 @@ trait Reifiers { self: Quasiquotes =>
// to overload the same tree for two different concepts:
// - MUTABLE that is used to override ValDef for vars
// - TRAIT that is used to override ClassDef for traits
- val nonoverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+ val nonOverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
// Traits automatically have ABSTRACT flag assigned to
// them so in that case it's not an explicit flag
val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
- if ((flags & nonoverloadedExplicitFlags) != 0L)
+ if ((flags & nonOverloadedExplicitFlags) != 0L)
c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
}
@@ -280,6 +371,8 @@ trait Reifiers { self: Quasiquotes =>
override def reifyEarlyDef(tree: Tree) = mirrorBuildCall(nme.mkEarlyDef, tree)
override def reifyAnnotation(tree: Tree) = mirrorBuildCall(nme.mkAnnotation, tree)
+
+ override def reifyPackageStat(tree: Tree) = mirrorBuildCall(nme.mkPackageStat, tree)
}
class UnapplyReifier extends Reifier {
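The guard built in the pattern branch above reduces each set of names standing for one original fresh name to a chain of pairwise equality checks joined with &&. A self-contained sketch of the same idea over plain strings (illustration only, not reflection API):

  // For every set of captured names that correspond to one original fresh name,
  // require that all captured values are equal, then conjoin the per-set checks.
  def equalityGuard(nameSets: Iterable[List[String]]): Boolean =
    nameSets
      .filter(_.size >= 2)
      .forall(ns => ns.sliding(2).forall { case List(a, b) => a == b })

  equalityGuard(List(List("x$1", "x$1"), List("y$2")))  // true
  equalityGuard(List(List("x$1", "x$2")))               // false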
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 551c27bf9c..28551b1dcd 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -80,12 +80,20 @@ private[reflect] trait BuildUtils { self: Universe =>
def mkRefineStat(stats: List[Tree]): List[Tree]
+ def mkPackageStat(stat: Tree): Tree
+
+ def mkPackageStat(stats: List[Tree]): List[Tree]
+
def mkEarlyDef(defn: Tree): Tree
def mkEarlyDef(defns: List[Tree]): List[Tree]
def RefTree(qual: Tree, sym: Symbol): Tree
+ def withFreshTermName[T](prefix: String)(f: TermName => T): T
+
+ def withFreshTypeName[T](prefix: String)(f: TypeName => T): T
+
val ScalaDot: ScalaDotExtractor
trait ScalaDotExtractor {
@@ -114,12 +122,19 @@ private[reflect] trait BuildUtils { self: Universe =>
def unapply(tree: Tree): Some[(Tree, List[List[Tree]])]
}
+ val SyntacticApply: SyntacticApplyExtractor
+
+ trait SyntacticApplyExtractor {
+ def apply(tree: Tree, args: List[Tree]): Tree
+ def unapply(tree: Tree): Some[(Tree, List[Tree])]
+ }
+
val SyntacticClassDef: SyntacticClassDefExtractor
trait SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -128,19 +143,27 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticTraitDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
- earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
List[Tree], List[Tree], ValDef, List[Tree])]
}
- val SyntacticModuleDef: SyntacticModuleDefExtractor
+ val SyntacticObjectDef: SyntacticObjectDefExtractor
- trait SyntacticModuleDefExtractor {
+ trait SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
+ val SyntacticPackageObjectDef: SyntacticPackageObjectDefExtractor
+
+ trait SyntacticPackageObjectDefExtractor {
+ def apply(name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
+ def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])]
+ }
+
val SyntacticTuple: SyntacticTupleExtractor
val SyntacticTupleType: SyntacticTupleExtractor
@@ -159,7 +182,7 @@ private[reflect] trait BuildUtils { self: Universe =>
val SyntacticNew: SyntacticNewExtractor
trait SyntacticNewExtractor {
- def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])]
}
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
index 08d3274ca5..8e993af382 100644
--- a/src/reflect/scala/reflect/api/Quasiquotes.scala
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -8,7 +8,7 @@ trait Quasiquotes { self: Universe =>
implicit class Quasiquote(ctx: StringContext) {
protected trait api {
def apply(args: Any*): Any = macro ???
- def unapply(subpatterns: Any*): Option[Any] = macro ???
+ def unapply(scrutinee: Any): Any = macro ???
}
object q extends api
object tq extends api
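The changed unapply signature is what allows the interpolators to appear in pattern position; a small usage sketch (the scrutinee tree is an arbitrary example):

  import scala.reflect.runtime.universe._

  val tree = q"f(1, 2)"
  tree match {
    case q"$fun(..$args)" => println(s"$fun applied to ${args.length} argument(s)")
    case _                => println("no match")
  }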
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 05aaa462c4..19c67879f5 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -38,7 +38,7 @@ trait BaseTypeSeqs {
* This is necessary because when run from reflection every base type sequence needs to have a
* SynchronizedBaseTypeSeq as mixin.
*/
- class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
+ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount)
if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 951efd90ed..fc6b26db3f 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -98,6 +98,18 @@ trait BuildUtils { self: SymbolTable =>
def mkRefineStat(stats: List[Tree]): List[Tree] = stats.map(mkRefineStat)
+ def mkPackageStat(stat: Tree): Tree = {
+ stat match {
+ case cd: ClassDef =>
+ case md: ModuleDef =>
+ case pd: PackageDef =>
+ case _ => throw new IllegalArgumentException(s"not legal package stat: $stat")
+ }
+ stat
+ }
+
+ def mkPackageStat(stats: List[Tree]): List[Tree] = stats.map(mkPackageStat)
+
object ScalaDot extends ScalaDotExtractor {
def apply(name: Name): Tree = gen.scalaDot(name)
def unapply(tree: Tree): Option[Name] = tree match {
@@ -110,7 +122,7 @@ trait BuildUtils { self: SymbolTable =>
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
copyValDef(vdef)(mods = mods | PRESUPER)
case tdef @ TypeDef(mods, _, _, _) =>
- copyTypeDef(tdef)(mods = mods | PRESUPER)
+ copyTypeDef(tdef)(mods = mods | PRESUPER)
case _ =>
throw new IllegalArgumentException(s"not legal early def: $defn")
}
@@ -119,6 +131,12 @@ trait BuildUtils { self: SymbolTable =>
def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
+ def withFreshTermName[T](prefix: String)(f: TermName => T): T = f(freshTermName(prefix))
+
+ def withFreshTypeName[T](prefix: String)(f: TypeName => T): T = f(freshTypeName(prefix))
+
+ private implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
+
object FlagsRepr extends FlagsReprExtractor {
def apply(bits: Long): FlagSet = bits
def unapply(flags: Long): Some[Long] = Some(flags)
@@ -148,6 +166,15 @@ trait BuildUtils { self: SymbolTable =>
}
}
+ object SyntacticApply extends SyntacticApplyExtractor {
+ def apply(tree: Tree, args: List[Tree]): Tree = SyntacticApplied(tree, List(args))
+
+ def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
+ case Apply(fun, args) => Some((fun, args))
+ case other => Some((other, Nil))
+ }
+ }
+
private object UnCtor {
def unapply(tree: Tree): Option[(Modifiers, List[List[ValDef]], List[Tree])] = tree match {
case DefDef(mods, nme.MIXIN_CONSTRUCTOR, _, _, _, Block(lvdefs, _)) =>
@@ -160,9 +187,9 @@ trait BuildUtils { self: SymbolTable =>
private object UnMkTemplate {
def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
- val Template(parents, selfdef, tbody) = templ
+ val Template(parents, selfType, tbody) = templ
def result(ctorMods: Modifiers, vparamss: List[List[ValDef]], edefs: List[Tree], body: List[Tree]) =
- Some((parents, selfdef, ctorMods, vparamss, edefs, body))
+ Some((parents, selfType, ctorMods, vparamss, edefs, body))
def indexOfCtor(trees: List[Tree]) =
trees.indexWhere { case UnCtor(_, _, _) => true ; case _ => false }
@@ -202,7 +229,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticClassDef extends SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef = {
val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
val vparamss0 = vparamss.map { _.map { vd => copyValDef(vd)(mods = (vd.mods | extraFlags) & (~DEFERRED)) } }
val tparams0 = mkTparams(tparams)
@@ -213,15 +240,15 @@ trait BuildUtils { self: SymbolTable =>
} else parents
)
val body0 = earlyDefs ::: body
- val templ = gen.mkTemplate(parents0, selfdef, constrMods, vparamss0, body0)
+ val templ = gen.mkTemplate(parents0, selfType, constrMods, vparamss0, body0)
gen.mkClassDef(mods, name, tparams0, templ)
}
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
if !ctorMods.isTrait && !ctorMods.hasFlag(JAVA) =>
- Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfdef, body))
+ Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
case _ =>
None
}
@@ -229,29 +256,42 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticTraitDef extends SyntacticTraitDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef = {
val mods0 = mods | TRAIT | ABSTRACT
- val templ = gen.mkTemplate(parents, selfdef, Modifiers(TRAIT), Nil, earlyDefs ::: body)
+ val templ = gen.mkTemplate(parents, selfType, Modifiers(TRAIT), Nil, earlyDefs ::: body)
gen.mkClassDef(mods0, name, mkTparams(tparams), templ)
}
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
if mods.isTrait =>
- Some((mods, name, tparams, earlyDefs, parents, selfdef, body))
+ Some((mods, name, tparams, earlyDefs, parents, selfType, body))
case _ => None
}
}
- object SyntacticModuleDef extends SyntacticModuleDefExtractor {
+ object SyntacticObjectDef extends SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]) =
- ModuleDef(mods, name, gen.mkTemplate(parents, selfdef, NoMods, Nil, earlyDefs ::: body))
+ parents: List[Tree], selfType: ValDef, body: List[Tree]) =
+ ModuleDef(mods, name, gen.mkTemplate(parents, selfType, NoMods, Nil, earlyDefs ::: body))
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ModuleDef(mods, name, UnMkTemplate(parents, selfdef, _, _, earlyDefs, body)) =>
- Some((mods, name, earlyDefs, parents, selfdef, body))
+ case ModuleDef(mods, name, UnMkTemplate(parents, selfType, _, _, earlyDefs, body)) =>
+ Some((mods, name, earlyDefs, parents, selfType, body))
+ case _ =>
+ None
+ }
+ }
+
+ object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
+ def apply(name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree =
+ gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
+
+ def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case PackageDef(Ident(name: TermName), List(SyntacticObjectDef(NoMods, nme.PACKAGEkw, earlyDefs, parents, selfType, body))) =>
+ Some((name, earlyDefs, parents, selfType, body))
case _ =>
None
}
@@ -359,15 +399,15 @@ trait BuildUtils { self: SymbolTable =>
}
object SyntacticNew extends SyntacticNewExtractor {
- def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree =
- gen.mkNew(parents, selfdef, earlyDefs ::: body, NoPosition, NoPosition)
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree =
+ gen.mkNew(parents, selfType, earlyDefs ::: body, NoPosition, NoPosition)
def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
case SyntacticApplied(Select(New(SyntacticTypeApplied(ident, targs)), nme.CONSTRUCTOR), argss) =>
Some((Nil, SyntacticApplied(SyntacticTypeApplied(ident, targs), argss) :: Nil, noSelfType, Nil))
- case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfdef, body) ::
+ case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfType, body) ::
Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
- Some((earlyDefs, parents, selfdef, body))
+ Some((earlyDefs, parents, selfType, body))
case _ =>
None
}
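withFreshTermName/withFreshTypeName follow the loan pattern: the fresh name exists only inside the callback. A self-contained analogue under assumed names (FreshNamesDemo and withFreshName are hypothetical, not reflection API):

  import java.util.concurrent.atomic.AtomicLong

  object FreshNamesDemo extends App {
    private val counter = new AtomicLong(0)

    // Loan pattern: mint a fresh name and expose it only inside the callback,
    // mirroring withFreshTermName(prefix)(f) = f(freshTermName(prefix)).
    def withFreshName[T](prefix: String)(f: String => T): T =
      f(prefix + counter.incrementAndGet())

    println(withFreshName("x$") { fresh => s"val $fresh = 1" }) // val x$1 = 1
    println(withFreshName("x$") { fresh => s"val $fresh = 1" }) // val x$2 = 1
  }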
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 99aad4f057..7b88514429 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -289,12 +289,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val ConstantFalse = ConstantType(Constant(false))
lazy val ConstantNull = ConstantType(Constant(null))
- // Note: this is not the type alias AnyRef, it's a companion-like
- // object used by the @specialize annotation.
- lazy val AnyRefModule = getMemberModule(ScalaPackageClass, nme.AnyRef)
- @deprecated("Use AnyRefModule", "2.10.0")
- def Predef_AnyRef = AnyRefModule
-
lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
@@ -1180,14 +1174,21 @@ trait Definitions extends api.StandardDefinitions {
}
lazy val AnnotationDefaultAttr: ClassSymbol = {
- val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
- // This attribute needs a constructor so that modifiers in parsed Java code make sense
- attr.info.decls enter attr.newClassConstructor(NoPosition)
- attr
+ val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
+ sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
+ RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
+ case existing :: _ =>
+ existing.asInstanceOf[ClassSymbol]
+ case _ =>
+ RuntimePackageClass.info.decls enter sym
+ // This attribute needs a constructor so that modifiers in parsed Java code make sense
+ sym.info.decls enter sym.newClassConstructor(NoPosition)
+ sym
+ }
}
- private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member") = {
- throw new FatalError(owner + " does not have a " + what + " " + name)
+ private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member", addendum: String = "") = {
+ throw new FatalError(owner + " does not have a " + what + " " + name + addendum)
}
def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
@@ -1222,7 +1223,8 @@ trait Definitions extends api.StandardDefinitions {
def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
getMember(owner, name.toTermName) match {
case x: ModuleSymbol => x
- case _ => fatalMissingSymbol(owner, name, "member object")
+ case NoSymbol => fatalMissingSymbol(owner, name, "member object")
+ case other => fatalMissingSymbol(owner, name, "member object", addendum = s". A symbol ${other} of kind ${other.accurateKindString} already exists.")
}
}
def getTypeMember(owner: Symbol, name: Name): TypeSymbol = {
@@ -1388,10 +1390,13 @@ trait Definitions extends api.StandardDefinitions {
else flatNameString(etp.typeSymbol, '.')
}
+ // documented in JavaUniverse.init
def init() {
if (isInitialized) return
- // force initialization of every symbol that is synthesized or hijacked by the compiler
- val _ = symbolsNotPresentInBytecode
+ ObjectClass.initialize
+ ScalaPackageClass.initialize
+ val forced1 = symbolsNotPresentInBytecode
+ val forced2 = NoSymbol
isInitialized = true
} //init
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 6ed9de8e20..e122fa498b 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -250,6 +250,19 @@ trait Mirrors extends api.Mirrors {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
+ if (rootOwner != NoSymbol) {
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ definitions.syntheticCoreClasses foreach (theirSym => {
+ val theirOwner = theirSym.owner
+ assert(theirOwner.isPackageClass, s"theirSym = $theirSym, theirOwner = $theirOwner")
+ val ourOwner = staticPackage(theirOwner.fullName).moduleClass
+ val ourSym = theirSym // just copy the symbol into our branch of the symbol table
+ ourOwner.info.decls enterIfNew ourSym
+ })
+ }
+
initialized = true
}
}
@@ -274,34 +287,45 @@ trait Mirrors extends api.Mirrors {
def mirror = thisMirror.asInstanceOf[Mirror]
}
- // This is the package _root_. The actual root cannot be referenced at
- // the source level, but _root_ is essentially a function => <root>.
- final object RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
+ class RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
this setInfo NullaryMethodType(RootClass.tpe)
RootClass.sourceModule = this
override def isRootPackage = true
}
+
+ // This is the package _root_. The actual root cannot be referenced at
+ // the source level, but _root_ is essentially a function => <root>.
+ lazy val RootPackage = new RootPackage
+
+ class RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
+ this setInfo rootLoader
+
+ override def isRoot = true
+ override def isEffectiveRoot = true
+ override def isNestedClass = false
+ }
+
// This is <root>, the actual root of everything except the package _root_.
// <root> and _root_ (RootPackage and RootClass) should be the only "well known"
// symbols owned by NoSymbol. All owner chains should go through RootClass,
// although it is probable that some symbols are created as direct children
// of NoSymbol to ensure they will not be stumbled upon. (We should designate
// a better encapsulated place for that.)
- final object RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
- this setInfo rootLoader
+ lazy val RootClass = new RootClass
- override def isRoot = true
- override def isEffectiveRoot = true
- override def isNestedClass = false
- }
- // The empty package, which holds all top level types without given packages.
- final object EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+ class EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
override def isEmptyPackage = true
}
- final object EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+
+ // The empty package, which holds all top level types without given packages.
+ lazy val EmptyPackage = new EmptyPackage
+
+ class EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
override def isEffectiveRoot = true
override def isEmptyPackageClass = true
}
+
+ lazy val EmptyPackageClass = new EmptyPackageClass
}
}
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 1060b3a99c..b7a1681838 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -139,6 +139,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
enter(sym)
}
+ def enterIfNew[T <: Symbol](sym: T): T = {
+ val existing = lookupEntry(sym.name)
+ if (existing == null) enter(sym)
+ else existing.sym.asInstanceOf[T]
+ }
+
private def createHash() {
hashtable = new Array[ScopeEntry](HASHSIZE)
enterAllInHash(elems)
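enterIfNew gives scope entry get-or-insert semantics. A minimal analogue over a plain mutable map (hypothetical helper, not the reflection API):

  import scala.collection.mutable

  // Enter the symbol only if the name is free; otherwise return the existing entry,
  // which is what enterIfNew does against a Scope's hashtable.
  def enterIfNew[A](scope: mutable.Map[String, A], name: String, sym: A): A =
    scope.getOrElseUpdate(name, sym)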
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index af26253802..9f56e78059 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -247,11 +247,12 @@ trait StdNames {
final val Quasiquote: NameType = "Quasiquote"
// quasiquote-specific names
- final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
- final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
- final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
- final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
- final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
+ final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
+ final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
+ final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_PACKAGE_STAT: NameType = "$quasiquote$package$stat$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -273,6 +274,8 @@ trait StdNames {
final val SourceFileATTR: NameType = "SourceFile"
final val SyntheticATTR: NameType = "Synthetic"
+ final val scala_ : NameType = "scala"
+
def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName
def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName
def implClassName(name: Name): TypeName = (name append IMPL_CLASS_SUFFIX).toTypeName
@@ -319,6 +322,7 @@ trait StdNames {
val REIFY_FREE_VALUE_SUFFIX: NameType = "$value"
val REIFY_SYMDEF_PREFIX: NameType = "symdef$"
val QUASIQUOTE_PREFIX: String = "qq$"
+ val QUASIQUOTE_NAME_PREFIX: String = "nn$"
val QUASIQUOTE_FILE: String = "<quasiquote>"
val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
val QUASIQUOTE_CASE: NameType = "$quasiquote$case$"
@@ -582,13 +586,15 @@ trait StdNames {
val Select: NameType = "Select"
val SelectFromTypeTree: NameType = "SelectFromTypeTree"
val SyntacticApplied: NameType = "SyntacticApplied"
+ val SyntacticApply: NameType = "SyntacticApply"
val SyntacticAssign: NameType = "SyntacticAssign"
val SyntacticBlock: NameType = "SyntacticBlock"
val SyntacticClassDef: NameType = "SyntacticClassDef"
val SyntacticDefDef: NameType = "SyntacticDefDef"
val SyntacticFunction: NameType = "SyntacticFunction"
- val SyntacticFunctionType: NameType= "SyntacticFunctionType"
- val SyntacticModuleDef: NameType = "SyntacticModuleDef"
+ val SyntacticFunctionType: NameType = "SyntacticFunctionType"
+ val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef"
+ val SyntacticObjectDef: NameType = "SyntacticObjectDef"
val SyntacticNew: NameType = "SyntacticNew"
val SyntacticTraitDef: NameType = "SyntacticTraitDef"
val SyntacticTuple: NameType = "SyntacticTuple"
@@ -683,6 +689,7 @@ trait StdNames {
val mkAnnotation: NameType = "mkAnnotation"
val mkRefineStat: NameType = "mkRefineStat"
val mkEarlyDef: NameType = "mkEarlyDef"
+ val mkPackageStat: NameType = "mkPackageStat"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 98466ebb2b..8386d02b7c 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -242,12 +242,20 @@ abstract class SymbolTable extends macros.Universe
finally popPhase(saved)
}
+ def slowButSafeEnteringPhase[T](ph: Phase)(op: => T): T = {
+ if (isCompilerUniverse) enteringPhase(ph)(op)
+ else op
+ }
+
@inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)
@inline final def enteringPrevPhase[T](op: => T): T = enteringPhase(phase.prev)(op)
@inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op
+ def slowButSafeEnteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+ if (isCompilerUniverse) enteringPhaseNotLaterThan(target)(op) else op
+
final def isValid(period: Period): Boolean =
period != 0 && runId(period) == currentRunId && {
val pid = phaseId(period)
@@ -375,6 +383,11 @@ abstract class SymbolTable extends macros.Universe
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
+
+ // fresh name creation
+ def currentFreshNameCreator: FreshNameCreator
+ def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
+ def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
}
object SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 71de02ef9e..ba785c14bd 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -30,13 +30,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
- private var recursionTable = immutable.Map.empty[Symbol, Int]
+ private var _recursionTable = immutable.Map.empty[Symbol, Int]
+ def recursionTable = _recursionTable
+ def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable = value
- private var nextexid = 0
- protected def freshExistentialName(suffix: String) = {
- nextexid += 1
- newTypeName("_" + nextexid + suffix)
- }
+ private var existentialIds = 0
+ protected def nextExistentialId() = { existentialIds += 1; existentialIds }
+ protected def freshExistentialName(suffix: String) = newTypeName("_" + nextExistentialId() + suffix)
// Set the fields which point companions at one another. Returns the module.
def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
@@ -110,10 +110,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
children
}
+ def selfType = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ typeOfThis
+ }
+
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
- def selfType: Type = typeOfThis
def typeSignature: Type = { fullyInitializeSymbol(this); info }
def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
@@ -127,6 +131,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setter: Symbol = setter(owner)
}
+ private[reflect] case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
+
/** The class for all symbols */
abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
extends SymbolContextApiImpl
@@ -800,7 +806,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
*/
- final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
/** Does this symbol denote the primary constructor of its enclosing class? */
@@ -949,6 +955,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
+ /** Some completers call sym.setInfo while still in flight and then proceed with initialization (e.g. see LazyPackageType).
+ * setInfo sets _validTo to the current period, which means that after a call to setInfo, isInitialized starts returning true.
+ * Unfortunately, this doesn't mean that info becomes ready to be used, because subsequent initialization might change the info.
+ * Therefore we need this method to distinguish between initialized and really initialized symbol states.
+ */
+ final def isFullyInitialized: Boolean = _validTo != NoPeriod && (flags & LOCKED) == 0
+
/** Can this symbol be loaded by a reflective mirror?
*
* Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
@@ -1563,6 +1576,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters later.
*/
+ // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+ // don't forget to modify the code over there if you modify this method
def unsafeTypeParams: List[Symbol] =
if (isMonomorphicType) Nil
else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
@@ -1571,6 +1586,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters later.
*/
+ // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+ // don't forget to modify the code over there if you modify this method
def typeParams: List[Symbol] =
if (isMonomorphicType) Nil
else {
@@ -2393,7 +2410,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isTerm && (!isParameter || isParamAccessor)) "val"
else ""
- private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
private def symbolKind: SymbolKind = {
var kind =
if (isTermMacro) ("term macro", "macro method", "MACM")
@@ -3124,8 +3140,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def thisType: Type = {
val period = thisTypePeriod
if (period != currentPeriod) {
- thisTypePeriod = currentPeriod
if (!isValid(period)) thisTypeCache = ThisType(this)
+ thisTypePeriod = currentPeriod
}
thisTypeCache
}
@@ -3213,9 +3229,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def typeOfThis = {
val period = typeOfThisPeriod
if (period != currentPeriod) {
- typeOfThisPeriod = currentPeriod
if (!isValid(period))
typeOfThisCache = singleType(owner.thisType, sourceModule)
+ typeOfThisPeriod = currentPeriod
}
typeOfThisCache
}
@@ -3226,9 +3242,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// Skip a package object class, because the members are also in
// the package and we wish to avoid spurious ambiguities as in pos/t3999.
if (!isPackageObjectClass) {
+ implicitMembersCacheValue = tp.implicitMembers
implicitMembersCacheKey1 = tp
implicitMembersCacheKey2 = tp.decls.elems
- implicitMembersCacheValue = tp.implicitMembers
}
}
implicitMembersCacheValue
@@ -3336,10 +3352,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def name = nme.NO_NAME
override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
- synchronized {
- setInfo(NoType)
- privateWithin = this
- }
+ // Syncnote: no need to synchronize this, because NoSymbol's initialization is triggered by JavaUniverse.init
+ // which is called in the universe's constructor, something that's inherently single-threaded
+ setInfo(NoType)
+ privateWithin = this
+
override def info_=(info: Type) = {
infos = TypeHistory(1, NoType, null)
unlock()
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 720d8bfe4a..cf7c729a6a 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -448,4 +448,10 @@ abstract class TreeGen extends macros.TreeBuilder {
case _ =>
Assign(lhs, rhs)
}
+
+ def mkPackageObject(defn: ModuleDef, pidPos: Position = NoPosition, pkgPos: Position = NoPosition) = {
+ val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
+ val pid = atPos(pidPos)(Ident(defn.name))
+ atPos(pkgPos)(PackageDef(pid, module :: Nil))
+ }
}
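For reference, a hedged sketch of the desugaring mkPackageObject performs; after this commit a package object written with quasiquotes should reify into the same PackageDef/ModuleDef encoding (foo and the body are arbitrary):

  import scala.reflect.runtime.universe._

  val t = q"package object foo { def x = 1 }"
  println(showRaw(t))
  // roughly: PackageDef(Ident(foo), List(ModuleDef(Modifiers(), package, Template(...))))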
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 9344212294..204a2e7088 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -115,14 +115,17 @@ trait Types
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
- var skolemizationLevel = 0
+ private var _skolemizationLevel = 0
+ def skolemizationLevel = _skolemizationLevel
+ def skolemizationLevel_=(value: Int) = _skolemizationLevel = value
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
* not on the refinement.
*/
- val intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+ private val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+ def intersectionWitness = _intersectionWitness
/** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
@@ -1974,12 +1977,12 @@ trait Types
def apply(value: Constant) = unique(new UniqueConstantType(value))
}
- /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
- * with synchronized, because they are accessed only from isVolatile, which is called only from
- * Typer.
- */
- private var volatileRecursions: Int = 0
- private val pendingVolatiles = new mutable.HashSet[Symbol]
+ private var _volatileRecursions: Int = 0
+ def volatileRecursions = _volatileRecursions
+ def volatileRecursions_=(value: Int) = _volatileRecursions = value
+
+ private val _pendingVolatiles = new mutable.HashSet[Symbol]
+ def pendingVolatiles = _pendingVolatiles
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
require(args0.nonEmpty, this)
@@ -3947,9 +3950,12 @@ trait Types
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
- private var basetypeRecursions: Int = 0
- private val pendingBaseTypes = new mutable.HashSet[Type]
+ private var _basetypeRecursions: Int = 0
+ def basetypeRecursions = _basetypeRecursions
+ def basetypeRecursions_=(value: Int) = _basetypeRecursions = value
+ private val _pendingBaseTypes = new mutable.HashSet[Type]
+ def pendingBaseTypes = _pendingBaseTypes
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
@@ -4449,7 +4455,9 @@ trait Types
}
/** The current indentation string for traces */
- protected[internal] var indent: String = ""
+ private var _indent: String = ""
+ protected def indent = _indent
+ protected def indent_=(value: String) = _indent = value
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index c4f1f0cf96..a6c34935ad 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -659,7 +659,7 @@ abstract class UnPickler {
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
if (p ne null)
- enteringPhase(p) (sym setInfo tp)
+ slowButSafeEnteringPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
}
@@ -677,7 +677,7 @@ abstract class UnPickler {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias.isOverloaded)
- alias = enteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+ alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
sym.asInstanceOf[TermSymbol].setAlias(alias)
}
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 6fa536d84c..6b33aca025 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -251,8 +251,11 @@ private[internal] trait GlbLubs {
else if (isNumericSubType(t2, t1)) t1
else IntTpe)
- private val lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ private val _lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ def lubResults = _lubResults
+
+ private val _glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ def glbResults = _glbResults
/** Given a list of types, finds all the base classes they have in
* common, then returns a list of type constructors derived directly
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 6532bce9f0..b60fecd66e 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -14,7 +14,8 @@ trait TypeComparers {
private final val LogPendingSubTypesThreshold = TypeConstants.DefaultLogThreshhold
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+ private val _pendingSubTypes = new mutable.HashSet[SubTypePair]
+ def pendingSubTypes = _pendingSubTypes
class SubTypePair(val tp1: Type, val tp2: Type) {
override def hashCode = tp1.hashCode * 41 + tp2.hashCode
@@ -33,7 +34,9 @@ trait TypeComparers {
override def toString = tp1+" <:<? "+tp2
}
- private var subsametypeRecursions: Int = 0
+ private var _subsametypeRecursions: Int = 0
+ def subsametypeRecursions = _subsametypeRecursions
+ def subsametypeRecursions_=(value: Int) = _subsametypeRecursions = value
private def isUnifiable(pre1: Type, pre2: Type) = (
(isEligibleForPrefixUnification(pre1) || isEligibleForPrefixUnification(pre2))
@@ -100,17 +103,13 @@ trait TypeComparers {
// isSameType1(tp1, tp2)
// }
- undoLog.lock()
+ val before = undoLog.log
+ var result = false
try {
- val before = undoLog.log
- var result = false
- try {
- result = isSameType1(tp1, tp2)
- }
- finally if (!result) undoLog.undoTo(before)
- result
+ result = isSameType1(tp1, tp2)
}
- finally undoLog.unlock()
+ finally if (!result) undoLog.undoTo(before)
+ result
}
finally {
subsametypeRecursions -= 1
@@ -256,30 +255,27 @@ trait TypeComparers {
// }
// }
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType1(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType1(tp1, tp2, depth)
- }
- } finally if (!result) undoLog.undoTo(before)
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType1(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType1(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
- result
- } finally undoLog.unlock()
+ result
} finally {
subsametypeRecursions -= 1
// XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index fdfe376c18..e2159d30f5 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -13,34 +13,14 @@ private[internal] trait TypeConstraints {
/** A log of type variables with their original constraints. Used in order
* to undo constraints in the case of isSubType/isSameType failure.
*/
- lazy val undoLog = newUndoLog
-
- protected def newUndoLog = new UndoLog
+ private lazy val _undoLog = new UndoLog
+ def undoLog = _undoLog
class UndoLog extends Clearable {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
//OPT this method is public so we can do `manual inlining`
var log: UndoPairs = List()
- /*
- * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
- *
- * The idea behind explicit locking mechanism is that all public methods that access mutable state
- * will have to obtain the lock for their entire execution so both reads and writes can be kept in
- * right order. Originally, that was achieved by overriding those public methods in
- * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
- * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
- * can go away.
- *
- * By using explicit locking we can achieve inlining.
- *
- * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
- * places implementation of `undo` or `undoUnless`). This should be changed back to protected
- * once inliner is fixed.
- */
- def lock(): Unit = ()
- def unlock(): Unit = ()
-
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
@@ -64,23 +44,16 @@ private[internal] trait TypeConstraints {
}
def clear() {
- lock()
- try {
- if (settings.debug)
- self.log("Clearing " + log.size + " entries from the undoLog.")
- log = Nil
- } finally unlock()
+ if (settings.debug)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
}
// `block` should not affect constraints on typevars
def undo[T](block: => T): T = {
- lock()
- try {
- val before = log
-
- try block
- finally undoTo(before)
- } finally unlock()
+ val before = log
+ try block
+ finally undoTo(before)
}
}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
index 16929cca0f..ebc4394d25 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -10,7 +10,9 @@ private[internal] trait TypeToStrings {
*/
final val maxTostringRecursions = 50
- private var tostringRecursions = 0
+ private var _tostringRecursions = 0
+ def tostringRecursions = _tostringRecursions
+ def tostringRecursions_=(value: Int) = _tostringRecursions = value
protected def typeToString(tpe: Type): String =
if (tostringRecursions >= maxTostringRecursions) {
diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
new file mode 100644
index 0000000000..3e54de8e1e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect.internal
+package util
+
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicLong
+import scala.collection.mutable
+import scala.reflect.NameTransformer
+
+class FreshNameCreator {
+ protected val counters = new ConcurrentHashMap[String, AtomicLong]()
+
+ /**
+ * Create a fresh name with the given prefix. It is guaranteed
+ * that the returned name has never been returned by a previous
+ * call to this function (provided the prefix does not end in a digit).
+ */
+ def newName(prefix: String): String = {
+ val safePrefix = NameTransformer.encode(prefix)
+ counters.putIfAbsent(safePrefix, new AtomicLong(0))
+ safePrefix + counters.get(safePrefix).incrementAndGet()
+ }
+}
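A quick usage sketch of the new creator (the prefixes are arbitrary examples):

  import scala.reflect.internal.util.FreshNameCreator

  val fresh = new FreshNameCreator
  fresh.newName("x$")  // "x$1"
  fresh.newName("x$")  // "x$2"   -- one AtomicLong counter per prefix
  fresh.newName("y$")  // "y$1"   -- independent counter for a different prefix
  fresh.newName("+")   // "$plus1" -- the prefix is NameTransformer-encoded first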
diff --git a/src/reflect/scala/reflect/runtime/Gil.scala b/src/reflect/scala/reflect/runtime/Gil.scala
new file mode 100644
index 0000000000..0edb1e5748
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/Gil.scala
@@ -0,0 +1,25 @@
+package scala.reflect
+package runtime
+
+private[reflect] trait Gil {
+ self: SymbolTable =>
+
+ // fixme... please...
+ // there are the following avenues of optimization we discussed with Roland:
+ // 1) replace PackageScope locks with ConcurrentHashMap, because PackageScope materializers seem to be idempotent
+ // 2) unlock unpickling completers by verifying that they are idempotent or moving non-idempotent parts
+ // 3) remove the necessity in global state for isSubType
+ private lazy val gil = new java.util.concurrent.locks.ReentrantLock
+
+ @inline final def gilSynchronized[T](body: => T): T = {
+ if (isCompilerUniverse) body
+ else {
+ try {
+ gil.lock()
+ body
+ } finally {
+ gil.unlock()
+ }
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 1a15454500..1e2dd6b7d3 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -22,7 +22,7 @@ import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacS
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
-private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
+private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable =>
private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
@@ -44,19 +44,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
trait JavaClassCompleter extends FlagAssigningCompleter
- def init() = {
- definitions.AnyValClass // force it.
-
- // establish root association to avoid cyclic dependency errors later
- rootMirror.classToScala(classOf[java.lang.Object]).initialize
-
- // println("initializing definitions")
- definitions.init()
- }
-
- def runtimeMirror(cl: ClassLoader): Mirror = mirrors get cl match {
- case Some(WeakReference(m)) => m
- case _ => createMirror(rootMirror.RootClass, cl)
+ def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized {
+ mirrors get cl match {
+ case Some(WeakReference(m)) => m
+ case _ => createMirror(rootMirror.RootClass, cl)
+ }
}
/** The API of a mirror for a reflective universe */
@@ -69,6 +61,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
import definitions._
+ override lazy val RootPackage = new RootPackage with SynchronizedTermSymbol
+ override lazy val RootClass = new RootClass with SynchronizedModuleClassSymbol
+ override lazy val EmptyPackage = new EmptyPackage with SynchronizedTermSymbol
+ override lazy val EmptyPackageClass = new EmptyPackageClass with SynchronizedModuleClassSymbol
+
/** The lazy type for root.
*/
override lazy val rootLoader = new LazyType with FlagAgnosticCompleter {
@@ -689,7 +686,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
completeRest()
}
- def completeRest(): Unit = thisUniverse.synchronized {
+ def completeRest(): Unit = gilSynchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -894,7 +891,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* The Scala package with given fully qualified name. Unlike `packageNameToScala`,
* this one bypasses the cache.
*/
- private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = {
+ private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = gilSynchronized {
val split = fullname lastIndexOf '.'
val ownerModule: ModuleSymbol =
if (split > 0) packageNameToScala(fullname take split) else this.RootPackage
@@ -1275,11 +1272,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case _ => abort(s"${sym}.enclosingRootClass = ${sym.enclosingRootClass}, which is not a RootSymbol")
}
- private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
- def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
- Map() ++ (definitions.syntheticCoreClasses map mapEntry)
- }
-
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
* <owner>.<name>, otherwise return NoSymbol.
* Exception: If owner is root and a java class with given name exists, create symbol in empty package instead
@@ -1289,20 +1281,20 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def missingHook(owner: Symbol, name: Name): Symbol = {
if (owner.hasPackageFlag) {
val mirror = mirrorThatLoaded(owner)
- // todo. this makes toolbox tests pass, but it's a mere workaround for SI-5865
-// assert((owner.info decl name) == NoSymbol, s"already exists: $owner . $name")
if (owner.isRootSymbol && mirror.tryJavaClass(name.toString).isDefined)
return mirror.EmptyPackageClass.info decl name
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- syntheticCoreClasses get ((owner.fullName, name)) foreach { tsym =>
- // synthetic core classes are only present in root mirrors
- // because Definitions.scala, which initializes and enters them, only affects rootMirror
- // therefore we need to enter them manually for non-root mirrors
- if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
- return tsym
- }
+ if (name == tpnme.AnyRef && owner.owner.isRoot && owner.name == tpnme.scala_)
+ // when we synthesize the scala.AnyRef symbol, we need to add it to the scope of the scala package
+ // the problem is that adding to the scope implies doing something like `owner.info.decls enter anyRef`
+ // which entails running a completer for the scala package
+ // which will try to unpickle the stuff in scala/package.class
+ // which will transitively load scala.AnyRef
+ // which doesn't exist yet, because it hasn't been added to the scope yet
+ // this missing hook ties the knot without introducing synchronization problems like before
+ return definitions.AnyRefClass
}
info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
super.missingHook(owner, name)
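The reworked runtimeMirror above consults a weak per-classloader cache under the GIL. A minimal sketch of that caching idiom with the mirror type stubbed out (MirrorCacheDemo and the Mirror stand-in are illustrative; the real code guards the lookup with gilSynchronized rather than a local lock):

    import scala.collection.mutable.WeakHashMap
    import scala.ref.WeakReference

    object MirrorCacheDemo {
      final class Mirror(val loader: ClassLoader) // stand-in for JavaMirror

      private val mirrors = new WeakHashMap[ClassLoader, WeakReference[Mirror]]()
      private val lock = new Object

      // consult the weak cache under a lock; create and register a mirror on a miss
      // (and also when the cached weak reference has already been cleared)
      def runtimeMirror(cl: ClassLoader): Mirror = lock.synchronized {
        mirrors get cl match {
          case Some(WeakReference(m)) => m
          case _ =>
            val m = new Mirror(cl)
            mirrors(cl) = new WeakReference(m)
            m
        }
      }
    }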
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 06a7db6289..54b75b8e5b 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -8,7 +8,7 @@ package runtime
*
* @contentDiagram hideNodes "*Api" "*Extractor"
*/
-class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
+class JavaUniverse extends internal.SymbolTable with JavaUniverseForce with ReflectSetup with runtime.SymbolTable { self =>
override def inform(msg: String): Unit = log(msg)
def picklerPhase = internal.SomePhase
@@ -21,10 +21,82 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+ val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
+
// can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
object treeInfo extends {
val global: JavaUniverse.this.type = JavaUniverse.this
} with internal.TreeInfo
init()
+
+ // ======= Initialization of runtime reflection =======
+ //
+ // This doc describes the carefully laid out sequence of actions used to initialize reflective universes.
+ //
+ // Before reading the text below, read up on the Mirrors section of the reflection pre-SIP
+ // https://docs.google.com/document/d/1nAwSw4TmMplsIlzh2shYLUJ5mVh3wndDa1Zm1H6an9A/edit.
+ // Take an especially good look at Figure 2, because it illustrates fundamental principles underlying runtime reflection:
+ // 1) For each universe we have one mirror per classloader
+ // 2) Package symbols are per-mirror
+ // 3) Other symbols are per-universe, which means that a symbol (e.g. Seq in the picture) might be shared between multiple owners
+ //
+ // Main challenges that runtime reflection presents wrt initialization are:
+ // 1) Extravagant completion scheme that enters package members on-demand rather than as a result of scanning a directory with class files.
+ // (That's a direct consequence of the fact that in the general case we can't enumerate all classes in a classloader.
+ // As Paul rightfully mentioned, we could special-case classloaders that point to filesystems, but that is left for future work).
+ // 2) Presence of synthetic symbols that aren't loaded by normal means (from classfiles) but are synthesized on-the-fly,
+ // and the necessity to propagate these synthetic symbols from rootMirror to other mirrors,
+ // complicated by the fact that such symbols depend on normal symbols (e.g. AnyRef depends on Object).
+ // 3) Necessity to remain thread-safe, which limits our options related to lazy initialization
+ // (E.g. we cannot use missingHook to enter synthetic symbols, because that's thread-unsafe).
+ //
+ // Directly addressing challenge #3, we create all synthetic symbols fully in advance during init().
+ // However, it's not as simple as just calling definitions.symbolsNotPresentInBytecode.
+ // Before doing that, we need to first initialize ObjectClass, then ScalaPackageClass, and only then deal with synthetics.
+ // Below you can find a detailed explanation for that.
+ //
+ // ### Why ScalaPackageClass? ###
+ //
+ // Forcing ScalaPackageClass first thing during startup is important, because syntheticCoreClasses such as AnyRefClass
+ // need to be entered into ScalaPackageClass, which entails calling ScalaPackageClass.info.decls.enter.
+ // If ScalaPackageClass isn't initialized by that moment, the following will happen for runtime reflection:
+ // 1) Initialization of ScalaPackageClass will trigger unpickling.
+ // 2) Unpickling will need to load some auxiliary types such as, for example, String.
+ // 3) To load String, runtime reflection will call mirrorDefining(classOf[String]).
+ // 4) This, in turn, will call runtimeMirror(classOf[String].getClassLoader).
+ // 5) For some classloader configurations, the resulting mirror will be different from rootMirror.
+ // 6) In that case, initialization of the resulting mirror will try to import definitions.syntheticCoreClasses into the mirror.
+ // 7) This will force all the lazy vals corresponding to syntheticCoreClasses.
+ // 8) By that time, the completer of ScalaPackageClass will have already called setInfo on ScalaPackageClass, so there won't be any stack overflow.
+ //
+ // So far so good, no crashes, no problems, right? Not quite.
+ // If the forcing of ScalaPackageClass was triggered from within one of the syntheticCoreClasses lazy vals,
+ // then the symbol computed by that lazy val will be entered twice: once during step 7 and once when the original call returns.
+ // To avoid this, we need to initialize ScalaPackageClass prior to the other synthetics.
+ //
+ // ### Why ObjectClass? ###
+ //
+ // 1) As explained in JavaMirrors.missingHook, initialization of ScalaPackageClass critically depends on AnyRefClass.
+ // 2) AnyRefClass is defined as "lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)",
+ // which means that initialization of AnyRefClass depends on ObjectClass.
+ // 3) ObjectClass is defined as "lazy val ObjectClass = getRequiredClass(sn.Object.toString)",
+ // which means that under some classloader configurations (see JavaMirrors.missingHook for more details)
+ // dereferencing ObjectClass might trigger an avalanche of initializations calling back into AnyRefClass
+ // while another AnyRefClass initializer is still on stack.
+ // 4) That will lead to AnyRefClass being entered two times (once when the recursive call returns and once when the original one returns)
+ // 5) That will crash in PackageScope.enter, which helpfully detects double-enters.
+ //
+ // Therefore, before initializing ScalaPackageClass, we must pre-initialize ObjectClass.
+ def init() {
+ definitions.init()
+
+ // workaround for http://groups.google.com/group/scala-internals/browse_thread/thread/97840ba4fd37b52e
+ // constructors are by definition single-threaded, so we initialize all lazy vals (and local objects) in advance
+ // in order to avoid deadlocks later (e.g. one thread holds a global reflection lock and waits for definitions.Something to initialize,
+ // whereas another thread holds a definitions.Something initialization lock and needs a global reflection lock to complete the initialization)
+
+ // TODO Convert this into a macro
+ force()
+ }
}
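A toy illustration of the "constructors are single-threaded, so force everything there" idea that init()/force() implements (ToyUniverse and its fields are made up):

    // forcing every lazy val in the constructor means no thread can later block
    // inside a lazy-val initializer while holding another lock, which is the
    // deadlock scenario described in the comment above
    class ToyUniverse {
      lazy val definitions: List[String] = List("Any", "AnyRef", "Object")
      lazy val mirrors: Map[String, String] = Map("root" -> "rootMirror")

      private def force(): Unit = {
        definitions // bare dereferences, just like the generated force()
        mirrors
      }
      force()
    }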
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
new file mode 100644
index 0000000000..8fd58c42be
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -0,0 +1,496 @@
+// Generated Code, validated by run/t6240-universe-code-gen.scala
+package scala.reflect
+package runtime
+
+trait JavaUniverseForce { self: runtime.JavaUniverse =>
+ def force() {
+ Literal(Constant(42)).duplicate
+ nme.flattenedName()
+ nme.raw
+ WeakTypeTag
+ TypeTag
+ TypeTag.Byte.tpe
+ TypeTag.Short.tpe
+ TypeTag.Char.tpe
+ TypeTag.Int.tpe
+ TypeTag.Long.tpe
+ TypeTag.Float.tpe
+ TypeTag.Double.tpe
+ TypeTag.Boolean.tpe
+ TypeTag.Unit.tpe
+ TypeTag.Any.tpe
+ TypeTag.AnyVal.tpe
+ TypeTag.AnyRef.tpe
+ TypeTag.Object.tpe
+ TypeTag.Nothing.tpe
+ TypeTag.Null.tpe
+
+ this.settings
+ this.treeInfo
+ // inaccessible: this.scala$reflect$runtime$Gil$$gil
+ // inaccessible: this.uniqueLock
+ // inaccessible: this._skolemizationLevel
+ // inaccessible: this._undoLog
+ // inaccessible: this._intersectionWitness
+ // inaccessible: this._volatileRecursions
+ // inaccessible: this._pendingVolatiles
+ // inaccessible: this._subsametypeRecursions
+ // inaccessible: this._pendingSubTypes
+ // inaccessible: this._basetypeRecursions
+ // inaccessible: this._pendingBaseTypes
+ // inaccessible: this._lubResults
+ // inaccessible: this._glbResults
+ // inaccessible: this._indent
+ // inaccessible: this._tostringRecursions
+ // inaccessible: this.atomicIds
+ // inaccessible: this.atomicExistentialIds
+ // inaccessible: this._recursionTable
+ // inaccessible: this.mirrors
+ this.rootMirror
+ this.treeBuild
+ // inaccessible: this.SimpleNameOrdering
+ this.traceSymbols
+ this.perRunCaches
+ this.FixedMirrorTreeCreator
+ this.FixedMirrorTypeCreator
+ this.BackquotedIdentifierAttachment
+ this.CompoundTypeTreeOriginalAttachment
+ this.noPrint
+ this.typeDebug
+ // inaccessible: this.maxFree
+ this.Range
+ // inaccessible: this.posAssigner
+ this.ConsoleWriter
+ this.RefTree
+ this.PackageDef
+ this.ClassDef
+ this.ModuleDef
+ this.ValOrDefDef
+ this.ValDef
+ this.DefDef
+ this.TypeDef
+ this.LabelDef
+ this.ImportSelector
+ this.Import
+ this.Template
+ this.Block
+ this.CaseDef
+ this.Alternative
+ this.Star
+ this.Bind
+ this.UnApply
+ this.ArrayValue
+ this.Function
+ this.Assign
+ this.AssignOrNamedArg
+ this.If
+ this.Match
+ this.Return
+ this.Try
+ this.Throw
+ this.New
+ this.Typed
+ this.TypeApply
+ this.Apply
+ this.ApplyDynamic
+ this.Super
+ this.This
+ this.Select
+ this.Ident
+ this.ReferenceToBoxed
+ this.Literal
+ this.Annotated
+ this.SingletonTypeTree
+ this.SelectFromTypeTree
+ this.CompoundTypeTree
+ this.AppliedTypeTree
+ this.TypeBoundsTree
+ this.ExistentialTypeTree
+ this.TypeTree
+ this.Modifiers
+ this.EmptyTree
+ this.noSelfType
+ this.pendingSuperCall
+ this.emptyValDef
+ this.EmptyTreeTypeSubstituter
+ // inaccessible: this.duplicator
+ this.UnmappableAnnotArg
+ this.LiteralAnnotArg
+ this.ArrayAnnotArg
+ this.NestedAnnotArg
+ this.ScalaSigBytes
+ this.AnnotationInfo
+ this.Annotation
+ this.UnmappableAnnotation
+ this.ErroneousAnnotation
+ this.ThrownException
+ // inaccessible: this.compactify
+ this.tpnme
+ this.fulltpnme
+ this.binarynme
+ this.nme
+ this.sn
+ this.Constant
+ this.definitions
+ this.LookupSucceeded
+ this.LookupAmbiguous
+ this.LookupInaccessible
+ this.LookupNotFound
+ this.Scope
+ this.EmptyScope
+ this.Flag
+ this.KindErrors
+ this.Kind
+ this.ProperTypeKind
+ this.TypeConKind
+ this.inferKind
+ // inaccessible: this.substTypeMapCache
+ this.UnmappableTree
+ this.ErrorType
+ this.WildcardType
+ this.BoundedWildcardType
+ this.NoType
+ this.NoPrefix
+ this.ThisType
+ this.SingleType
+ this.SuperType
+ this.TypeBounds
+ this.CompoundType
+ this.baseClassesCycleMonitor
+ this.RefinedType
+ this.ClassInfoType
+ this.ConstantType
+ this.TypeRef
+ this.MethodType
+ this.NullaryMethodType
+ this.PolyType
+ this.ExistentialType
+ this.OverloadedType
+ this.AntiPolyType
+ this.HasTypeMember
+ this.ArrayTypeRef
+ this.TypeVar
+ this.AnnotatedType
+ this.StaticallyAnnotatedType
+ this.NamedType
+ this.RepeatedType
+ this.ErasedValueType
+ this.GenPolyType
+ this.unwrapToClass
+ this.unwrapToStableClass
+ this.unwrapWrapperTypes
+ this.RecoverableCyclicReference
+ // inaccessible: this._undoLog
+ // inaccessible: this.numericLoBound
+ // inaccessible: this.numericHiBound
+ this.TypeConstraint
+ this.normalizeAliases
+ this.dropSingletonType
+ this.abstractTypesToBounds
+ this.dropIllegalStarTypes
+ this.IsDependentCollector
+ this.ApproximateDependentMap
+ this.wildcardToTypeVarMap
+ this.typeVarToOriginMap
+ this.ErroneousCollector
+ this.adaptToNewRunMap
+ // inaccessible: this.commonOwnerMapObj
+ this.SymbolKind
+ this.NoSymbol
+ this.CyclicReference
+ // inaccessible: this.TypeHistory
+ this.TermName
+ this.TypeName
+ this.BooleanFlag
+ this.WeakTypeTag
+ this.TypeTag
+ this.Expr
+ this.NoMods
+ definitions.JavaLangPackage
+ definitions.JavaLangPackageClass
+ definitions.ScalaPackage
+ definitions.ScalaPackageClass
+ definitions.RuntimePackage
+ definitions.RuntimePackageClass
+ definitions.AnyClass
+ definitions.AnyRefClass
+ definitions.ObjectClass
+ definitions.AnyRefTpe
+ definitions.AnyTpe
+ definitions.AnyValTpe
+ definitions.BoxedUnitTpe
+ definitions.NothingTpe
+ definitions.NullTpe
+ definitions.ObjectTpe
+ definitions.SerializableTpe
+ definitions.StringTpe
+ definitions.ThrowableTpe
+ definitions.ConstantTrue
+ definitions.ConstantFalse
+ definitions.ConstantNull
+ definitions.AnyValClass
+ definitions.RuntimeNothingClass
+ definitions.RuntimeNullClass
+ definitions.NothingClass
+ definitions.NullClass
+ definitions.ClassCastExceptionClass
+ definitions.IndexOutOfBoundsExceptionClass
+ definitions.InvocationTargetExceptionClass
+ definitions.MatchErrorClass
+ definitions.NonLocalReturnControlClass
+ definitions.NullPointerExceptionClass
+ definitions.ThrowableClass
+ definitions.UninitializedErrorClass
+ definitions.UninitializedFieldConstructor
+ definitions.PartialFunctionClass
+ definitions.AbstractPartialFunctionClass
+ definitions.SymbolClass
+ definitions.StringClass
+ definitions.StringModule
+ definitions.ClassClass
+ definitions.DynamicClass
+ definitions.SysPackage
+ definitions.UnqualifiedModules
+ definitions.UnqualifiedOwners
+ definitions.PredefModule
+ definitions.SpecializableModule
+ definitions.GroupOfSpecializable
+ definitions.ScalaRunTimeModule
+ definitions.SymbolModule
+ definitions.Symbol_apply
+ definitions.StringAddClass
+ definitions.ArrowAssocClass
+ definitions.StringAdd_$plus
+ definitions.ScalaNumberClass
+ definitions.TraitSetterAnnotationClass
+ definitions.DelayedInitClass
+ definitions.TypeConstraintClass
+ definitions.SingletonClass
+ definitions.SerializableClass
+ definitions.JavaSerializableClass
+ definitions.ComparableClass
+ definitions.JavaCloneableClass
+ definitions.JavaNumberClass
+ definitions.RemoteInterfaceClass
+ definitions.RemoteExceptionClass
+ definitions.ByNameParamClass
+ definitions.JavaRepeatedParamClass
+ definitions.RepeatedParamClass
+ definitions.ExprClassOf
+ definitions.ConsClass
+ definitions.IteratorClass
+ definitions.IterableClass
+ definitions.ListClass
+ definitions.SeqClass
+ definitions.StringBuilderClass
+ definitions.TraversableClass
+ definitions.ListModule
+ definitions.List_apply
+ definitions.NilModule
+ definitions.SeqModule
+ definitions.ArrayModule
+ definitions.ArrayModule_overloadedApply
+ definitions.ArrayClass
+ definitions.Array_apply
+ definitions.Array_update
+ definitions.Array_length
+ definitions.Array_clone
+ definitions.SoftReferenceClass
+ definitions.MethodClass
+ definitions.EmptyMethodCacheClass
+ definitions.MethodCacheClass
+ definitions.ScalaXmlTopScope
+ definitions.ScalaXmlPackage
+ definitions.ReflectPackage
+ definitions.ReflectApiPackage
+ definitions.ReflectRuntimePackage
+ definitions.PartialManifestClass
+ definitions.PartialManifestModule
+ definitions.FullManifestClass
+ definitions.FullManifestModule
+ definitions.OptManifestClass
+ definitions.NoManifest
+ definitions.TreesClass
+ definitions.TreesTreeType
+ definitions.TreeType
+ definitions.SubtreeType
+ definitions.ExprsClass
+ definitions.ExprClass
+ definitions.ClassTagModule
+ definitions.ClassTagClass
+ definitions.TypeTagsClass
+ definitions.WeakTypeTagClass
+ definitions.WeakTypeTagModule
+ definitions.TypeTagClass
+ definitions.TypeTagModule
+ definitions.ApiUniverseClass
+ definitions.JavaUniverseClass
+ definitions.MirrorClass
+ definitions.TypeCreatorClass
+ definitions.TreeCreatorClass
+ definitions.LiftableClass
+ definitions.MacroClass
+ definitions.MacroContextClass
+ definitions.MacroImplAnnotation
+ definitions.StringContextClass
+ definitions.QuasiquoteClass
+ definitions.QuasiquoteClass_api
+ definitions.QuasiquoteClass_api_apply
+ definitions.QuasiquoteClass_api_unapply
+ definitions.ScalaSignatureAnnotation
+ definitions.ScalaLongSignatureAnnotation
+ definitions.OptionClass
+ definitions.OptionModule
+ definitions.Option_apply
+ definitions.SomeClass
+ definitions.NoneModule
+ definitions.SomeModule
+ definitions.VarArityClass
+ definitions.ProductClass
+ definitions.TupleClass
+ definitions.FunctionClass
+ definitions.AbstractFunctionClass
+ definitions.ProductRootClass
+ definitions.Any_$eq$eq
+ definitions.Any_$bang$eq
+ definitions.Any_equals
+ definitions.Any_hashCode
+ definitions.Any_toString
+ definitions.Any_$hash$hash
+ definitions.Any_getClass
+ definitions.Any_isInstanceOf
+ definitions.Any_asInstanceOf
+ definitions.primitiveGetClassMethods
+ definitions.getClassMethods
+ definitions.Object_$hash$hash
+ definitions.Object_$eq$eq
+ definitions.Object_$bang$eq
+ definitions.Object_eq
+ definitions.Object_ne
+ definitions.Object_isInstanceOf
+ definitions.Object_asInstanceOf
+ definitions.Object_synchronized
+ definitions.String_$plus
+ definitions.ObjectRefClass
+ definitions.VolatileObjectRefClass
+ definitions.RuntimeStaticsModule
+ definitions.BoxesRunTimeModule
+ definitions.BoxesRunTimeClass
+ definitions.BoxedNumberClass
+ definitions.BoxedCharacterClass
+ definitions.BoxedBooleanClass
+ definitions.BoxedByteClass
+ definitions.BoxedShortClass
+ definitions.BoxedIntClass
+ definitions.BoxedLongClass
+ definitions.BoxedFloatClass
+ definitions.BoxedDoubleClass
+ definitions.Boxes_isNumberOrBool
+ definitions.Boxes_isNumber
+ definitions.BoxedUnitClass
+ definitions.BoxedUnitModule
+ definitions.AnnotationClass
+ definitions.ClassfileAnnotationClass
+ definitions.StaticAnnotationClass
+ definitions.BridgeClass
+ definitions.ElidableMethodClass
+ definitions.ImplicitNotFoundClass
+ definitions.MigrationAnnotationClass
+ definitions.ScalaStrictFPAttr
+ definitions.SwitchClass
+ definitions.TailrecClass
+ definitions.VarargsClass
+ definitions.uncheckedStableClass
+ definitions.uncheckedVarianceClass
+ definitions.BeanPropertyAttr
+ definitions.BooleanBeanPropertyAttr
+ definitions.CompileTimeOnlyAttr
+ definitions.DeprecatedAttr
+ definitions.DeprecatedNameAttr
+ definitions.DeprecatedInheritanceAttr
+ definitions.DeprecatedOverridingAttr
+ definitions.NativeAttr
+ definitions.RemoteAttr
+ definitions.ScalaInlineClass
+ definitions.ScalaNoInlineClass
+ definitions.SerialVersionUIDAttr
+ definitions.SerialVersionUIDAnnotation
+ definitions.SpecializedClass
+ definitions.ThrowsClass
+ definitions.TransientAttr
+ definitions.UncheckedClass
+ definitions.UncheckedBoundsClass
+ definitions.UnspecializedClass
+ definitions.VolatileAttr
+ definitions.BeanGetterTargetClass
+ definitions.BeanSetterTargetClass
+ definitions.FieldTargetClass
+ definitions.GetterTargetClass
+ definitions.ParamTargetClass
+ definitions.SetterTargetClass
+ definitions.ObjectTargetClass
+ definitions.ClassTargetClass
+ definitions.MethodTargetClass
+ definitions.LanguageFeatureAnnot
+ definitions.languageFeatureModule
+ definitions.experimentalModule
+ definitions.MacrosFeature
+ definitions.DynamicsFeature
+ definitions.PostfixOpsFeature
+ definitions.ReflectiveCallsFeature
+ definitions.ImplicitConversionsFeature
+ definitions.HigherKindsFeature
+ definitions.ExistentialsFeature
+ definitions.metaAnnotations
+ definitions.AnnotationDefaultAttr
+ definitions.isUnbox
+ definitions.isBox
+ definitions.isPhantomClass
+ definitions.syntheticCoreClasses
+ definitions.syntheticCoreMethods
+ definitions.hijackedCoreClasses
+ definitions.symbolsNotPresentInBytecode
+ definitions.isPossibleSyntheticParent
+ // inaccessible: definitions.boxedValueClassesSet
+ definitions.abbrvTag
+ definitions.numericWeight
+ definitions.boxedModule
+ definitions.boxedClass
+ definitions.refClass
+ definitions.volatileRefClass
+ definitions.boxMethod
+ definitions.unboxMethod
+ definitions.UnitClass
+ definitions.ByteClass
+ definitions.ShortClass
+ definitions.CharClass
+ definitions.IntClass
+ definitions.LongClass
+ definitions.FloatClass
+ definitions.DoubleClass
+ definitions.BooleanClass
+ definitions.Boolean_and
+ definitions.Boolean_or
+ definitions.Boolean_not
+ definitions.UnitTpe
+ definitions.ByteTpe
+ definitions.ShortTpe
+ definitions.CharTpe
+ definitions.IntTpe
+ definitions.LongTpe
+ definitions.FloatTpe
+ definitions.DoubleTpe
+ definitions.BooleanTpe
+ definitions.ScalaNumericValueClasses
+ definitions.ScalaValueClassesNoUnit
+ definitions.ScalaValueClasses
+
+
+ erasure.GenericArray
+ erasure.scalaErasure
+ erasure.specialScalaErasure
+ erasure.javaErasure
+ erasure.verifiedJavaErasure
+ erasure.boxingErasure
+ }
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 3e01a6df02..c6059ac402 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -17,37 +17,13 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* is found, a package is created instead.
*/
class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
-// def makePackage() {
-// println("wrong guess; making package "+clazz)
-// val ptpe = newPackageType(module.moduleClass)
-// for (sym <- List(clazz, module, module.moduleClass)) {
-// sym setFlag Flags.PACKAGE
-// sym setInfo ptpe
-// }
-// }
-
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
-// try {
- enteringPhaseNotLaterThan(picklerPhase) {
+ slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
-// } catch {
-// case ex: ClassNotFoundException => makePackage()
-// case ex: NoClassDefFoundError => makePackage()
- // Note: We catch NoClassDefFoundError because there are situations
- // where a package and a class have the same name except for capitalization.
- // It seems in this case the class is loaded even if capitalization differs
- // but then a NoClassDefFound error is issued with a ("wrong name: ...")
- // reason. (I guess this is a concession to Windows).
- // The present behavior is a bit too forgiving, in that it masks
- // all class load errors, not just wrong name errors. We should try
- // to be more discriminating. To get on the right track simply delete
- // the clause above and load a collection class such as collection.Iterable.
- // You'll see an error that class `parallel` has the wrong name.
-// }
}
}
override def load(sym: Symbol) = complete(sym)
@@ -91,12 +67,54 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
}
}
+
+ // Since runtime reflection doesn't have the luxury of enumerating all classes
+ // on the classpath, it has to materialize symbols for top-level definitions
+ // (packages, classes, objects) on demand.
+ //
+ // Someone asks us for a class named `foo.Bar`? Easy. Let's speculatively create
+ // a package named `foo` and then look up `newTypeName("Bar")` in its decls.
+ // This lookup, implemented in `SymbolLoaders.PackageScope`, tests the waters by
+ // trying to do `Class.forName("foo.Bar")` and then creates a ClassSymbol upon
+ // success (the whole story is a bit longer, but the rest is irrelevant here).
+ //
+ // That's all neat, but these non-deterministic mutations of the global symbol
+ // table give a lot of trouble in a multi-threaded setting. One of the more common
+ // reflection crashes happens when multiple threads trigger symbol
+ // materialization for the same symbol more than once, making subsequent
+ // reflective operations stumble upon outrageous stuff like overloaded packages.
+ //
+ // Short of significantly changing SymbolLoaders I see no other way than just
+ // to slap a global lock on materialization in runtime reflection.
class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
with SynchronizedScope {
assert(pkgClass.isType)
- // disable fingerprinting as we do not know entries beforehand
- private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected.
- override def lookupEntry(name: Name): ScopeEntry = {
+
+ // materializing multiple copies of the same symbol in PackageScope is a very common bug
+ // this override does its best to guard against it
+ override def enter[T <: Symbol](sym: T): T = {
+ // workaround for SI-7728
+ if (isCompilerUniverse) super.enter(sym)
+ else {
+ val existing = super.lookupEntry(sym.name)
+ // commented out to provide a hotfix for strange class files that javac sometimes emits
+ // see more details at: https://groups.google.com/forum/#!topic/scala-internals/hcnUFk75MgQ
+ // assert(existing == null || existing.sym.isMethod, s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
+ super.enter(sym)
+ }
+ }
+
+ override def enterIfNew[T <: Symbol](sym: T): T = {
+ val existing = super.lookupEntry(sym.name)
+ if (existing == null) enter(sym)
+ else existing.sym.asInstanceOf[T]
+ }
+
+ // package scopes need to synchronize on the GIL
+ // because lookupEntry might cause changes to the global symbol table
+ override def syncLockSynchronized[T](body: => T): T = gilSynchronized(body)
+ private val negatives = new mutable.HashSet[Name]
+ override def lookupEntry(name: Name): ScopeEntry = syncLockSynchronized {
val e = super.lookupEntry(name)
if (e != null)
e
@@ -119,8 +137,21 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val module = origOwner.info decl name.toTermName
assert(clazz != NoSymbol)
assert(module != NoSymbol)
- pkgClass.info.decls enter clazz
- pkgClass.info.decls enter module
+ // currentMirror.mirrorDefining(cls) might side effect by entering symbols into pkgClass.info.decls
+ // therefore, even though in the beginning of this method, super.lookupEntry(name) returned null
+ // entering clazz/module now will result in a double-enter assertion in PackageScope.enter
+ // here's how it might happen
+ // 1) we are the rootMirror
+ // 2) cls.getClassLoader is different from our classloader
+ // 3) mirrorDefining(cls) looks up a mirror corresponding to that classloader and cannot find it
+ // 4) mirrorDefining creates a new mirror
+ // 5) that triggers Mirror.init() of the new mirror
+ // 6) that triggers definitions.syntheticCoreClasses
+ // 7) that might materialize symbols and enter them into our scope (because syntheticCoreClasses live in rootMirror)
+ // 8) now we come back here and try to enter one of the now entered symbols => BAM!
+ // therefore we use enterIfNew rather than just enter
+ enterIfNew(clazz)
+ enterIfNew(module)
(clazz, module)
}
debugInfo(s"created $module/${module.moduleClass} in $pkgClass")
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index bcd4d16cde..ddbf3bd629 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -9,7 +9,7 @@ import scala.reflect.internal.Flags._
* It can be used either from a reflective universe (class scala.reflect.runtime.JavaUniverse), or else from
* a runtime compiler that uses reflection to get class information (class scala.tools.reflect.ReflectGlobal)
*/
-private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
+private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps with Gil with ThreadLocalStorage {
def info(msg: => String) =
if (settings.verbose) println("[reflect-compiler] "+msg)
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 6aa47a0405..c90901410a 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -14,20 +14,25 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
// BaseTypeSeqs
override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
- new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+ // only need to synchronize BaseTypeSeqs if they contain refined types
+ if (elems.filter(_.isInstanceOf[RefinedType]).nonEmpty) new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+ else new BaseTypeSeq(parents, elems)
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
- override def apply(i: Int): Type = synchronized { super.apply(i) }
- override def rawElem(i: Int) = synchronized { super.rawElem(i) }
- override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) }
- override def toList: List[Type] = synchronized { super.toList }
- override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) }
- override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) }
- override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) }
- override lazy val maxDepth = synchronized { maxDepthOfElems }
- override def toString = synchronized { super.toString }
-
- override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ override def apply(i: Int): Type = gilSynchronized { super.apply(i) }
+ override def rawElem(i: Int) = gilSynchronized { super.rawElem(i) }
+ override def typeSymbol(i: Int): Symbol = gilSynchronized { super.typeSymbol(i) }
+ override def toList: List[Type] = gilSynchronized { super.toList }
+ override def copy(head: Type, offset: Int): BaseTypeSeq = gilSynchronized { super.copy(head, offset) }
+ override def map(f: Type => Type): BaseTypeSeq = gilSynchronized { super.map(f) }
+ override def exists(p: Type => Boolean): Boolean = gilSynchronized { super.exists(p) }
+ override lazy val maxDepth = gilSynchronized { maxDepthOfElems }
+ override def toString = gilSynchronized { super.toString }
+
+ override def lateMap(f: Type => Type): BaseTypeSeq =
+ // only need to synchronize BaseTypeSeqs if they contain refined types
+ if (map(f).toList.filter(_.isInstanceOf[RefinedType]).nonEmpty) new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ else new MappedBaseTypeSeq(this, f)
}
// Scopes
@@ -36,15 +41,19 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
trait SynchronizedScope extends Scope {
- override def isEmpty: Boolean = synchronized { super.isEmpty }
- override def size: Int = synchronized { super.size }
- override def enter[T <: Symbol](sym: T): T = synchronized { super.enter(sym) }
- override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
- override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
- override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
- override def lookupAll(name: Name) = synchronized { super.lookupAll(name) }
- override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) }
- override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) }
- override def toList: List[Symbol] = synchronized { super.toList }
+ // we can keep this lock fine-grained, because methods of Scope don't do anything extraordinary, which makes deadlocks impossible
+ // fancy subclasses of internal.Scopes#Scope should do synchronization themselves (e.g. see PackageScope for an example)
+ private lazy val syncLock = new Object
+ def syncLockSynchronized[T](body: => T): T = if (isCompilerUniverse) body else syncLock.synchronized { body }
+ override def isEmpty: Boolean = syncLockSynchronized { super.isEmpty }
+ override def size: Int = syncLockSynchronized { super.size }
+ override def enter[T <: Symbol](sym: T): T = syncLockSynchronized { super.enter(sym) }
+ override def rehash(sym: Symbol, newname: Name) = syncLockSynchronized { super.rehash(sym, newname) }
+ override def unlink(e: ScopeEntry) = syncLockSynchronized { super.unlink(e) }
+ override def unlink(sym: Symbol) = syncLockSynchronized { super.unlink(sym) }
+ override def lookupAll(name: Name) = syncLockSynchronized { super.lookupAll(name) }
+ override def lookupEntry(name: Name) = syncLockSynchronized { super.lookupEntry(name) }
+ override def lookupNextEntry(entry: ScopeEntry) = syncLockSynchronized { super.lookupNextEntry(entry) }
+ override def toList: List[Symbol] = syncLockSynchronized { super.toList }
}
}
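The SynchronizedScope rewrite follows one idiom throughout: a per-instance lock object that is bypassed entirely in the (single-threaded) compiler universe. A hedged sketch of just that idiom (the isCompilerUniverse flag is assumed to come from the enclosing universe):

    trait SyncOps {
      def isCompilerUniverse: Boolean

      // fine-grained per-instance lock; safe as long as the guarded methods
      // never call back into other locked structures
      private lazy val syncLock = new Object
      def syncLockSynchronized[T](body: => T): T =
        if (isCompilerUniverse) body else syncLock.synchronized { body }
    }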
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 98cad45db1..298d0ffebd 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -3,17 +3,23 @@ package reflect
package runtime
import scala.reflect.io.AbstractFile
+import scala.collection.{ immutable, mutable }
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
- override protected def nextId() = synchronized { super.nextId() }
+ private lazy val atomicIds = new java.util.concurrent.atomic.AtomicInteger(0)
+ override protected def nextId() = atomicIds.incrementAndGet()
- override protected def freshExistentialName(suffix: String) =
- synchronized { super.freshExistentialName(suffix) }
+ private lazy val atomicExistentialIds = new java.util.concurrent.atomic.AtomicInteger(0)
+ override protected def nextExistentialId() = atomicExistentialIds.incrementAndGet()
+
+ private lazy val _recursionTable = mkThreadLocalStorage(immutable.Map.empty[Symbol, Int])
+ override def recursionTable = _recursionTable.get
+ override def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable.set(value)
// Set the fields which point companions at one another. Returns the module.
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
- synchronized { super.connectModuleToClass(m, moduleClass) }
+ gilSynchronized { super.connectModuleToClass(m, moduleClass) }
override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
@@ -25,35 +31,40 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
trait SynchronizedSymbol extends Symbol {
- override def rawflags = synchronized { super.rawflags }
- override def rawflags_=(x: Long) = synchronized { super.rawflags_=(x) }
-
- override def rawowner = synchronized { super.rawowner }
- override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
-
- override def validTo = synchronized { super.validTo }
- override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
-
- override def pos = synchronized { super.pos }
- override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
-
- override def privateWithin = synchronized { super.privateWithin }
- override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
-
- override def info = synchronized { super.info }
- override def info_=(info: Type) = synchronized { super.info_=(info) }
- override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
- override def rawInfo: Type = synchronized { super.rawInfo }
-
- override def typeParams: List[Symbol] = synchronized { super.typeParams }
-
- override def reset(completer: Type): this.type = synchronized { super.reset(completer) }
-
- override def infosString: String = synchronized { super.infosString }
-
- override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
- override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
-
+ def gilSynchronizedIfNotInited[T](body: => T): T = {
+ if (isFullyInitialized) body
+ else gilSynchronized { body }
+ }
+
+ override def validTo = gilSynchronizedIfNotInited { super.validTo }
+ override def info = gilSynchronizedIfNotInited { super.info }
+ override def rawInfo: Type = gilSynchronizedIfNotInited { super.rawInfo }
+
+ override def typeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ if (isCompilerUniverse) super.typeParams
+ else {
+ if (isMonomorphicType) Nil
+ else {
+ // analogously to the "info" getter, here we allow for two completions:
+ // one: sourceCompleter to LazyType, two: LazyType to completed type
+ if (validTo == NoPeriod)
+ rawInfo load this
+ if (validTo == NoPeriod)
+ rawInfo load this
+
+ rawInfo.typeParams
+ }
+ }
+ }
+ override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ if (isCompilerUniverse) super.unsafeTypeParams
+ else {
+ if (isMonomorphicType) Nil
+ else rawInfo.typeParams
+ }
+ }
+
+ override def isStable: Boolean = gilSynchronized { super.isStable }
// ------ creators -------------------------------------------------------------------
@@ -90,50 +101,38 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
- override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol = createModuleSymbol(name, pos, newFlags)
+ override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+ createModuleSymbol(name, pos, newFlags)
+
+ override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long) =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
- // TODO
- // override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long)
- // override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long)
+ override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long) =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
}
// ------- subclasses ---------------------------------------------------------------------
- trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol {
- override def name_=(x: Name) = synchronized { super.name_=(x) }
- override def rawname = synchronized { super.rawname }
- override def referenced: Symbol = synchronized { super.referenced }
- override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) }
- }
+ trait SynchronizedTermSymbol extends SynchronizedSymbol
trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
- override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) }
- override def paramss: List[List[Symbol]] = synchronized { super.paramss }
- override def returnType: Type = synchronized { super.returnType }
+ // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
+ // unfortunately we cannot elide this lock, because the cache depends on `pre`
+ private lazy val typeAsMemberOfLock = new Object
+ override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotInited { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
}
+ trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
+
trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol {
- override def name_=(x: Name) = synchronized { super.name_=(x) }
- override def rawname = synchronized { super.rawname }
- override def typeConstructor: Type = synchronized { super.typeConstructor }
- override def tpe_* : Type = synchronized { super.tpe_* }
- override def tpeHK : Type = synchronized { super.tpeHK }
+ // unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on
+ // temporarily assigning NoType to tpeCache to detect cyclic reference errors
+ private lazy val tpeLock = new Object
+ override def tpe_* : Type = gilSynchronizedIfNotInited { tpeLock.synchronized { super.tpe_* } }
}
- trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
- override def associatedFile = synchronized { super.associatedFile }
- override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
- override def thisSym: Symbol = synchronized { super.thisSym }
- override def thisType: Type = synchronized { super.thisType }
- override def typeOfThis: Type = synchronized { super.typeOfThis }
- override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) }
- override def children = synchronized { super.children }
- override def addChild(sym: Symbol) = synchronized { super.addChild(sym) }
- }
+ trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol
- trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
- override def sourceModule = synchronized { super.sourceModule }
- override def implicitMembers: Scope = synchronized { super.implicitMembers }
- }
+ trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol
}
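Two flavours of change appear above: synchronized counters become atomics, and per-run mutable state becomes thread-local. The id counter in isolation looks like this (IdsDemo is a made-up name):

    import java.util.concurrent.atomic.AtomicInteger

    object IdsDemo {
      private val atomicIds = new AtomicInteger(0)

      // lock-free replacement for `synchronized { super.nextId() }`
      def nextId(): Int = atomicIds.incrementAndGet()
    }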
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index c0146167df..de78e527a7 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -2,8 +2,9 @@ package scala
package reflect
package runtime
-import scala.collection.mutable.WeakHashMap
-import java.lang.ref.WeakReference
+import scala.collection.mutable
+import java.lang.ref.{WeakReference => jWeakRef}
+import scala.ref.{WeakReference => sWeakRef}
import scala.reflect.internal.Depth
/** This trait overrides methods in reflect.internal, bracketing
@@ -14,9 +15,10 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
// No sharing of map objects:
override protected def commonOwnerMap = new CommonOwnerMap
- private object uniqueLock
-
- private val uniques = WeakHashMap[Type, WeakReference[Type]]()
+ // we can keep this lock fine-grained, because super.unique just updates the cache
+ // and, in particular, doesn't call any reflection APIs, which makes deadlocks impossible
+ private lazy val uniqueLock = new Object
+ private val uniques = mutable.WeakHashMap[Type, jWeakRef[Type]]()
override def unique[T <: Type](tp: T): T = uniqueLock.synchronized {
// we need to have weak uniques for runtime reflection
// because unlike the normal compiler universe, reflective universe isn't organized in runs
@@ -30,7 +32,7 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
val result = if (inCache.isDefined) inCache.get.get else null
if (result ne null) result.asInstanceOf[T]
else {
- uniques(tp) = new WeakReference(tp)
+ uniques(tp) = new jWeakRef(tp)
tp
}
} else {
@@ -38,47 +40,50 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
}
}
- class SynchronizedUndoLog extends UndoLog {
- private val actualLock = new java.util.concurrent.locks.ReentrantLock
-
- final override def lock(): Unit = actualLock.lock()
- final override def unlock(): Unit = actualLock.unlock()
- }
+ private lazy val _skolemizationLevel = mkThreadLocalStorage(0)
+ override def skolemizationLevel = _skolemizationLevel.get
+ override def skolemizationLevel_=(value: Int) = _skolemizationLevel.set(value)
- override protected def newUndoLog = new SynchronizedUndoLog
+ private lazy val _undoLog = mkThreadLocalStorage(new UndoLog)
+ override def undoLog = _undoLog.get
- override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
- synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
+ private lazy val _intersectionWitness = mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]())
+ override def intersectionWitness = _intersectionWitness.get
- private object subsametypeLock
+ private lazy val _volatileRecursions = mkThreadLocalStorage(0)
+ override def volatileRecursions = _volatileRecursions.get
+ override def volatileRecursions_=(value: Int) = _volatileRecursions.set(value)
- override def isSameType(tp1: Type, tp2: Type): Boolean =
- subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
+ private lazy val _pendingVolatiles = mkThreadLocalStorage(new mutable.HashSet[Symbol])
+ override def pendingVolatiles = _pendingVolatiles.get
- override def isDifferentType(tp1: Type, tp2: Type): Boolean =
- subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
+ private lazy val _subsametypeRecursions = mkThreadLocalStorage(0)
+ override def subsametypeRecursions = _subsametypeRecursions.get
+ override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value)
- override def isSubType(tp1: Type, tp2: Type, depth: Depth): Boolean =
- subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
+ private lazy val _pendingSubTypes = mkThreadLocalStorage(new mutable.HashSet[SubTypePair])
+ override def pendingSubTypes = _pendingSubTypes.get
- private object lubglbLock
+ private lazy val _basetypeRecursions = mkThreadLocalStorage(0)
+ override def basetypeRecursions = _basetypeRecursions.get
+ override def basetypeRecursions_=(value: Int) = _basetypeRecursions.set(value)
- override def glb(ts: List[Type]): Type =
- lubglbLock.synchronized { super.glb(ts) }
+ private lazy val _pendingBaseTypes = mkThreadLocalStorage(new mutable.HashSet[Type])
+ override def pendingBaseTypes = _pendingBaseTypes.get
- override def lub(ts: List[Type]): Type =
- lubglbLock.synchronized { super.lub(ts) }
+ private lazy val _lubResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+ override def lubResults = _lubResults.get
- private object indentLock
-
- override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
- indentLock.synchronized { super.explain(op, p, tp1, arg2) }
- }
+ private lazy val _glbResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+ override def glbResults = _glbResults.get
- private object toStringLock
+ private lazy val _indent = mkThreadLocalStorage("")
+ override def indent = _indent.get
+ override def indent_=(value: String) = _indent.set(value)
- override protected def typeToString(tpe: Type): String =
- toStringLock.synchronized(super.typeToString(tpe))
+ private lazy val _tostringRecursions = mkThreadLocalStorage(0)
+ override def tostringRecursions = _tostringRecursions.get
+ override def tostringRecursions_=(value: Int) = _tostringRecursions.set(value)
/* The idea of caches is as follows.
* When in reflective mode, a cache is either null, or one sentinel
@@ -91,18 +96,18 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
*/
override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
- tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
+ gilSynchronized { super.defineUnderlyingOfSingleType(tpe) }
override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
- tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
+ gilSynchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
- tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
+ gilSynchronized { super.defineBaseClassesOfCompoundType(tpe) }
override protected def defineParentsOfTypeRef(tpe: TypeRef) =
- tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
+ gilSynchronized { super.defineParentsOfTypeRef(tpe) }
override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
- tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
+ gilSynchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
}
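The uniques cache above weakly interns types under a dedicated fine-grained lock. A standalone sketch of the same weak-interning shape over strings (WeakInternDemo is a made-up name):

    import scala.collection.mutable
    import java.lang.ref.{WeakReference => jWeakRef}

    object WeakInternDemo {
      private val lock = new Object
      private val uniques = mutable.WeakHashMap[String, jWeakRef[String]]()

      // return the canonical instance if one is still weakly reachable,
      // otherwise remember and return the argument
      def unique(s: String): String = lock.synchronized {
        val cached = uniques.get(s).map(_.get).orNull
        if (cached ne null) cached
        else { uniques(s) = new jWeakRef(s); s }
      }
    }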
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
new file mode 100644
index 0000000000..5edc051461
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -0,0 +1,28 @@
+package scala.reflect
+package runtime
+
+import java.lang.Thread._
+
+private[reflect] trait ThreadLocalStorage {
+ self: SymbolTable =>
+
+ // see a discussion at scala-internals for more information:
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/337ce68aa5e51f79
+ trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
+ private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
+ // TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
+ val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ def get: T = {
+ if (values containsKey currentThread) values.get(currentThread)
+ else {
+ val value = initialValue
+ values.putIfAbsent(currentThread, value)
+ value
+ }
+ }
+ def set(newValue: T): Unit = {
+ values.put(currentThread, newValue)
+ }
+ }
+ @inline final def mkThreadLocalStorage[T](x: => T): ThreadLocalStorage[T] = new MyThreadLocalStorage(x)
+}
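Consumers wrap a piece of per-thread state as a get/set pair over mkThreadLocalStorage. A standalone sketch of the same idea plus a usage example (PerThread and the demo are illustrative, not part of the patch):

    import java.util.concurrent.ConcurrentHashMap
    import java.lang.Thread.currentThread

    // per-thread state in a ConcurrentHashMap keyed by Thread,
    // with a lazily computed initial value, as in MyThreadLocalStorage above
    class PerThread[T](initialValue: => T) {
      private val values = new ConcurrentHashMap[Thread, T]()
      def get: T =
        if (values containsKey currentThread) values.get(currentThread)
        else {
          values.putIfAbsent(currentThread, initialValue)
          values.get(currentThread)
        }
      def set(newValue: T): Unit = values.put(currentThread, newValue)
    }

    object PerThreadDemo {
      def main(args: Array[String]): Unit = {
        val indent = new PerThread("") // each thread sees and mutates its own value
        indent.set("  ")
        println("[" + indent.get + "]") // prints [  ]
      }
    }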
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
new file mode 100644
index 0000000000..6e2890e536
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
@@ -0,0 +1,68 @@
+package scala.reflect
+package runtime
+
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
+/** A cache that maintains a bijection between Java reflection type `J`
+ * and Scala reflection type `S`.
+ *
+ * The cache is two-way weak (i.e. it is powered by weak references),
+ * so that Java artifacts do not prevent Scala artifacts from being garbage collected,
+ * nor the other way around.
+ */
+private[runtime] trait TwoWayCaches { self: SymbolTable =>
+ class TwoWayCache[J, S] {
+
+ private val toScalaMap = new WeakHashMap[J, WeakReference[S]]
+ private val toJavaMap = new WeakHashMap[S, WeakReference[J]]
+
+ def enter(j: J, s: S) = gilSynchronized {
+ // debugInfo("cached: "+j+"/"+s)
+ toScalaMap(j) = new WeakReference(s)
+ toJavaMap(s) = new WeakReference(j)
+ }
+
+ private object SomeRef {
+ def unapply[T](optRef: Option[WeakReference[T]]): Option[T] =
+ if (optRef.nonEmpty) {
+ val result = optRef.get.get
+ if (result != null) Some(result) else None
+ } else None
+ }
+
+ def toScala(key: J)(body: => S): S = gilSynchronized {
+ toScalaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(key, result)
+ result
+ }
+ }
+
+ def toJava(key: S)(body: => J): J = gilSynchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(result, key)
+ result
+ }
+ }
+
+ def toJavaOption(key: S)(body: => Option[J]): Option[J] = gilSynchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ Some(v)
+ case _ =>
+ val result = body
+ for (value <- result) enter(value, key)
+ result
+ }
+ }
+ }
+}
+
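For orientation, a hedged sketch of how callers typically consume such a cache: look up one direction of the bijection and compute-and-memoize on a miss. The names below (Cache, classCache, classToScala) are illustrative only, and a plain lock stands in for gilSynchronized so the sketch is self-contained:

    import scala.collection.mutable.WeakHashMap
    import java.lang.ref.WeakReference

    object TwoWayCacheDemo {
      // stripped-down, one-directional slice of TwoWayCache, enough to show toScala
      class Cache[J <: AnyRef, S <: AnyRef] {
        private val lock = new Object
        private val toScalaMap = new WeakHashMap[J, WeakReference[S]]

        def toScala(key: J)(body: => S): S = lock.synchronized {
          val cached = toScalaMap.get(key) match {
            case Some(ref) => ref.get // may be null if already collected
            case None      => null.asInstanceOf[S]
          }
          if (cached != null) cached
          else {
            val result = body // runs only on a cache miss
            toScalaMap(key) = new WeakReference(result)
            result
          }
        }
      }

      def main(args: Array[String]): Unit = {
        val classCache = new Cache[Class[_], String]
        def classToScala(cls: Class[_]): String =
          classCache.toScala(cls) { cls.getName }
        println(classToScala(classOf[String])) // java.lang.String
      }
    }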