author    Eugene Burmako <xeno.by@gmail.com>  2013-10-19 06:46:27 -0700
committer Eugene Burmako <xeno.by@gmail.com>  2013-10-19 06:46:27 -0700
commit    8848f241616627b0c5beca38a5107c4eca3e10fd (patch)
tree      7f6e5f05e3ab98d86049e493ded1f31be3b4f62d /src/compiler/scala
parent    fc892176ec4fd877bae4fc31ad7769ec15bbd858 (diff)
parent    d3e04daa658170ffc58f2e1ea3da0f4d55f001a7 (diff)
Merge pull request #3007 from densh/pull/fresh-name-and-package-support
Add support for packages into quasiquotes and toolbox, improve handling of fresh names, unhardcode quasiquote expansion logic
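A rough usage sketch (not part of the commit; the package name and extraction shape are illustrative) of what this merge enables against the 2.11 reflection API:

    import scala.reflect.runtime.universe._
    import scala.tools.reflect.ToolBox

    // Quasiquotes can now construct and deconstruct top-level packaging.
    val pkg = q"package mypkg { class C }"
    val q"package $ref { ..$topstats }" = pkg   // ref ~ Ident(mypkg), topstats ~ List(class C)

    // The toolbox parser now goes through parseStatsOrPackages, so package syntax parses too.
    val tb = scala.reflect.runtime.currentMirror.mkToolBox()
    val parsed = tb.parse("package mypkg { object O }")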
Diffstat (limited to 'src/compiler/scala')
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Names.scala            |   4
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Parsers.scala          |   2
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala               |  10
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala                         |   6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala             | 238
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala            |  50
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala         |  29
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala      |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala      |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala             |   3
-rw-r--r--  src/compiler/scala/tools/nsc/util/FreshNameCreator.scala          |  40
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala             |   2
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Holes.scala          |   2
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala        |  32
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala   |  21
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala    |  14
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala       | 113
17 files changed, 321 insertions, 249 deletions
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index e535754a98..c2f14cf0f1 100644
--- a/src/compiler/scala/reflect/macros/contexts/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -4,7 +4,7 @@ package contexts
trait Names {
self: Context =>
- lazy val freshNameCreator = callsiteTyper.context.unit.fresh
+ def freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
freshName()
@@ -16,7 +16,7 @@ trait Names {
freshName[NameType](name)
def freshName(): String =
- freshNameCreator.newName()
+ freshName("fresh$")
def freshName(name: String): String =
freshNameCreator.newName(name)
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index ae6488b5a8..88cfea8157 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -11,7 +11,7 @@ trait Parsers {
val sreporter = new StoreReporter()
val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
val parser = newUnitParser(unit)
- val tree = gen.mkTreeOrBlock(parser.parseStats())
+ val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
sreporter.infos.foreach {
case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 1de5c1f626..df5952a4cf 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,8 +5,7 @@
package scala.tools.nsc
-import util.FreshNameCreator
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
import scala.tools.nsc.reporters.Reporter
@@ -27,10 +26,9 @@ trait CompilationUnits { global: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- val fresh: FreshNameCreator = new FreshNameCreator.Default
-
- def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
+ implicit val fresh: FreshNameCreator = new FreshNameCreator
+ def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
+ def freshTypeName(prefix: String) = global.freshTypeName(prefix)
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 1cd3e0ec4b..1c5354502b 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -110,9 +110,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** A spare instance of TreeBuilder left for backwards compatibility. */
- lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new UnitTreeBuilder {
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new TreeBuilder {
val global: Global.this.type = Global.this;
- val unit = currentUnit
+ def unit = currentUnit
+ def source = currentUnit.source
}
/** Fold constants */
@@ -1049,6 +1050,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentRun: Run = curRun
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+ def currentFreshNameCreator = currentUnit.fresh
def isGlobalInitialized = (
definitions.isDefinitionsInitialized
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 34f3fcce9f..1d5f35b7d6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -13,9 +13,8 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, Position }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
import Tokens._
-import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -41,11 +40,8 @@ trait ParsersCommon extends ScannersCommon { self =>
*/
abstract class ParserCommon {
val in: ScannerCommon
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def deprecationWarning(off: Int, msg: String): Unit
- def accept(token: Int): Int
+ def deprecationWarning(off: Offset, msg: String): Unit
+ def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -164,21 +160,13 @@ self =>
val in = newScanner()
in.init()
- private val globalFresh = new FreshNameCreator.Default
-
def unit = global.currentUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
- def o2p(offset: Int): Position = Position.offset(source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
// suppress warnings; silent abort on errors
- def warning(offset: Int, msg: String) {}
- def deprecationWarning(offset: Int, msg: String) {}
+ def warning(offset: Offset, msg: String) {}
+ def deprecationWarning(offset: Offset, msg: String) {}
- def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
@@ -225,14 +213,11 @@ self =>
override def newScanner() = new UnitScanner(unit, patches)
- override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
- override def warning(offset: Int, msg: String) {
+ override def warning(offset: Offset, msg: String) {
unit.warning(o2p(offset), msg)
}
- override def deprecationWarning(offset: Int, msg: String) {
+ override def deprecationWarning(offset: Offset, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
@@ -250,7 +235,7 @@ self =>
for ((offset, msg) <- syntaxErrors)
unit.error(o2p(offset), msg)
- override def syntaxError(offset: Int, msg: String) {
+ override def syntaxError(offset: Offset, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
else unit.error(o2p(offset), msg)
}
@@ -274,9 +259,10 @@ self =>
}
}
- final val Local = 0
- final val InBlock = 1
- final val InTemplate = 2
+ type Location = Int
+ final val Local: Location = 0
+ final val InBlock: Location = 1
+ final val InTemplate: Location = 2
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
@@ -295,23 +281,26 @@ self =>
abstract class Parser extends ParserCommon { parser =>
val in: Scanner
-
def unit: CompilationUnit
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, mid: Int, end: Int): Position
+ def source: SourceFile
- /** whether a non-continuable syntax error has been seen */
- private var lastErrorOffset : Int = -1
-
- class ParserTreeBuilder extends UnitTreeBuilder {
+ class ParserTreeBuilder extends TreeBuilder {
val global: self.global.type = self.global
def unit = parser.unit
+ def source = parser.source
}
val treeBuilder = new ParserTreeBuilder
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+ implicit def fresh: FreshNameCreator = unit.fresh
+
+ def o2p(offset: Offset): Position = Position.offset(source, offset)
+ def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+ def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
+ def r2p(offset: Offset): Position = r2p(offset, offset)
+
+ /** whether a non-continuable syntax error has been seen */
+ private var lastErrorOffset : Int = -1
/** The types of the context bounds of type parameters of the surrounding class
*/
@@ -344,9 +333,10 @@ self =>
*/
def parse(): Tree = parseRule(_.parseStartRule())
- /** This is alternative entry point for repl, script runner, toolbox and quasiquotes.
+ /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
*/
def parseStats(): List[Tree] = parseRule(_.templateStats())
+ def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
@@ -507,7 +497,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Int) {
+ protected def skip(targetToken: Token) {
var nparens = 0
var nbraces = 0
while (true) {
@@ -535,17 +525,17 @@ self =>
in.nextToken()
}
}
- def warning(offset: Int, msg: String): Unit
+ def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
syntaxError(pos pointOrElse in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String): Unit
+ def syntaxError(offset: Offset, msg: String): Unit
def syntaxError(msg: String, skipIt: Boolean) {
syntaxError(in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
// no more errors on this token.
@@ -569,10 +559,10 @@ self =>
}
def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
- def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
+ def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
- def accept(token: Int): Int = {
+ def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
@@ -632,8 +622,6 @@ self =>
def isAnnotation: Boolean = in.token == AT
- def isCaseDefStart: Boolean = in.token == CASE
-
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -660,14 +648,14 @@ self =>
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
- def isLiteralToken(token: Int) = token match {
+ def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
- def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+ def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -676,7 +664,7 @@ self =>
def isExprIntro: Boolean = isExprIntroToken(in.token)
- def isTypeIntroToken(token: Int): Boolean = token match {
+ def isTypeIntroToken(token: Token): Boolean = token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
@@ -684,7 +672,9 @@ self =>
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
- def isStatSep(token: Int): Boolean =
+ def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+ def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
@@ -699,10 +689,10 @@ self =>
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T = atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T = atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T = atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
+ def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
@@ -740,7 +730,7 @@ self =>
}
/** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
- final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -783,7 +773,7 @@ self =>
}
}
- def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+ def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) =
if (treeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
@@ -823,7 +813,7 @@ self =>
def argType(): Tree
def functionArgType(): Tree
- private def tupleInfixType(start: Int) = {
+ private def tupleInfixType(start: Offset) = {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
@@ -1060,7 +1050,7 @@ self =>
t
}
- def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+ def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
if (typeOK && in.token == TYPE) {
in.nextToken()
atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
@@ -1118,7 +1108,7 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = atPos(start) {
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
@@ -1160,7 +1150,7 @@ self =>
/** Consume a USCORE and create a fresh synthetic placeholder param. */
private def freshPlaceholder(): Tree = {
val start = in.offset
- val pname = freshName("x$")
+ val pname = freshTermName()
in.nextToken()
val id = atPos(start)(Ident(pname))
val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
@@ -1215,12 +1205,12 @@ self =>
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Int) {
+ def newLineOptWhenFollowedBy(token: Offset) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Int => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1235,7 +1225,7 @@ self =>
if (in.token == COLON) { in.nextToken(); typ() }
else TypeTree()
- def typeOrInfixType(location: Int): Tree =
+ def typeOrInfixType(location: Location): Tree =
if (location == Local) typ()
else startInfixType()
@@ -1246,7 +1236,7 @@ self =>
* WildcardType ::= `_' TypeBounds
* }}}
*/
- def wildcardType(start: Int) = {
+ def wildcardType(start: Offset) = {
val pname = freshTypeName("_$")
val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
@@ -1272,7 +1262,7 @@ self =>
/* hook for IDE, unlike expression can be stubbed
* don't use for any tree that can be inspected in the parser!
*/
- def statement(location: Int): Tree = expr(location) // !!! still needed?
+ def statement(location: Location): Tree = expr(location) // !!! still needed?
/** {{{
* Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
@@ -1299,9 +1289,9 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = withPlaceholders(expr0(location), isAny = false)
+ def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
- def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+ def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
@@ -1326,7 +1316,7 @@ self =>
in.nextToken()
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
- if (isCaseDefStart) caseClauses()
+ if (in.token == CASE) caseClauses()
else catchFromExpr()
}
}
@@ -1459,7 +1449,7 @@ self =>
* }}}
*/
- def implicitClosure(start: Int, location: Int): Tree = {
+ def implicitClosure(start: Offset, location: Location): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
@@ -1637,7 +1627,7 @@ self =>
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
- if (isCaseDefStart) Match(EmptyTree, caseClauses())
+ if (in.token == CASE) Match(EmptyTree, caseClauses())
else block()
}
}
@@ -1723,7 +1713,7 @@ self =>
while (in.token == IF) enums += makeFilter(in.offset, guard())
}
- def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
+ def makeFilter(start: Offset, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
/* -------- PATTERNS ------------------------------------------- */
@@ -2245,7 +2235,7 @@ self =>
}
}
val nameOffset = in.offset
- // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+ // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
val pname: TypeName = wildcardOrIdent().toTypeName
val param = atPos(start, nameOffset) {
val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2287,7 +2277,7 @@ self =>
t setPos o2p(in.offset)
}
- def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
+ def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2406,7 +2396,7 @@ self =>
* | type [nl] TypeDcl
* }}}
*/
- def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+ def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
@@ -2457,7 +2447,6 @@ self =>
EmptyTree
}
def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
val trees =
makePatDef(newmods,
if (tp.isEmpty) p
@@ -2536,7 +2525,7 @@ self =>
}
}
- def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
var newmods = mods
// contextBoundBuf is for context bounded type parameters of the form
@@ -2619,7 +2608,7 @@ self =>
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
* }}}
*/
- def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
@@ -2652,7 +2641,7 @@ self =>
* | [override] trait TraitDef
* }}}
*/
- def tmplDef(pos: Int, mods: Modifiers): Tree = {
+ def tmplDef(pos: Offset, mods: Modifiers): Tree = {
if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
@@ -2676,7 +2665,7 @@ self =>
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
* }}}
*/
- def classDef(start: Int, mods: Modifiers): ClassDef = {
+ def classDef(start: Offset, mods: Modifiers): ClassDef = {
in.nextToken()
val nameOffset = in.offset
val name = identForType()
@@ -2716,7 +2705,7 @@ self =>
* ObjectDef ::= Id ClassTemplateOpt
* }}}
*/
- def objectDef(start: Int, mods: Modifiers): ModuleDef = {
+ def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
in.nextToken()
val nameOffset = in.offset
val name = ident()
@@ -2741,10 +2730,9 @@ self =>
*/
def packageObjectDef(start: Offset): PackageDef = {
val defn = objectDef(in.offset, NoMods)
- val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
- val pid = atPos(o2p(defn.pos.start))(Ident(defn.name))
-
- makePackaging(start, pid, module :: Nil)
+ val pidPos = o2p(defn.pos.startOrPoint)
+ val pkgPos = r2p(start, pidPos.point)
+ gen.mkPackageObject(defn, pidPos, pkgPos)
}
def packageOrPackageObject(start: Offset): Tree = (
if (in.token == OBJECT)
@@ -2756,7 +2744,7 @@ self =>
)
// TODO - eliminate this and use "def packageObjectDef" (see call site of this
// method for small elaboration.)
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
+ def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
case ModuleDef(mods, name, impl) =>
makePackaging(
start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
@@ -2831,7 +2819,7 @@ self =>
* TraitExtends ::= `extends' | `<:'
* }}}
*/
- def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
@@ -2894,14 +2882,26 @@ self =>
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging. */
- def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
- def makeEmptyPackage(start: Int, stats: List[Tree]): PackageDef = (
+ def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
)
+ def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ def default(tok: Token) =
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+ while (!isStatSeqEnd) {
+ stats ++= stat.applyOrElse(in.token, default)
+ acceptStatSepOpt()
+ }
+ stats.toList
+ }
+
/** {{{
* TopStatSeq ::= TopStat {semi TopStat}
* TopStat ::= Annotations Modifiers TmplDef
@@ -2911,24 +2911,15 @@ self =>
* |
* }}}
*/
- def topStatSeq(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- stats ++= (in.token match {
- case PACKAGE =>
- packageOrPackageObject(in.skipToken()) :: Nil
- case IMPORT =>
- in.flushDoc
- importClause()
- case x if isAnnotation || isTemplateIntro || isModifier =>
- joinComment(topLevelTmplDef :: Nil)
- case _ =>
- if (isStatSep) Nil
- else syntaxErrorOrIncompleteAnd("expected class or object definition", skipIt = true)(Nil)
- })
- acceptStatSepOpt()
- }
- stats.toList
+ def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+ def topStat: PartialFunction[Token, List[Tree]] = {
+ case PACKAGE =>
+ packageOrPackageObject(in.skipToken()) :: Nil
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isAnnotation || isTemplateIntro || isModifier =>
+ joinComment(topLevelTmplDef :: Nil)
}
/** {{{
@@ -2972,25 +2963,20 @@ self =>
* |
* }}}
*/
- def templateStats(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- if (in.token == IMPORT) {
- in.flushDoc
- stats ++= importClause()
- } else if (isDefIntro || isModifier || isAnnotation) {
- stats ++= joinComment(nonLocalDefOrDcl)
- } else if (isExprIntro) {
- in.flushDoc
- stats += statement(InTemplate)
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
- }
- acceptStatSepOpt()
- }
- stats.toList
+ def templateStats(): List[Tree] = statSeq(templateStat)
+ def templateStat: PartialFunction[Token, List[Tree]] = {
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isDefIntro || isModifier || isAnnotation =>
+ joinComment(nonLocalDefOrDcl)
+ case _ if isExprIntro =>
+ in.flushDoc
+ statement(InTemplate) :: Nil
}
+ def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
/** {{{
* RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
@@ -3057,14 +3043,14 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && !isCaseDefStart) {
+ while (!isStatSeqEnd && !isCaseDefEnd) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && !isCaseDefStart) acceptStatSep()
+ if (!isCaseDefEnd) acceptStatSep()
}
else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 6957f85689..b12be1a056 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -21,19 +21,24 @@ trait ScannersCommon {
val global : Global
import global._
+ /** Offset into source character array */
+ type Offset = Int
+
+ type Token = Int
+
trait CommonTokenData {
- def token: Int
+ def token: Token
def name: TermName
}
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def error (off: Int, msg: String): Unit
- def incompleteInputError(off: Int, msg: String): Unit
- def deprecationWarning(off: Int, msg: String): Unit
+ def error(off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
}
- def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+ def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
val high = names.last._1
@@ -48,13 +53,10 @@ trait Scanners extends ScannersCommon {
val global : Global
import global._
- /** Offset into source character array */
- type Offset = Int
-
trait TokenData extends CommonTokenData {
/** the next token */
- var token: Int = EMPTY
+ var token: Token = EMPTY
/** the offset of the first character of the current token */
var offset: Offset = 0
@@ -169,7 +171,7 @@ trait Scanners extends ScannersCommon {
def isAtEnd = charOffset >= buf.length
- def resume(lastCode: Int) = {
+ def resume(lastCode: Token) = {
token = lastCode
if (next.token != EMPTY && !reporter.hasErrors)
syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -194,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Int = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER) {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -225,7 +227,7 @@ trait Scanners extends ScannersCommon {
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
- var sepRegions: List[Int] = List()
+ var sepRegions: List[Token] = List()
// Get next token ------------------------------------------------------------
@@ -583,7 +585,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token start a statement? */
- def inFirstOfStat(token: Int) = token match {
+ def inFirstOfStat(token: Token) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -593,7 +595,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token end a statement? */
- def inLastOfStat(token: Int) = token match {
+ def inLastOfStat(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -1122,7 +1124,7 @@ trait Scanners extends ScannersCommon {
def applyBracePatch(): Boolean = false
/** overridden in UnitScanners */
- def parenBalance(token: Int) = 0
+ def parenBalance(token: Token) = 0
/** overridden in UnitScanners */
def healBraces(): List[BracePatch] = List()
@@ -1137,7 +1139,7 @@ trait Scanners extends ScannersCommon {
// ------------- keyword configuration -----------------------------------
- private val allKeywords = List[(Name, Int)](
+ private val allKeywords = List[(Name, Token)](
nme.ABSTRACTkw -> ABSTRACT,
nme.CASEkw -> CASE,
nme.CATCHkw -> CATCH,
@@ -1191,8 +1193,8 @@ trait Scanners extends ScannersCommon {
nme.MACROkw -> IDENTIFIER,
nme.THENkw -> IDENTIFIER)
- private var kwOffset: Int = -1
- private val kwArray: Array[Int] = {
+ private var kwOffset: Offset = -1
+ private val kwArray: Array[Token] = {
val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
kwOffset = offset
arr
@@ -1203,7 +1205,7 @@ trait Scanners extends ScannersCommon {
// Token representation ----------------------------------------------------
/** Returns the string representation of given token. */
- def token2string(token: Int): String = (token: @switch) match {
+ def token2string(token: Token): String = (token: @switch) match {
case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
case CHARLIT => "character literal"
case INTLIT => "integer literal"
@@ -1234,7 +1236,7 @@ trait Scanners extends ScannersCommon {
}
}
- class MalformedInput(val offset: Int, val msg: String) extends Exception
+ class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
* Useful for looking inside source files that are not currently compiled to see what's there
@@ -1262,7 +1264,7 @@ trait Scanners extends ScannersCommon {
lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
- override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+ override def parenBalance(token: Token) = parensAnalyzer.balance(token)
override def healBraces(): List[BracePatch] = {
var patches: List[BracePatch] = List()
@@ -1412,7 +1414,7 @@ trait Scanners extends ScannersCommon {
var tabSeen = false
- def line(offset: Int): Int = {
+ def line(offset: Offset): Int = {
def findLine(lo: Int, hi: Int): Int = {
val mid = (lo + hi) / 2
if (offset < lineStart(mid)) findLine(lo, mid - 1)
@@ -1423,7 +1425,7 @@ trait Scanners extends ScannersCommon {
else findLine(0, lineStart.length - 1)
}
- def column(offset: Int): Int = {
+ def column(offset: Offset): Int = {
var col = 0
var i = offset - 1
while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1485,6 +1487,6 @@ trait Scanners extends ScannersCommon {
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Int, msg: String) {}
+ override def error(offset: Offset, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 59abf99844..28d5aefc2b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,24 +8,21 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
*/
abstract class TreeBuilder {
-
val global: Global
import global._
- def freshName(): Name = freshName("x$")
- def freshTermName(): TermName = freshTermName("x$")
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, point: Int, end: Int): Position
+ implicit def fresh: FreshNameCreator = unit.fresh
+ def o2p(offset: Int): Position = Position.offset(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
@@ -325,7 +322,7 @@ abstract class TreeBuilder {
/* If `pat` is not yet a `Bind` wrap it in one with a fresh name */
def makeBind(pat: Tree): Tree = pat match {
case Bind(_, _) => pat
- case _ => Bind(freshName(), pat) setPos pat.pos
+ case _ => Bind(freshTermName(), pat) setPos pat.pos
}
/* A reference to the name bound in Bind `pat`. */
@@ -416,7 +413,7 @@ abstract class TreeBuilder {
* }
*/
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
- val binder = freshTermName("x")
+ val binder = freshTermName()
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
@@ -520,13 +517,3 @@ abstract class TreeBuilder {
}
}
}
-
-abstract class UnitTreeBuilder extends TreeBuilder {
- import global._
- def unit: CompilationUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
- def o2p(offset: Int): Position = Position.offset(unit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
-}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index dea4c46e79..03aad71165 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -289,7 +289,7 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo {
+ val s = context.owner.newValue(unit.freshTermName(), arg.pos, newFlags = ARTIFACT) setInfo {
val tp = if (byName) functionType(Nil, argTpe) else argTpe
uncheckedBounds(tp)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index f3e8ac64f4..f69b8a9697 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -281,7 +281,7 @@ trait PatternTypers {
else TypeBounds.lower(tpSym.tpeHK)
)
// origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds)
skolemBuffer += skolem
logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*)
case tp1 => tp1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 97e9d6ef52..c385e7533a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -3459,8 +3459,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
- if (unapply == QuasiquoteClass_api_unapply) macroExpandUnapply(this, tree, fun, unapply, args, mode, pt)
- else doTypedUnapply(tree, fun0, fun, args, mode, pt)
+ doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
deleted file mode 100644
index e877c990f0..0000000000
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.mutable
-
-trait FreshNameCreator {
- /** Do not call before after type checking ends.
- * PP: I think that directive needs to lose a word somewhere.
- */
- def newName(): String
- def newName(prefix: String): String
-}
-
-object FreshNameCreator {
- class Default extends FreshNameCreator {
- protected var counter = 0
- protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
-
- /**
- * Create a fresh name with the given prefix. It is guaranteed
- * that the returned name has never been returned by a previous
- * call to this function (provided the prefix does not end in a digit).
- */
- def newName(prefix: String): String = {
- val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
- counters(safePrefix) += 1
-
- safePrefix + counters(safePrefix)
- }
- def newName(): String = {
- counter += 1
- "$" + counter + "$"
- }
- }
-}
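For orientation (an illustrative sketch, not part of the diff): the deleted compiler-local creator is superseded by scala.reflect.internal.util.FreshNameCreator, which CompilationUnit now exposes as an implicit value so fresh names can be minted wherever a creator is in scope:

    import scala.reflect.internal.util.FreshNameCreator

    implicit val fresh: FreshNameCreator = new FreshNameCreator
    // counters are kept per prefix, so each prefix gets its own sequence
    fresh.newName("x$")    // "x$1"
    fresh.newName("x$")    // "x$2"
    fresh.newName("qq$")   // "qq$1"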
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 09f795a840..e94b7725cd 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -280,7 +280,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def parse(code: String): Tree = {
reporter.reset()
- val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStats())
+ val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStatsOrPackages())
throwIfErrors()
tree
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
index dd849f2bca..f92c9aa845 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -154,7 +154,7 @@ trait Holes { self: Quasiquotes =>
object Hole {
def apply(splicee: Tree, holeCard: Cardinality): Hole = {
- if (splicee.tpe == null) return new Hole(splicee, UnknownLocation, holeCard)
+ if (method == nme.unapply) return new Hole(splicee, UnknownLocation, holeCard)
val (spliceeCard, elementTpe) = parseCardinality(splicee.tpe)
def cantSplice() = {
val holeCardMsg = if (holeCard != NoDot) s" with $holeCard" else ""
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 5a1a25cfa1..0b5ade0b4c 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -4,15 +4,16 @@ package quasiquotes
import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
import scala.tools.nsc.ast.parser.Tokens._
import scala.compat.Platform.EOL
-import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
import scala.collection.mutable.ListBuffer
+import scala.util.Try
/** Builds upon the vanilla Scala parser and teams up together with Placeholders.scala to emulate holes.
* A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
* Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
*/
trait Parsers { self: Quasiquotes =>
- import global._
+ import global.{Try => _, _}
abstract class Parser extends {
val global: self.global.type = self.global
@@ -54,7 +55,13 @@ trait Parsers { self: Quasiquotes =>
def isHole(name: Name): Boolean = holeMap.contains(name)
+ override implicit def fresh: FreshNameCreator = new FreshNameCreator {
+ override def newName(prefix: String) = super.newName(nme.QUASIQUOTE_PREFIX + prefix)
+ }
+
override val treeBuilder = new ParserTreeBuilder {
+ override implicit def fresh: FreshNameCreator = parser.fresh
+
// q"(..$xs)"
override def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree =
Apply(Ident(nme.QUASIQUOTE_TUPLE), trees)
@@ -94,8 +101,6 @@ trait Parsers { self: Quasiquotes =>
override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
- override def isCaseDefStart: Boolean = super.isCaseDefStart || (in.token == EOF)
-
override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
@@ -140,11 +145,18 @@ trait Parsers { self: Quasiquotes =>
case Ident(name) if isHole(name) => true
case _ => false
})
+
+ override def topStat = super.topStat.orElse {
+ case _ if isHole =>
+ val stats = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ stats
+ }
}
}
object TermParser extends Parser {
- def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateStats()) }
+ def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateOrTopStatSeq()) }
}
object TypeParser extends Parser {
@@ -161,4 +173,14 @@ trait Parsers { self: Quasiquotes =>
parser.treeBuilder.patvarTransformer.transform(pat)
}
}
+
+ object FreshName {
+ def unapply(name: Name): Option[String] =
+ name.toString.split("\\$") match {
+ case Array(qq, left, right) if qq + "$" == nme.QUASIQUOTE_PREFIX && Try(right.toInt).isSuccess =>
+ Some(left + "$")
+ case _ =>
+ None
+ }
+ }
} \ No newline at end of file
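A self-contained re-creation of the prefix/strip round trip implemented by the overridden fresh creator and the FreshName extractor above (the literal "qq$" stands in for nme.QUASIQUOTE_PREFIX; its exact rendering is an assumption here):

    import scala.util.Try

    val QuasiquotePrefix = "qq$"   // assumed value of nme.QUASIQUOTE_PREFIX
    def freshQQName(prefix: String, counter: Int): String = QuasiquotePrefix + prefix + counter

    object FreshName {
      def unapply(name: String): Option[String] = name.split("\\$") match {
        case Array(qq, left, right) if qq + "$" == QuasiquotePrefix && Try(right.toInt).isSuccess =>
          Some(left + "$")         // recover the user-visible prefix, e.g. "x$"
        case _ => None
      }
    }

    freshQQName("x$", 1) match { case FreshName(p) => p }   // "x$"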
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index c2b219ee31..c31d1fcd12 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -32,10 +32,17 @@ trait Placeholders { self: Quasiquotes =>
def appendHole(tree: Tree, cardinality: Cardinality) = {
val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
sb.append(placeholderName)
- holeMap(placeholderName) = Hole(tree, cardinality)
+ val holeTree = if (method == nme.unapply) Bind(placeholderName, Ident(nme.WILDCARD)) else tree
+ holeMap(placeholderName) = Hole(holeTree, cardinality)
}
- foreach2(args, parts.init) { case (tree, (p, pos)) =>
+ val iargs = method match {
+ case nme.apply => args
+ case nme.unapply => List.fill(parts.length - 1)(EmptyTree)
+ case _ => global.abort("unreachable")
+ }
+
+ foreach2(iargs, parts.init) { case (tree, (p, pos)) =>
val (part, cardinality) = parseDots(p)
appendPart(part, pos)
appendHole(tree, cardinality)
@@ -47,7 +54,7 @@ trait Placeholders { self: Quasiquotes =>
}
class HoleMap {
- private val underlying = mutable.ListMap[String, Hole]()
+ private var underlying = immutable.SortedMap[String, Hole]()
private val accessed = mutable.Set[String]()
def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
def contains(key: Name) = underlying.contains(key.toString)
@@ -64,6 +71,7 @@ trait Placeholders { self: Quasiquotes =>
accessed += s
underlying.get(s)
}
+ def toList = underlying.toList
}
// Step 2: Transform vanilla Scala AST into an AST with holes
@@ -146,4 +154,11 @@ trait Placeholders { self: Quasiquotes =>
case _ => None
}
}
+
+ object PackageStatPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case ValDef(NoMods, Placeholder(tree, location, card), Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) => Some((tree, location, card))
+ case _ => None
+ }
+ }
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index 1305e25240..9e98dcbc8b 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -14,8 +14,9 @@ abstract class Quasiquotes extends Parsers
def debug(msg: String): Unit =
if (settings.Yquasiquotedebug.value) println(msg)
- lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
+ lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
+ debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
val parts1 = parts0.map {
case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
@@ -32,7 +33,7 @@ abstract class Quasiquotes extends Parsers
case nme.pq => PatternParser.parse(_)
case other => global.abort(s"Unknown quasiquote flavor: $other")
}
- (universe0, args0, parts1, parse0, reify0)
+ (universe0, args0, parts1, parse0, reify0, method0)
case _ =>
global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
}
@@ -41,11 +42,18 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
+ debug(s"\nmacro application:\n${c.macroApplication}\n")
debug(s"\ncode to parse:\n$code\n")
val tree = parse(code)
debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
- debug(s"reified tree:\n$reified\n")
+ val sreified =
+ reified
+ .toString
+ .replace("scala.reflect.runtime.`package`.universe.build.", "")
+ .replace("scala.reflect.runtime.`package`.universe.", "")
+ .replace("scala.collection.immutable.", "")
+ debug(s"reified tree:\n$sreified\n")
reified
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 18999e8267..3d1ecf95b2 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -7,10 +7,8 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
- import global.build.{SyntacticClassDef, SyntacticTraitDef, SyntacticModuleDef,
- SyntacticDefDef, SyntacticValDef, SyntacticVarDef,
- SyntacticBlock, SyntacticApplied, SyntacticTypeApplied,
- SyntacticFunction, SyntacticNew, SyntacticAssign}
+ import global.build.{Select => _, Ident => _, TypeTree => _, _}
+ import global.treeInfo._
import global.definitions._
import Cardinality._
import universeTypes._
@@ -29,12 +27,89 @@ trait Reifiers { self: Quasiquotes =>
def action = if (isReifyingExpressions) "splice" else "extract"
def holesHaveTypes = isReifyingExpressions
+ /** Map that stores freshly generated names linked to the corresponding names in the reified tree.
+ * This information is used to reify names created by calls to freshTermName and freshTypeName.
+ */
+ var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+
+ /** Wraps expressions into:
+ * a sequence of nested withFreshTermName/withFreshTypeName calls which are required
+ * to force regeneration of randomly generated names on every evaluation of quasiquote.
+ *
+ * Wraps patterns into:
+ * a call into anonymous class' unapply method required by unapply macro expansion:
+ *
+ * new {
+ * def unapply(tree) = tree match {
+ * case pattern if guard => Some(result)
+ * case _ => None
+ * }
+ * }.unapply(<unapply-selector>)
+ *
+ * where pattern corresponds to reified tree and guard represents conjunction of equalities
+ * which check that pairs of names in nameMap.values are equal between each other.
+ */
+ def wrap(tree: Tree) =
+ if (isReifyingExpressions) {
+ nameMap.foldLeft(tree) {
+ case (t, (origname, names)) =>
+ assert(names.size == 1)
+ val FreshName(prefix) = origname
+ val ctor = TermName("withFresh" + (if (origname.isTermName) "TermName" else "TypeName"))
+ // q"$u.build.$ctor($prefix) { ${names.head} => $t }"
+ Apply(Apply(Select(Select(u, nme.build), ctor), List(Literal(Constant(prefix)))),
+ List(Function(List(ValDef(Modifiers(PARAM), names.head, TypeTree(), EmptyTree)), t)))
+ }
+ } else {
+ val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
+ val cases =
+ if(isVarPattern) {
+ val Ident(name) :: Nil = freevars
+ // cq"$name: $treeType => $SomeModule($name)" :: Nil
+ CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))),
+ EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil
+ } else {
+ val (succ, fail) = freevars match {
+ case Nil =>
+ // (q"true", q"false")
+ (Literal(Constant(true)), Literal(Constant(false)))
+ case head :: Nil =>
+ // (q"$SomeModule($head)", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(head)), Ident(NoneModule))
+ case vars =>
+ // (q"$SomeModule((..$vars))", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(SyntacticTuple(vars))), Ident(NoneModule))
+ }
+ val guard =
+ nameMap.collect { case (_, nameset) if nameset.size >= 2 =>
+ nameset.toList.sliding(2).map { case List(n1, n2) =>
+ // q"$n1 == $n2"
+ Apply(Select(Ident(n1), nme.EQ), List(Ident(n2)))
+ }
+ }.flatten.reduceOption[Tree] { (l, r) =>
+ // q"$l && $r"
+ Apply(Select(l, nme.ZAND), List(r))
+ }.getOrElse { EmptyTree }
+ // cq"$tree if $guard => $succ" :: cq"_ => $fail" :: Nil
+ CaseDef(tree, guard, succ) :: CaseDef(Ident(nme.WILDCARD), EmptyTree, fail) :: Nil
+ }
+ // q"new { def unapply(tree: $AnyClass) = tree match { case ..$cases } }.unapply(..$args)"
+ Apply(
+ Select(
+ SyntacticNew(Nil, Nil, noSelfType, List(
+ DefDef(NoMods, nme.unapply, Nil, List(List(ValDef(NoMods, nme.tree, TypeTree(AnyClass.toType), EmptyTree))), TypeTree(),
+ Match(Ident(nme.tree), cases)))),
+ nme.unapply),
+ args)
+ }
+
def reifyFillingHoles(tree: Tree): Tree = {
val reified = reifyTree(tree)
holeMap.unused.foreach { hole =>
c.abort(holeMap(hole).tree.pos, s"Don't know how to $action here")
}
- reified
+ wrap(reified)
}
override def reifyTree(tree: Tree): Tree =
@@ -51,6 +126,7 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, location, _) => reifyCase(tree, location)
case RefineStatPlaceholder(tree, _, _) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, _) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, _) => reifyPackageStat(tree)
case _ => EmptyTree
}
@@ -60,18 +136,23 @@ trait Reifiers { self: Quasiquotes =>
case SyntacticClassDef(mods, name, tparams, constrmods, vparamss, earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, vparamss,
earlyDefs, parents, selfdef, body)
- case SyntacticModuleDef(mods, name, earlyDefs, parents, selfdef, body) =>
- reifyBuildCall(nme.SyntacticModuleDef, mods, name, earlyDefs, parents, selfdef, body)
+ case SyntacticPackageObjectDef(name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticPackageObjectDef, name, earlyDefs, parents, selfdef, body)
+ case SyntacticObjectDef(mods, name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
case SyntacticNew(earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
- case SyntacticValDef(mods, name, tpt, rhs) =>
+ case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
case SyntacticVarDef(mods, name, tpt, rhs) =>
reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
case SyntacticAssign(lhs, rhs) =>
reifyBuildCall(nme.SyntacticAssign, lhs, rhs)
+ case SyntacticApplied(fun, List(args))
+ if args.forall { case Placeholder(_, _, DotDotDot) => false case _ => true } =>
+ reifyBuildCall(nme.SyntacticApply, fun, args)
case SyntacticApplied(fun, argss) if argss.nonEmpty =>
reifyBuildCall(nme.SyntacticApplied, fun, argss)
case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
@@ -94,6 +175,12 @@ trait Reifiers { self: Quasiquotes =>
case Placeholder(tree, location, _) =>
if (holesHaveTypes && !(location.tpe <:< nameType)) c.abort(tree.pos, s"$nameType expected but ${location.tpe} found")
tree
+ case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
+ def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+ def introduceName() = { val n = fresh(); nameMap(name) += n; n}
+ def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
+ if (isReifyingPatterns) result(introduceName())
+ else result(nameMap.get(name).map { _.head }.getOrElse { introduceName() })
case _ =>
super.reifyName(name)
}
@@ -131,6 +218,8 @@ trait Reifiers { self: Quasiquotes =>
def reifyAnnotation(tree: Tree) = tree
+ def reifyPackageStat(tree: Tree) = tree
+
/** Splits list into a list of groups where subsequent elements are considered
* similar by the corresponding function.
*
@@ -185,6 +274,8 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, _, DotDot) => tree
case RefineStatPlaceholder(tree, _, DotDot) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, DotDot) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, DotDot) => reifyPackageStat(tree)
+
case List(Placeholder(tree, _, DotDotDot)) => tree
} {
reify(_)
@@ -201,13 +292,13 @@ trait Reifiers { self: Quasiquotes =>
// to overload the same tree for two different concepts:
// - MUTABLE that is used to override ValDef for vars
// - TRAIT that is used to override ClassDef for traits
- val nonoverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+ val nonOverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
// Traits automatically have ABSTRACT flag assigned to
// them so in that case it's not an explicit flag
val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
- if ((flags & nonoverloadedExplicitFlags) != 0L)
+ if ((flags & nonOverloadedExplicitFlags) != 0L)
c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
}
@@ -280,6 +371,8 @@ trait Reifiers { self: Quasiquotes =>
override def reifyEarlyDef(tree: Tree) = mirrorBuildCall(nme.mkEarlyDef, tree)
override def reifyAnnotation(tree: Tree) = mirrorBuildCall(nme.mkAnnotation, tree)
+
+ override def reifyPackageStat(tree: Tree) = mirrorBuildCall(nme.mkPackageStat, tree)
}
class UnapplyReifier extends Reifier {
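To make the wrap logic in Reifiers.scala above concrete (a hedged example, not from the commit): when quoted code forces the parser to invent a name, such as the placeholder parameter of an underscore function, the expression reifier nests the result inside the withFreshTermName/withFreshTypeName calls built by wrap, so the synthetic name is regenerated on every evaluation of the quasiquote:

    import scala.reflect.runtime.universe._

    // The parser introduces a fresh x$N parameter for the underscore; under this commit
    // that name is reified through the withFreshTermName wrapper instead of being baked
    // into the expansion, so the two trees below carry distinct synthetic parameter names.
    val inc1 = q"(_: Int) + 1"
    val inc2 = q"(_: Int) + 1"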