Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Evals.scala              |   9
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Names.scala              |  28
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala               | 119
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala              |   7
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala           |   2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala         |  33
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala                |   4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala        |  13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala        |  32
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala              |  27
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala            |   5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala        |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala             |   2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala            |  25
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala               |   3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala            |   7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala       |   6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala               |  27
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Holes.scala            |  97
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala          |  37
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala     |   7
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala         |  21
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala                |  20
-rw-r--r--  src/library/scala/collection/immutable/List.scala                   |  63
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala                |  16
-rw-r--r--  src/library/scala/collection/immutable/Set.scala                    |  46
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala                 |   8
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala                |  15
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala                  |  15
-rw-r--r--  src/library/scala/concurrent/Future.scala                           |   8
-rw-r--r--  src/library/scala/util/matching/Regex.scala                         |  39
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala                      |  22
-rw-r--r--  src/reflect/scala/reflect/api/StandardDefinitions.scala             |  18
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala                        |   8
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala            |   2
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala                 |  93
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala                |   3
-rw-r--r--  src/reflect/scala/reflect/internal/FreshNames.scala                 |   4
-rw-r--r--  src/reflect/scala/reflect/internal/StdAttachments.scala             |   4
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala                   |  26
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolPairs.scala                |   2
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala                    |   8
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala                    |  24
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala                   |  20
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala                      |  21
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala   |   2
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala          |   4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala               |   2
-rw-r--r--  src/reflect/scala/reflect/internal/util/Collections.scala           |  34
-rw-r--r--  src/reflect/scala/reflect/internal/util/SourceFile.scala            |  16
-rw-r--r--  src/reflect/scala/reflect/macros/Attachments.scala                  |   5
-rw-r--r--  src/reflect/scala/reflect/macros/Names.scala                        |  40
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala                |   2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala           |   1
-rw-r--r--  src/reflect/scala/reflect/runtime/Settings.scala                    |   1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Settings.scala                     |   7
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala           |  10
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala           |   4
58 files changed, 769 insertions, 357 deletions
diff --git a/src/compiler/scala/reflect/macros/contexts/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala
index 84928ddf86..180a998c39 100644
--- a/src/compiler/scala/reflect/macros/contexts/Evals.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala
@@ -12,7 +12,12 @@ trait Evals {
private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
def eval[T](expr: Expr[T]): T = {
- val imported = evalImporter.importTree(expr.tree)
- evalToolBox.eval(imported).asInstanceOf[T]
+ expr.tree match {
+ case global.Literal(global.Constant(value)) =>
+ value.asInstanceOf[T]
+ case _ =>
+ val imported = evalImporter.importTree(expr.tree)
+ evalToolBox.eval(imported).asInstanceOf[T]
+ }
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index c2f14cf0f1..299af40b94 100644
--- a/src/compiler/scala/reflect/macros/contexts/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -4,7 +4,9 @@ package contexts
trait Names {
self: Context =>
- def freshNameCreator = callsiteTyper.context.unit.fresh
+ import global._
+
+ def freshNameCreator = globalFreshNameCreator
def fresh(): String =
freshName()
@@ -16,11 +18,25 @@ trait Names {
freshName[NameType](name)
def freshName(): String =
- freshName("fresh$")
-
- def freshName(name: String): String =
- freshNameCreator.newName(name)
+ freshName(nme.FRESH_PREFIX)
+
+ def freshName(name: String): String = {
+ // In comparison with the first version of freshName, current "fresh" names
+ // at least can't clash with legible user-written identifiers and are much less likely to clash with each other.
+ // It is still not good enough however, because the counter gets reset every time we create a new Global.
+ //
+ // This would most certainly cause problems if Scala featured something like introduceTopLevel,
+ // but even for def macros this can lead to unexpected troubles. Imagine that one Global
+ // creates a term of an anonymous type with a member featuring a "fresh" name, and then another Global
+ // imports that term with a wildcard and then generates a "fresh" name of its own. Given unlucky
+ // circumstances these "fresh" names might end up clashing.
+ //
+ // TODO: hopefully SI-7823 will provide an ultimate answer to this problem.
+ // In the meanwhile I will also keep open the original issue: SI-6879 "c.freshName is broken".
+ val sortOfUniqueSuffix = freshNameCreator.newName(nme.FRESH_SUFFIX)
+ name + "$" + sortOfUniqueSuffix
+ }
def freshName[NameType <: Name](name: NameType): NameType =
- name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
+ name.mapName(freshName(_)).asInstanceOf[NameType]
}
\ No newline at end of file
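Review note: a minimal sketch of how a macro would observe the scheme described above, assuming the 2.11 blackbox macro API. The bundle, the prefix "tmp" and the suffix shape shown in the comments are hypothetical; the only behaviour taken from the patch is that a fresh name is the supplied prefix plus "$" plus a suffix minted by the global fresh-name creator, so two calls with the same prefix still differ.

    import scala.reflect.macros.blackbox.Context

    object FreshNameDemo {
      // Hypothetical macro implementation that mints two fresh names with the same prefix.
      def impl(c: Context)(body: c.Tree): c.Tree = {
        import c.universe._
        val x = c.freshName(TermName("tmp")) // e.g. tmp$<suffix> -- exact suffix is an assumption
        val y = c.freshName(TermName("tmp")) // distinct from x despite the identical prefix
        q"""
          val $x = 1
          val $y = 2
          $x + $y + $body
        """
      }
    }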
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 0728fff74f..e3d2bf14a0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -33,7 +33,7 @@ trait ParsersCommon extends ScannersCommon { self =>
import global.{currentUnit => _, _}
def newLiteral(const: Any) = Literal(Constant(const))
- def literalUnit = newLiteral(())
+ def literalUnit = gen.mkSyntheticUnit()
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
@@ -837,8 +837,14 @@ self =>
if (samePrecedence)
checkHeadAssoc(leftAssoc)
- def loop(top: Tree): Tree =
- if (canReduce) loop(finishBinaryOp(isExpr, popOpInfo(), top)) else top
+ def loop(top: Tree): Tree = if (canReduce) {
+ val info = popOpInfo()
+ if (!isExpr && info.targs.nonEmpty) {
+ syntaxError(info.offset, "type application is not allowed in pattern")
+ info.targs.foreach(_.setType(ErrorType))
+ }
+ loop(finishBinaryOp(isExpr, info, top))
+ } else top
loop(top)
}
@@ -1219,7 +1225,7 @@ self =>
// Like Swiss cheese, with holes
def stringCheese: Tree = atPos(in.offset) {
val start = in.offset
- val interpolator = in.name
+ val interpolator = in.name.encoded // ident() for INTERPOLATIONID
val partsBuf = new ListBuffer[Tree]
val exprBuf = new ListBuffer[Tree]
@@ -2131,8 +2137,6 @@ self =>
/* -------- PARAMETERS ------------------------------------------- */
- def allowTypelessParams = false
-
/** {{{
* ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
* ParamClause ::= [nl] `(' [Params] `)'
@@ -2147,56 +2151,6 @@ self =>
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
var implicitmod = 0
var caseParam = ofCaseClass
- def param(): ValDef = {
- val start = in.offset
- val annots = annotations(skipNewLines = false)
- var mods = Modifiers(Flags.PARAM)
- if (owner.isTypeName) {
- mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
- in.token match {
- case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token.toLong, tokenRange(in))
- if (v == VAR) mods |= Flags.MUTABLE
- in.nextToken()
- case _ =>
- if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
- if (!caseParam) mods |= Flags.PrivateLocal
- }
- if (caseParam) mods |= Flags.CASEACCESSOR
- }
- val nameOffset = in.offset
- val name = ident()
- var bynamemod = 0
- val tpt =
- if (((settings.YmethodInfer && !owner.isTypeName) || allowTypelessParams) && in.token != COLON) {
- TypeTree()
- } else { // XX-METHOD-INFER
- accept(COLON)
- if (in.token == ARROW) {
- if (owner.isTypeName && !mods.hasLocalFlag)
- syntaxError(
- in.offset,
- (if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", skipIt = false)
- else if (implicitmod != 0)
- syntaxError(
- in.offset,
- "implicit parameters may not be call-by-name", skipIt = false)
- else bynamemod = Flags.BYNAMEPARAM
- }
- paramType()
- }
- val default =
- if (in.token == EQUALS) {
- in.nextToken()
- mods |= Flags.DEFAULTPARAM
- expr()
- } else EmptyTree
- atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
- }
- }
def paramClause(): List[ValDef] = {
if (in.token == RPAREN)
return Nil
@@ -2205,7 +2159,7 @@ self =>
in.nextToken()
implicitmod = Flags.IMPLICIT
}
- commaSeparated(param())
+ commaSeparated(param(owner, implicitmod, caseParam ))
}
val vds = new ListBuffer[List[ValDef]]
val start = in.offset
@@ -2253,6 +2207,57 @@ self =>
}
}
+ def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef = {
+ val start = in.offset
+ val annots = annotations(skipNewLines = false)
+ var mods = Modifiers(Flags.PARAM)
+ if (owner.isTypeName) {
+ mods = modifiers() | Flags.PARAMACCESSOR
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
+ in.token match {
+ case v @ (VAL | VAR) =>
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
+ if (v == VAR) mods |= Flags.MUTABLE
+ in.nextToken()
+ case _ =>
+ if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
+ if (!caseParam) mods |= Flags.PrivateLocal
+ }
+ if (caseParam) mods |= Flags.CASEACCESSOR
+ }
+ val nameOffset = in.offset
+ val name = ident()
+ var bynamemod = 0
+ val tpt =
+ if ((settings.YmethodInfer && !owner.isTypeName) && in.token != COLON) {
+ TypeTree()
+ } else { // XX-METHOD-INFER
+ accept(COLON)
+ if (in.token == ARROW) {
+ if (owner.isTypeName && !mods.hasLocalFlag)
+ syntaxError(
+ in.offset,
+ (if (mods.isMutable) "`var'" else "`val'") +
+ " parameters may not be call-by-name", skipIt = false)
+ else if (implicitmod != 0)
+ syntaxError(
+ in.offset,
+ "implicit parameters may not be call-by-name", skipIt = false)
+ else bynamemod = Flags.BYNAMEPARAM
+ }
+ paramType()
+ }
+ val default =
+ if (in.token == EQUALS) {
+ in.nextToken()
+ mods |= Flags.DEFAULTPARAM
+ expr()
+ } else EmptyTree
+ atPos(start, if (name == nme.ERROR) start else nameOffset) {
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
+ }
+ }
+
/** {{{
* TypeParamClauseOpt ::= [TypeParamClause]
* TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 8011abc1ed..e8d46704c3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -475,14 +475,17 @@ trait Scanners extends ScannersCommon {
if (token == INTERPOLATIONID) {
nextRawChar()
if (ch == '\"') {
- nextRawChar()
- if (ch == '\"') {
+ val lookahead = lookaheadReader
+ lookahead.nextChar()
+ if (lookahead.ch == '\"') {
+ nextRawChar() // now eat it
offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
+ nextChar()
token = STRINGLIT
strVal = ""
}
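Review note: a hedged illustration of the case the lookahead distinguishes. After an interpolation id, two consecutive double quotes can either close an empty single-line interpolated string or open a triple-quoted one, and only peeking at the third character without consuming it tells them apart. Plain Scala, nothing compiler-internal:

    object EmptyInterpolation {
      val empty = s""              // empty interpolated string: no third quote follows
      val multi = s"""a "b" c"""   // triple-quoted interpolation: the lookahead sees the third quote
      def main(args: Array[String]): Unit = println(empty + "|" + multi)
    }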
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index cfee988efc..525dcffb0c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -56,7 +56,7 @@ abstract class TreeBuilder {
/** Create tree representing (unencoded) binary operation expression or pattern. */
def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
- require(isExpr || targs.isEmpty, s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+ require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
def mkSelection(t: Tree) = {
def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
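Review note: the Parsers.scala hunk above now reports "type application is not allowed in pattern" and marks the offending type arguments with ErrorType, and the relaxed require here lets makeBinop tolerate those erroneous targs instead of crashing. A hedged user-level sketch of the construct involved; the rejected line is kept commented so the snippet itself compiles:

    object TypeAppInPattern {
      def describe(xs: List[Int]): String = xs match {
        // A type application on an infix extractor in a pattern, e.g.
        //   case head ::[Int] tail => ...
        // is what the parser now rejects with "type application is not allowed in pattern".
        case head :: tail => s"starts with $head, ${tail.length} more"
        case Nil          => "empty"
      }
    }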
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 90c15bca61..55f45257dc 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -55,23 +55,26 @@ abstract class Pickler extends SubComponent {
case _ =>
}
}
- // If there are any erroneous types in the tree, then we will crash
- // when we pickle it: so let's report an error instead. We know next
- // to nothing about what happened, but our supposition is a lot better
- // than "bad type: <error>" in terms of explanatory power.
- for (t <- unit.body) {
- if (t.isErroneous) {
- unit.error(t.pos, "erroneous or inaccessible type")
- return
- }
- if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
- unit.error(t.pos, "macro has not been expanded")
- return
- }
+ try {
+ pickle(unit.body)
+ } catch {
+ case e: FatalError =>
+ for (t <- unit.body) {
+ // If there are any erroneous types in the tree, then we will crash
+ // when we pickle it: so let's report an error instead. We know next
+ // to nothing about what happened, but our supposition is a lot better
+ // than "bad type: <error>" in terms of explanatory power.
+ //
+ // OPT: do this only as a recovery after fatal error. Checking in advance was expensive.
+ if (t.isErroneous) {
+ if (settings.debug) e.printStackTrace()
+ unit.error(t.pos, "erroneous or inaccessible type")
+ return
+ }
+ }
+ throw e
}
-
- pickle(unit.body)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 4bbfc945f6..a60310f900 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -522,6 +522,8 @@ abstract class Erasure extends AddInterfaces
class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
val typer = this.asInstanceOf[analyzer.Typer]
+ override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
+
/** Replace member references as follows:
*
* - `x == y` for == in class Any becomes `x equals y` with equals in class Object.
@@ -760,7 +762,7 @@ abstract class Erasure extends AddInterfaces
|| super.exclude(sym)
|| !sym.hasTypeAt(currentRun.refchecksPhase.id)
)
- override def matches(sym1: Symbol, sym2: Symbol) = true
+ override def matches(lo: Symbol, high: Symbol) = true
}
def isErasureDoubleDef(pair: SymbolPair) = {
import pair._
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 4222c4d8c8..870eafbf20 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -24,15 +24,16 @@ abstract class OverridingPairs extends SymbolPairs {
/** Symbols to exclude: Here these are constructors and private/artifact symbols,
* including bridges. But it may be refined in subclasses.
*/
- override protected def exclude(sym: Symbol) = (sym hasFlag PRIVATE | ARTIFACT) || sym.isConstructor
+ override protected def exclude(sym: Symbol) = sym.isPrivateLocal || sym.isArtifact || sym.isConstructor
/** Types always match. Term symbols match if their member types
* relative to `self` match.
*/
- override protected def matches(sym1: Symbol, sym2: Symbol) = sym1.isType || (
- (sym1.owner != sym2.owner)
- && !exclude(sym2)
- && relatively.matches(sym1, sym2)
- )
+ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
+ (lo.owner != high.owner) // don't try to form pairs from overloaded members
+    && !high.isPrivate // private or private[this] members are never overridden
+ && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
+ && relatively.matches(lo, high)
+ ) // TODO we don't call exclude(high), should we?
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 3791af1629..c59b726076 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -306,6 +306,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
+ *
+ * !!! Is this safe in the face of the following?
+ * scala> trait T { def foo[A] = 0}; object O extends T { override def foo[B] = 0 }
*/
private def specializedName(sym: Symbol, env: TypeEnv): TermName = {
val tvars = (
@@ -391,13 +394,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* enclosing member with the annotation.
*/
private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
- !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+ !hasUnspecializableAnnotation(sym) && (
specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
|| sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
|| isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
)
)
+ private def hasUnspecializableAnnotation(sym: Symbol): Boolean =
+ sym.ownerChain.exists(_ hasAnnotation UnspecializedClass)
+
def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
case NormalizedMember(_) => true
case _ => false
@@ -433,10 +439,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else
specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if tp.isSpecialized => arg })
- case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: tparams.map(_.info))
+ case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: mapList(tparams)(symInfo)) // OPT
// since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
case NullaryMethodType(resTpe) => specializedTypeVars(resTpe)
- case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
+ case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: mapList(argSyms)(symTpe)) // OPT
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
@@ -907,16 +913,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (sym.isMethod) {
- val stvars = specializedTypeVars(sym)
- if (stvars.nonEmpty)
- debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
+ if (hasUnspecializableAnnotation(sym)) {
+ List()
+ } else {
+ val stvars = specializedTypeVars(sym)
+ if (stvars.nonEmpty)
+ debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
- val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
- val tps2 = tps1 filter stvars
- if (!sym.isDeferred)
- addConcreteSpecMethod(sym)
+ val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
+ val tps2 = tps1 filter stvars
+ if (!sym.isDeferred)
+ addConcreteSpecMethod(sym)
- specializeOn(tps2)
+ specializeOn(tps2)
+ }
}
else Nil
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index b471d16ddd..5973c70583 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -128,6 +128,7 @@ abstract class TailCalls extends Transform {
logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+
}
object EmptyTailContext extends TailContext {
@@ -185,6 +186,18 @@ abstract class TailCalls extends Transform {
private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
+ override def transformUnit(unit: CompilationUnit): Unit = {
+ try {
+ super.transformUnit(unit)
+ } finally {
+ // OPT clear these after each compilation unit
+ failPositions.clear()
+ failReasons.clear()
+ accessed.clear()
+ }
+ }
+
/** Rewrite this tree to contain no tail recursive calls */
def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
@@ -218,12 +231,12 @@ abstract class TailCalls extends Transform {
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
- failReasons(ctx) = reason
+ if (ctx.isMandatory) failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
/* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- failPositions(ctx) = fun.pos
+ if (ctx.isMandatory) failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
@@ -237,7 +250,8 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
+ if (ctx.isMandatory && receiverIsSuper) // OPT expensive check, avoid unless we will actually report the error
+ failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
@@ -245,6 +259,11 @@ abstract class TailCalls extends Transform {
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
+
+ def isEligible(tree: DefDef) = {
+ val sym = tree.symbol
+ !(sym.hasAccessorFlag || sym.isConstructor)
+ }
tree match {
case ValDef(_, _, _, _) =>
@@ -253,7 +272,7 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
val newCtx = new DefDefTailContext(dd)
if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
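Review note: a hedged sketch of the "recursive call targeting a supertype" case that failHere reports (and that is now only computed when @tailrec makes the diagnostic mandatory). The class names are made up and the message is quoted from the hunk above rather than from verified compiler output; the annotation is left commented so the sketch compiles.

    import scala.annotation.tailrec

    class Base {
      def countDown(n: Int): Int = n
    }

    class Derived extends Base {
      // Enabling @tailrec here is rejected because the recursive call goes through super,
      // i.e. "it contains a recursive call targeting a supertype".
      // @tailrec
      override def countDown(n: Int): Int =
        if (n <= 0) 0 else super.countDown(n - 1)
    }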
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 94f8f509fc..b899cd8994 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -186,7 +186,7 @@ trait Checkable {
* additional conditions holds:
* - either A or B is effectively final
* - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin)
- * - both A and B are sealed, and every possible pairing of their children is irreconcilable
+ * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable
*
* TODO: the last two conditions of the last possibility (that the symbols are not of
* classes being compiled in the current run) are because this currently runs too early,
@@ -198,8 +198,9 @@ trait Checkable {
isEffectivelyFinal(sym1) // initialization important
|| isEffectivelyFinal(sym2)
|| !sym1.isTrait && !sym2.isTrait
- || sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+ || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
)
+ private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal
private def isEffectivelyFinal(sym: Symbol): Boolean = (
// initialization important
sym.initialize.isEffectivelyFinal || (
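Review note: a hedged sketch of what the relaxed sealed/final condition covers. With both hierarchies sealed and every leaf final, all child pairings are irreconcilable, so a cross-hierarchy type test can never succeed; whether the checker actually warns also depends on the other conditions above (e.g. the classes not being compiled in the current run), and the warning text is not quoted from this patch.

    object SealedOrFinalDemo {
      sealed trait Animal
      final case class Dog(name: String) extends Animal
      final case class Cat(name: String) extends Animal

      sealed trait Plant
      final case class Fern(fronds: Int) extends Plant

      // No Animal child can also be a Plant child, so this test can never succeed;
      // under the conditions above the checker may flag it as fruitless.
      def misguided(a: Animal): Boolean = a.isInstanceOf[Plant]
    }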
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4d0eda2377..2043eb5d5d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -418,7 +418,7 @@ trait ContextErrors {
case TypeRef(_, _, arg :: _) if arg.typeSymbol == TupleClass(funArity) && funArity > 1 =>
sm"""|
|Note: The expected type requires a one-argument function accepting a $funArity-Tuple.
- | Consider a pattern matching anoynmous function, `{ case $example => ... }`"""
+ | Consider a pattern matching anonymous function, `{ case $example => ... }`"""
case _ => ""
}
case _ => ""
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 598b12b00d..e5907e1a0f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -1303,7 +1303,7 @@ trait Contexts { self: Analyzer =>
var renamed = false
var selectors = tree.selectors
def current = selectors.head
- while (selectors.nonEmpty && result == NoSymbol) {
+ while ((selectors ne Nil) && result == NoSymbol) {
if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
if (name.isTypeName) current.name.toTypeName else current.name)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 91321d4700..776920ed42 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -137,10 +137,6 @@ trait Implicits {
private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 }
- private def isInvalidConversionTarget(tpe: Type): Boolean = tpe match {
- case Function1(_, out) => AnyRefClass.tpe <:< out
- case _ => false
- }
private def isInvalidConversionSource(tpe: Type): Boolean = tpe match {
case Function1(in, _) => in <:< NullClass.tpe
case _ => false
@@ -629,7 +625,8 @@ trait Implicits {
// for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???`
// then Scaladoc will give us something of type `C[T]`, and it would like to know
// that `conv` is potentially available under such and such conditions
- case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc => applyImplicitArgs(tree)
+ case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc =>
+ applyImplicitArgs(tree)
case tree => tree
}
case _ => fallback
@@ -791,7 +788,7 @@ trait Implicits {
final class LocalShadower extends Shadower {
val shadowed = util.HashSet[Name](512)
def addInfos(infos: Infos) {
- shadowed addEntries infos.map(_.name)
+ infos.foreach(i => shadowed.addEntry(i.name))
}
def isShadowed(name: Name) = shadowed(name)
}
@@ -808,7 +805,6 @@ trait Implicits {
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
|| isView && (info.sym eq Predef_conforms)
- || shadower.isShadowed(info.name)
|| (!context.macrosEnabled && info.sym.isTermMacro)
)
@@ -817,6 +813,7 @@ trait Implicits {
def survives(info: ImplicitInfo) = (
!isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded
&& isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt
+ && !shadower.isShadowed(info.name) // OPT rare, only check for plausible candidates
&& matchesPt(info) // stable and matches expected type
)
/** The implicits that are not valid because they come later in the source and
@@ -1361,11 +1358,17 @@ trait Implicits {
if (context.ambiguousErrors)
context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
}
- if (isInvalidConversionTarget(pt)) {
- maybeInvalidConversionError("the result type of an implicit conversion must be more specific than AnyRef")
- result = SearchFailure
+ pt match {
+ case Function1(_, out) =>
+ def prohibit(sym: Symbol) = if (sym.tpe <:< out) {
+ maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}")
+ result = SearchFailure
+ }
+ prohibit(AnyRefClass)
+ if (settings.isScala211) prohibit(AnyValClass)
+ case _ => false
}
- else if (settings.isScala211 && isInvalidConversionSource(pt)) {
+ if (settings.isScala211 && isInvalidConversionSource(pt)) {
maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
result = SearchFailure
}
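Review note: a hedged user-level sketch of what the rewritten check rejects. The AnyRef case restates existing behaviour; extending the prohibition to AnyVal is guarded by settings.isScala211, which I read as the 2.11-mode flag (assumption). The offending definitions are commented out so the snippet compiles.

    import scala.language.implicitConversions

    object ConversionTargets {
      // Rejected: "the result type of an implicit conversion must be more specific than AnyRef"
      // implicit def intToRef(i: Int): AnyRef = new Object

      // Rejected in 2.11 mode, with AnyVal in the message instead of AnyRef:
      // implicit def intToVal(i: Int): AnyVal = i

      // Accepted: the result type is a specific class
      implicit def intToString(i: Int): String = i.toString
    }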
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 27e8698676..645f267a21 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1069,8 +1069,9 @@ trait Namers extends MethodSynthesis {
}
def overriddenSymbol(resTp: Type) = {
+ lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala
intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
+ sym != NoSymbol && (site.memberType(sym) matches schema)
}
}
// TODO: see whether this or something similar would work instead:
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 68d724b6fc..f7684b93af 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1596,6 +1596,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkUnexpandedMacro(t: Tree) =
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro)
+ unit.error(t.pos, "macro has not been expanded")
+
override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
@@ -1755,6 +1759,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
varianceValidator.traverse(tt.original) // See SI-7872
case _ =>
}
+
+ checkUnexpandedMacro(result)
+
result
} catch {
case ex: TypeError =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 995f98cc2c..57f27a05fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -46,7 +46,7 @@ trait StdAttachments {
* The parameter is of type `Any`, because macros can expand both into trees and into annotations.
*/
def hasMacroExpansionAttachment(any: Any): Boolean = any match {
- case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case tree: Tree => tree.hasAttachment[MacroExpansionAttachment]
case _ => false
}
@@ -96,7 +96,7 @@ trait StdAttachments {
*/
def isMacroExpansionSuppressed(tree: Tree): Boolean =
( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812
- || tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined
+ || tree.hasAttachment[SuppressMacroExpansionAttachment.type]
|| (tree match {
// we have to account for the fact that during typechecking an expandee might become wrapped,
// i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
@@ -150,7 +150,7 @@ trait StdAttachments {
/** Determines whether a tree should or should not be adapted,
* because someone has put MacroImplRefAttachment on it.
*/
- def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
+ def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type]
/** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected
* from typedNamedApply, the applyDynamicNamed argument rewriter, the latter
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 9776b1e80e..101e1526fe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -579,7 +579,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+
// Side effect time! Don't be an idiot like me and think you
// can move "val sym = tree.symbol" before this line, because
// inferExprAlternative side-effects the tree's symbol.
@@ -992,7 +993,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptMismatchedSkolems() = {
def canIgnoreMismatch = (
!context.reportErrors && isPastTyper
- || tree.attachments.get[MacroExpansionAttachment].isDefined
+ || tree.hasAttachment[MacroExpansionAttachment]
)
def bound = pt match {
case ExistentialType(qs, _) => qs
@@ -2232,14 +2233,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
DeprecatedParamNameError(p, n)
}
}
- }
- if (meth.isStructuralRefinementMember)
- checkMethodStructuralCompatible(ddef)
+ if (meth.isStructuralRefinementMember)
+ checkMethodStructuralCompatible(ddef)
- if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
- case List(param) :: _ if !param.isImplicit =>
- checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
- case _ =>
+ if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+ case List(param) :: _ if !param.isImplicit =>
+ checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+ case _ =>
+ }
}
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
@@ -3411,7 +3412,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
// precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen stableTypeFor arg orElse arg.tpe))
+ val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
@@ -3831,7 +3832,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// lifted out of typed1 because it's needed in typedImplicit0
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
- inferPolyAlternatives(fun, args map (_.tpe))
+ inferPolyAlternatives(fun, mapList(args)(treeTpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
@@ -3851,7 +3852,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
- val targs = args map (_.tpe)
+ val targs = mapList(args)(treeTpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (isPredefClassOf(fun.symbol))
typedClassOf(tree, args.head, noGen = true)
@@ -4871,7 +4872,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typedHigherKindedType(arg, mode, pt)
}
- val argtypes = args1 map (_.tpe)
+ val argtypes = mapList(args1)(treeTpe)
foreach2(args, tparams) { (arg, tparam) =>
// note: can't use args1 in selector, because Binds got replaced
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
index 8a54519401..2027d43264 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -31,26 +31,28 @@ trait Holes { self: Quasiquotes =>
import definitions._
import universeTypes._
- protected lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
- protected def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
- protected def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
- protected def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
- protected def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
- protected def isNativeType(tpe: Type) =
+ private lazy val IterableTParam = IterableClass.typeParams(0).asType.toType
+ private def inferParamImplicit(tfun: Type, targ: Type) = c.inferImplicitValue(appliedType(tfun, List(targ)), silent = true)
+ private def inferLiftable(tpe: Type): Tree = inferParamImplicit(liftableType, tpe)
+ private def inferUnliftable(tpe: Type): Tree = inferParamImplicit(unliftableType, tpe)
+ private def isLiftableType(tpe: Type) = inferLiftable(tpe) != EmptyTree
+ private def isNativeType(tpe: Type) =
(tpe <:< treeType) || (tpe <:< nameType) || (tpe <:< modsType) ||
(tpe <:< flagsType) || (tpe <:< symbolType)
- protected def isBottomType(tpe: Type) =
+ private def isBottomType(tpe: Type) =
tpe <:< NothingClass.tpe || tpe <:< NullClass.tpe
- protected def stripIterable(tpe: Type, limit: Option[Cardinality] = None): (Cardinality, Type) =
+ private def extractIterableTParam(tpe: Type) =
+ IterableTParam.asSeenFrom(tpe, IterableClass)
+ private def stripIterable(tpe: Type, limit: Option[Cardinality] = None): (Cardinality, Type) =
if (limit.map { _ == NoDot }.getOrElse { false }) (NoDot, tpe)
else if (tpe != null && !isIterableType(tpe)) (NoDot, tpe)
else if (isBottomType(tpe)) (NoDot, tpe)
else {
- val targ = IterableTParam.asSeenFrom(tpe, IterableClass)
+ val targ = extractIterableTParam(tpe)
val (card, innerTpe) = stripIterable(targ, limit.map { _.pred })
(card.succ, innerTpe)
}
- protected def iterableTypeFromCard(n: Cardinality, tpe: Type): Type = {
+ private def iterableTypeFromCard(n: Cardinality, tpe: Type): Type = {
if (n == NoDot) tpe
else appliedType(IterableClass.toType, List(iterableTypeFromCard(n.pred, tpe)))
}
@@ -74,8 +76,7 @@ trait Holes { self: Quasiquotes =>
class ApplyHole(card: Cardinality, splicee: Tree) extends Hole {
val (strippedTpe, tpe): (Type, Type) = {
- if (stripIterable(splicee.tpe)._1.value < card.value) cantSplice()
- val (_, strippedTpe) = stripIterable(splicee.tpe, limit = Some(card))
+ val (strippedCard, strippedTpe) = stripIterable(splicee.tpe, limit = Some(card))
if (isBottomType(strippedTpe)) cantSplice()
else if (isNativeType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, strippedTpe))
else if (isLiftableType(strippedTpe)) (strippedTpe, iterableTypeFromCard(card, treeType))
@@ -88,14 +89,14 @@ trait Holes { self: Quasiquotes =>
else if (isLiftableType(itpe)) lifted(itpe)(tree)
else global.abort("unreachable")
if (card == NoDot) inner(strippedTpe)(splicee)
- else iterated(card, strippedTpe, inner(strippedTpe))(splicee)
+ else iterated(card, splicee, splicee.tpe)
}
val pos = splicee.pos
val cardinality = stripIterable(tpe)._1
- protected def cantSplice(): Nothing = {
+ private def cantSplice(): Nothing = {
val (iterableCard, iterableType) = stripIterable(splicee.tpe)
val holeCardMsg = if (card != NoDot) s" with $card" else ""
val action = "splice " + splicee.tpe + holeCardMsg
@@ -111,28 +112,66 @@ trait Holes { self: Quasiquotes =>
c.abort(splicee.pos, s"Can't $action, $advice")
}
- protected def lifted(tpe: Type)(tree: Tree): Tree = {
+ private def lifted(tpe: Type)(tree: Tree): Tree = {
val lifter = inferLiftable(tpe)
assert(lifter != EmptyTree, s"couldnt find a liftable for $tpe")
val lifted = Apply(lifter, List(tree))
atPos(tree.pos)(lifted)
}
- protected def iterated(card: Cardinality, tpe: Type, elementTransform: Tree => Tree = identity)(tree: Tree): Tree = {
- assert(card != NoDot)
- def reifyIterable(tree: Tree, n: Cardinality): Tree = {
- def loop(tree: Tree, n: Cardinality): Tree =
- if (n == NoDot) elementTransform(tree)
- else {
- val x: TermName = c.freshName()
- val wrapped = reifyIterable(Ident(x), n.pred)
- val xToWrapped = Function(List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)), wrapped)
- Select(Apply(Select(tree, nme.map), List(xToWrapped)), nme.toList)
- }
- if (tree.tpe != null && (tree.tpe <:< listTreeType || tree.tpe <:< listListTreeType)) tree
- else atPos(tree.pos)(loop(tree, n))
+ private def toStats(tree: Tree): Tree =
+ // q"$u.build.toStats($tree)"
+ Apply(Select(Select(u, nme.build), nme.toStats), tree :: Nil)
+
+ private def toList(tree: Tree, tpe: Type): Tree =
+ if (isListType(tpe)) tree
+ else Select(tree, nme.toList)
+
+ private def mapF(tree: Tree, f: Tree => Tree): Tree =
+ if (f(Ident(TermName("x"))) equalsStructure Ident(TermName("x"))) tree
+ else {
+ val x: TermName = c.freshName()
+ // q"$tree.map { $x => ${f(Ident(x))} }"
+ Apply(Select(tree, nme.map),
+ Function(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree) :: Nil,
+ f(Ident(x))) :: Nil)
}
- reifyIterable(tree, card)
+
+ private object IterableType {
+ def unapply(tpe: Type): Option[Type] =
+ if (isIterableType(tpe)) Some(extractIterableTParam(tpe)) else None
+ }
+
+ private object LiftedType {
+ def unapply(tpe: Type): Option[Tree => Tree] =
+ if (tpe <:< treeType) Some(t => t)
+ else if (isLiftableType(tpe)) Some(lifted(tpe)(_))
+ else None
+ }
+
+  /** Map a high-cardinality splice onto an expression that evaluates to a list of the given cardinality.
+   *
+   *  All possible combinations of representations are given in the table below:
+   *
+   *    input                              output for T <: Tree             output for T: Liftable
+   *
+   *    ..${x: Iterable[T]}                x.toList                         x.toList.map(lift)
+   *    ..${x: T}                          toStats(x)                       toStats(lift(x))
+   *
+   *    ...${x: Iterable[Iterable[T]]}     x.toList.map { _.toList }        x.toList.map { _.toList.map(lift) }
+   *    ...${x: Iterable[T]}               x.toList.map { toStats(_) }      x.toList.map { toStats(lift(_)) }
+   *    ...${x: T}                         toStats(x).map { toStats(_) }    toStats(lift(x)).map { toStats(_) }
+   *
+   *  For optimization purposes `x.toList` is represented as just `x` when it is statically known that
+   *  x is not merely an Iterable[T] but a List[T]. Similarly, no mapping is performed when the mapping
+   *  function is known to be the identity.
+   */
+ private def iterated(card: Cardinality, tree: Tree, tpe: Type): Tree = (card, tpe) match {
+ case (DotDot, tpe @ IterableType(LiftedType(lift))) => mapF(toList(tree, tpe), lift)
+ case (DotDot, LiftedType(lift)) => toStats(lift(tree))
+ case (DotDotDot, tpe @ IterableType(inner)) => mapF(toList(tree, tpe), t => iterated(DotDot, t, inner))
+ case (DotDotDot, LiftedType(lift)) => mapF(toStats(lift(tree)), toStats)
+ case _ => global.abort("unreachable")
}
}
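Review note: a hedged sketch of the cardinalities in the table above, written against the public 2.11 quasiquote API rather than the internals in this file. Trees splice natively, while values with a Liftable instance (Int below) are lifted first, which is exactly the T <: Tree vs T: Liftable split in the table.

    import scala.reflect.runtime.universe._

    object SpliceCardinalities {
      val args: List[Tree]        = List(q"1", q"2")                    // ..$  over Iterable[T], T <: Tree
      val argss: List[List[Tree]] = List(List(q"a"), List(q"b", q"c"))  // ...$ over Iterable[Iterable[T]]
      val nums: List[Int]         = List(1, 2, 3)                       // ..$  over Iterable[T], T: Liftable

      val call   = q"f(..$args)"   // splices a single argument list
      val calls  = q"f(...$argss)" // splices several argument lists
      val lifted = q"g(..$nums)"   // each Int is lifted to a Literal before splicing
    }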
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 1bd9323752..fcb8734644 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -80,8 +80,20 @@ trait Parsers { self: Quasiquotes =>
}
import treeBuilder.{global => _, unit => _, _}
+ def quasiquoteParam(name: Name, flags: FlagSet = NoFlags) =
+ ValDef(Modifiers(flags), name.toTermName, Ident(tpnme.QUASIQUOTE_PARAM), EmptyTree)
+
// q"def foo($x)"
- override def allowTypelessParams = true
+ override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
+ if (isHole && lookingAhead { in.token == COMMA || in.token == RPAREN }) {
+ quasiquoteParam(ident(), implicitmod)
+ } else super.param(owner, implicitmod, caseParam)
+
+ // q"($x) => ..." && q"class X { selfie => }
+ override def convertToParam(tree: Tree): ValDef = tree match {
+ case Ident(name) if isHole(name) => quasiquoteParam(name)
+ case _ => super.convertToParam(tree)
+ }
// q"foo match { case $x }"
override def caseClause(): CaseDef =
@@ -160,17 +172,26 @@ trait Parsers { self: Quasiquotes =>
}
}
- object TermParser extends Parser {
- def entryPoint = { parser =>
- parser.templateOrTopStatSeq() match {
- case head :: Nil => Block(Nil, head)
- case lst => gen.mkTreeOrBlock(lst)
- }
+ /** Wrapper around tree parsed in q"..." quote. Needed to support ..$ splicing on top-level. */
+ object Q {
+ def apply(tree: Tree): Block = Block(Nil, tree).updateAttachment(Q)
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Block(Nil, contents) if tree.hasAttachment[Q.type] => Some(contents)
+ case _ => None
}
}
+ object TermParser extends Parser {
+ def entryPoint = parser => Q(gen.mkTreeOrBlock(parser.templateOrTopStatSeq()))
+ }
+
object TypeParser extends Parser {
- def entryPoint = _.typ()
+ def entryPoint = { parser =>
+ if (parser.in.token == EOF)
+ TypeTree()
+ else
+ parser.typ()
+ }
}
object CaseParser extends Parser {
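Review note: a hedged sketch of the two parser hooks added above, using the public 2.11 quasiquote API. A hole in the parameter position of a def (the param() override) and in a function literal (the convertToParam override) both accept a spliced ValDef; the names below are made up.

    import scala.reflect.runtime.universe._

    object ParamHoles {
      val param = q"val x: Int"               // a ValDef used as a parameter

      // q"def foo($x)" -- parameter hole handled by the param() override
      val method = q"def foo($param): Int = x"

      // q"($x) => ..." -- parameter hole handled by convertToParam
      val fn = q"($param) => x"
    }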
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index bdb44ad9a2..5669ec731f 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -95,7 +95,6 @@ trait Placeholders { self: Quasiquotes =>
case Ident(name) => name
case Bind(name, Ident(nme.WILDCARD)) => name
case TypeDef(_, name, List(), TypeBoundsTree(EmptyTree, EmptyTree)) => name
- case ValDef(_, name, TypeTree(), EmptyTree) => name
}
}
@@ -111,6 +110,12 @@ trait Placeholders { self: Quasiquotes =>
}
}
+ object ParamPlaceholder extends HolePlaceholder {
+ def matching = {
+ case ValDef(_, name, Ident(tpnme.QUASIQUOTE_PARAM), EmptyTree) => name
+ }
+ }
+
object TuplePlaceholder {
def unapply(tree: Tree): Option[List[Tree]] = tree match {
case Apply(Ident(nme.QUASIQUOTE_TUPLE), args) => Some(args)
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 87ab52414c..273245f7bd 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -137,6 +137,7 @@ trait Reifiers { self: Quasiquotes =>
case RefineStatPlaceholder(hole) => reifyRefineStat(hole)
case EarlyDefPlaceholder(hole) => reifyEarlyDef(hole)
case PackageStatPlaceholder(hole) => reifyPackageStat(hole)
+ case ParamPlaceholder(hole) => hole.tree
// for enumerators are checked not during splicing but during
// desugaring of the for loop in SyntacticFor & SyntacticForYield
case ForEnumPlaceholder(hole) => hole.tree
@@ -159,8 +160,12 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
case SyntacticNew(earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
- case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
+ case SyntacticDefDef(mods, name, tparams, build.ImplicitParams(vparamss, implparams), tpt, rhs) =>
+ if (implparams.nonEmpty)
+ mirrorBuildCall(nme.SyntacticDefDef, reify(mods), reify(name), reify(tparams),
+ reifyBuildCall(nme.ImplicitParams, vparamss, implparams), reify(tpt), reify(rhs))
+ else
+ reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
case SyntacticVarDef(mods, name, tpt, rhs) =>
@@ -185,12 +190,14 @@ trait Reifiers { self: Quasiquotes =>
reifyBuildCall(nme.SyntacticFunction, args, body)
case SyntacticIdent(name, isBackquoted) =>
reifyBuildCall(nme.SyntacticIdent, name, isBackquoted)
- case Block(Nil, Placeholder(Hole(tree, DotDot))) =>
+ case Q(Placeholder(Hole(tree, DotDot))) =>
mirrorBuildCall(nme.SyntacticBlock, tree)
- case Block(Nil, other) =>
+ case Q(other) =>
reifyTree(other)
- case Block(stats, last) =>
- reifyBuildCall(nme.SyntacticBlock, stats :+ last)
+      // SyntacticBlock always matches, so we have to be careful
+      // not to cause infinite recursion.
+ case block @ SyntacticBlock(stats) if block.isInstanceOf[Block] =>
+ reifyBuildCall(nme.SyntacticBlock, stats)
case Try(block, catches, finalizer) =>
reifyBuildCall(nme.SyntacticTry, block, catches, finalizer)
case Match(selector, cases) =>
@@ -311,6 +318,8 @@ trait Reifiers { self: Quasiquotes =>
case EarlyDefPlaceholder(h @ Hole(_, DotDot)) => reifyEarlyDef(h)
case PackageStatPlaceholder(h @ Hole(_, DotDot)) => reifyPackageStat(h)
case ForEnumPlaceholder(Hole(tree, DotDot)) => tree
+ case ParamPlaceholder(Hole(tree, DotDot)) => tree
+ case List(ParamPlaceholder(Hole(tree, DotDotDot))) => tree
case List(Placeholder(Hole(tree, DotDotDot))) => tree
} {
reify(_)
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index e7d87dd3fe..8a24f721d7 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -247,15 +247,17 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (kvs1 eq kvs)
- this
- else if (kvs1.isEmpty)
- HashMap.empty[A,B]
- else if(kvs1.tail.isEmpty) {
- val kv = kvs1.head
- new HashMap1[A,B](kv._1,hash,kv._2,kv)
- } else
- new HashMapCollision1(hash, kvs1)
+ kvs1.size match {
+ case 0 =>
+ HashMap.empty[A,B]
+ case 1 =>
+ val kv = kvs1.head
+ new HashMap1(kv._1,hash,kv._2,kv)
+ case x if x == kvs.size =>
+ this
+ case _ =>
+ new HashMapCollision1(hash, kvs1)
+ }
} else this
override protected def filter0(p: ((A, B)) => Boolean, negate: Boolean, level: Int, buffer: Array[HashMap[A, B @uV]], offset0: Int): HashMap[A, B] = {
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 90aabc5a9a..c3728fa02a 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -325,36 +325,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
// Create a proxy for Java serialization that allows us to avoid mutation
// during de-serialization. This is the Serialization Proxy Pattern.
- protected final def writeReplace(): AnyRef = new SerializationProxy(this)
-}
-
-@SerialVersionUID(1L)
-private class SerializationProxy[B](@transient private var orig: List[B]) extends Serializable {
-
- private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = orig
- while (!xs.isEmpty) {
- out.writeObject(xs.head)
- xs = xs.tail
- }
- out.writeObject(ListSerializeEnd)
- }
-
- // Java serialization calls this before readResolve during de-serialization.
- // Read the whole list and store it in `orig`.
- private def readObject(in: ObjectInputStream) {
- val builder = List.newBuilder[B]
- while (true) in.readObject match {
- case ListSerializeEnd =>
- orig = builder.result()
- return
- case a =>
- builder += a.asInstanceOf[B]
- }
- }
-
- // Provide the result stored in `orig` for Java serialization
- private def readResolve(): AnyRef = orig
+ protected final def writeReplace(): AnyRef = new List.SerializationProxy(this)
}
/** The empty list.
@@ -385,8 +356,7 @@ case object Nil extends List[Nothing] {
* @version 1.0, 15/07/2003
* @since 2.8
*/
-final case class ::[B](private val hd: B, private[scala] var tl: List[B]) extends List[B] {
- override def head : B = hd
+final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
override def tail : List[B] = tl
override def isEmpty: Boolean = false
}
@@ -405,6 +375,35 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
+
+ @SerialVersionUID(1L)
+ private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable {
+
+ private def writeObject(out: ObjectOutputStream) {
+ var xs: List[A] = orig
+ while (!xs.isEmpty) {
+ out.writeObject(xs.head)
+ xs = xs.tail
+ }
+ out.writeObject(ListSerializeEnd)
+ }
+
+ // Java serialization calls this before readResolve during de-serialization.
+ // Read the whole list and store it in `orig`.
+ private def readObject(in: ObjectInputStream) {
+ val builder = List.newBuilder[A]
+ while (true) in.readObject match {
+ case ListSerializeEnd =>
+ orig = builder.result()
+ return
+ case a =>
+ builder += a.asInstanceOf[A]
+ }
+ }
+
+ // Provide the result stored in `orig` for Java serialization
+ private def readResolve(): AnyRef = orig
+ }
}
/** Only used for list serialization */
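Review note: a hedged round-trip showing the proxy in action; plain JDK serialization, nothing List-internal is touched. writeReplace substitutes List.SerializationProxy on write, and readResolve hands back the list rebuilt by the builder on read, so no cons cell is mutated during de-serialization.

    import java.io._

    object ListSerializationDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3)

        val buf = new ByteArrayOutputStream()
        val out = new ObjectOutputStream(buf)
        out.writeObject(xs)
        out.close()

        val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
        val ys = in.readObject().asInstanceOf[List[Int]]
        println(ys == xs) // true: elements were streamed out, then rebuilt with a builder
      }
    }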
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index b2a1b1ce29..7c40e84280 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -123,12 +123,12 @@ extends AbstractMap[A, B]
def hasNext = !self.isEmpty
def next(): (A,B) =
if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.tail; res }
+ else { val res = (self.key, self.value); self = self.next; res }
}.toList.reverseIterator
protected def key: A = throw new NoSuchElementException("empty map")
protected def value: B = throw new NoSuchElementException("empty map")
- override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
/** This class represents an entry in the `ListMap`.
*/
@@ -142,7 +142,7 @@ extends AbstractMap[A, B]
override def size: Int = size0(this, 0)
// to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
+ @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
/** Is this an empty map?
*
@@ -163,7 +163,7 @@ extends AbstractMap[A, B]
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
- else apply0(cur.tail, k)
+ else apply0(cur.next, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
@@ -175,7 +175,7 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
- else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
+ else if (cur.next.nonEmpty) get0(cur.next, k) else None
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
@@ -196,12 +196,12 @@ extends AbstractMap[A, B]
if (cur.isEmpty)
acc.last
else if (k == cur.key)
- (cur.tail /: acc) {
+ (cur.next /: acc) {
case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
}
else
- remove0(k, cur.tail, cur::acc)
+ remove0(k, cur.next, cur::acc)
- override def tail: ListMap[A, B1] = ListMap.this
+ override protected def next: ListMap[A, B1] = ListMap.this
}
}
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index e21a8dfa8a..0fbf7942d4 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -82,6 +82,16 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 2 */
@@ -102,6 +112,17 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 3 */
@@ -123,6 +144,18 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else None
+ }
}
/** An optimized representation for immutable sets of size 4 */
@@ -145,6 +178,19 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: A => U): Unit = {
f(elem1); f(elem2); f(elem3); f(elem4)
}
+ override def exists(f: A => Boolean): Boolean = {
+ f(elem1) || f(elem2) || f(elem3) || f(elem4)
+ }
+ override def forall(f: A => Boolean): Boolean = {
+ f(elem1) && f(elem2) && f(elem3) && f(elem4)
+ }
+ override def find(f: A => Boolean): Option[A] = {
+ if (f(elem1)) Some(elem1)
+ else if (f(elem2)) Some(elem2)
+ else if (f(elem3)) Some(elem3)
+ else if (f(elem4)) Some(elem4)
+ else None
+ }
}
}
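These overrides let Set1..Set4 answer exists/forall/find by evaluating the predicate directly on their fields, short-circuiting in declaration order, instead of falling back to the generic iterator/foreach based defaults. A hedged standalone sketch of the idea (Pair2Set is a hypothetical name, not the library class):
{{{
// A two-element "set" that mirrors Set.Set2 above: no Iterator is
// allocated, and evaluation stops at the first decisive element.
final class Pair2Set[A](elem1: A, elem2: A) {
  def exists(p: A => Boolean): Boolean = p(elem1) || p(elem2)
  def forall(p: A => Boolean): Boolean = p(elem1) && p(elem2)
  def find(p: A => Boolean): Option[A] =
    if (p(elem1)) Some(elem1)
    else if (p(elem2)) Some(elem2)
    else None
}

// After this change, e.g. Set("a", "b").exists(_.startsWith("a"))
// dispatches to the specialized Set2.exists rather than building an iterator.
}}}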
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 49c3b4c3cd..60de147477 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1108,11 +1108,15 @@ object Stream extends SeqFactory[Stream] {
override def isEmpty = false
override def head = hd
@volatile private[this] var tlVal: Stream[A] = _
- def tailDefined: Boolean = tlVal ne null
+ @volatile private[this] var tlGen = tl _
+ def tailDefined: Boolean = tlGen eq null
override def tail: Stream[A] = {
if (!tailDefined)
synchronized {
- if (!tailDefined) tlVal = tl
+ if (!tailDefined) {
+ tlVal = tlGen()
+ tlGen = null
+ }
}
tlVal
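The rewrite stores the by-name tail in a `tlGen` thunk and nulls it out once the tail has been forced, so `tailDefined` becomes a null check and the thunk (plus whatever it captures) can be garbage collected. A hedged standalone sketch of the same memoization pattern (LazyCell is a hypothetical name):
{{{
// Memoizes a by-name value; once forced, the generator thunk is dropped
// so any references it captured become collectable.
final class LazyCell[A](tl: => A) {
  @volatile private[this] var value: A = _
  @volatile private[this] var gen: () => A = () => tl

  def isDefined: Boolean = gen eq null

  def force: A = {
    if (!isDefined)
      synchronized {
        if (!isDefined) {        // double-checked locking, as in Cons.tail above
          value = gen()
          gen = null
        }
      }
    value
  }
}

// REPL-style usage:
// val cell = new LazyCell({ println("computed once"); 42 })
// cell.force   // prints "computed once", returns 42
// cell.force   // cached, returns 42 without recomputing
}}}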
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index df74bb5187..29c92a111c 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -129,9 +129,20 @@ extends AbstractMap[K, V]
override def getOrElseUpdate(key: K, defaultValue: => V): V = {
val h = hashOf(key)
- val i = seekEntryOrOpen(h, key)
+ var i = seekEntryOrOpen(h, key)
if (i < 0) {
- val value = defaultValue
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val oh = _hashes
+ val ans = defaultValue
+ if (oh ne _hashes) {
+ i = seekEntryOrOpen(h, key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
_size += 1
val j = i & IndexMask
_hashes(j) = h
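The re-seek guards against a by-name `defaultValue` that mutates the map it is about to be stored in: if that computation grows the map past a resize, the previously computed slot index points into the old table. A hedged REPL-style sketch of the scenario (key names and sizes are arbitrary):
{{{
import scala.collection.mutable.AnyRefMap

val m = AnyRefMap.empty[String, Int]

// The default-value computation inserts other keys, forcing a resize.
def noisyDefault(): Int = {
  (1 to 100).foreach(i => m(s"other$i") = i)
  -1
}

m.getOrElseUpdate("k", noisyDefault())

// With the re-seek above, "k" lands in the correct slot of the resized
// table, so it is expected that:
assert(m.contains("k") && m("k") == -1)
}}}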
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 81c381279f..984ae6f7cc 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -160,9 +160,20 @@ extends AbstractMap[Long, V]
else minValue.asInstanceOf[V]
}
else {
- val i = seekEntryOrOpen(key)
+ var i = seekEntryOrOpen(key)
if (i < 0) {
- val value = defaultValue
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val ok = _keys
+ val ans = defaultValue
+ if (ok ne _keys) {
+ i = seekEntryOrOpen(key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
_size += 1
val j = i & IndexMask
_keys(j) = key
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index d271c4cdeb..4ed0687334 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -494,9 +494,9 @@ object Future {
/** Simple version of `Future.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
* Useful for reducing many `Future`s into a single `Future`.
*/
- def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
+ def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(successful(cbf(in))) {
- (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
+ (fr, fa) => for (r <- fr; a <- fa) yield (r += a)
} map (_.result())
}
@@ -569,9 +569,9 @@ object Future {
* val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
* }}}
*/
- def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
+ def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
in.foldLeft(successful(cbf(in))) { (fr, a) =>
- val fb = fn(a.asInstanceOf[A])
+ val fb = fn(a)
for (r <- fr; b <- fb) yield (r += b)
}.map(_.result())
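The signature change from `M[_] <: TraversableOnce[_]` to `M[X] <: TraversableOnce[X]` is what lets the casts go: with the existential bound the element type of the container is unknown, so the folded elements come back untyped. A hedged sketch of the difference on a toy function (the heads* names are hypothetical, not library code):
{{{
// Existential bound: elements are only known to be "something",
// so recovering the element type requires a cast.
def headsLoose[A, M[_] <: TraversableOnce[_]](in: M[List[A]]): List[A] =
  in.foldLeft(List.empty[A])((acc, xs) => acc :+ xs.asInstanceOf[List[A]].head)

// Properly parameterised bound: M[List[A]] is known to contain List[A].
def headsTyped[A, M[X] <: TraversableOnce[X]](in: M[List[A]]): List[A] =
  in.foldLeft(List.empty[A])((acc, xs) => acc :+ xs.head)

headsTyped[Int, List](List(List(1, 2), List(3)))  // List(1, 3)
}}}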
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 86132bb876..b70426a145 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -67,7 +67,21 @@ import java.util.regex.{ Pattern, Matcher }
* Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a
* pattern match.
*
- * Note, however, that when Regex is used as an extractor in a pattern match, it
+ * Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]]
+ * needs to be initialized (by calling `hasNext` or `next()`, or causing these to be
+ * called) before information about a match can be retrieved:
+ *
+ * {{{
+ * val msg = "I love Scala"
+ *
+ * // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException
+ *
+ * val matches = " ".r.findAllIn(msg)
+ * matches.hasNext // initializes the matcher
+ * val start = matches.start
+ * }}}
+ *
+ * When Regex is used as an extractor in a pattern match, note that it
* only succeeds if the whole text can be matched. For this reason, one usually
 * calls a method to find the matching substrings, and then uses it as an extractor
 * to break the match into subgroups.
@@ -267,6 +281,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* that can be queried for data such as the text that precedes the
* match, subgroups, etc.
*
+ * Attempting to retrieve information about a match before initializing
+ * the iterator can result in [[java.lang.IllegalStateException]]s. See
+ * [[scala.util.matching.Regex.MatchIterator]] for details.
+ *
* @param source The text to match against.
* @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
@@ -476,15 +494,7 @@ trait UnanchoredRegex extends Regex {
}
/** This object defines inner classes that describe
- * regex matches and helper objects. The class hierarchy
- * is as follows:
- *
- * {{{
- * MatchData
- * / \
- * MatchIterator Match
- * }}}
- *
+ * regex matches and helper objects.
*/
object Regex {
@@ -634,7 +644,14 @@ object Regex {
def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
}
- /** A class to step through a sequence of regex matches
+ /** A class to step through a sequence of regex matches.
+ *
+ * All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw
+ * an [[java.lang.IllegalStateException]] until the matcher is initialized by
+ * calling `hasNext` or `next()` or causing these methods to be called, such as
+ * by invoking `toString` or iterating through the iterator's elements.
+ *
+ * @see [[java.util.regex.Matcher]]
*/
class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
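A short usage sketch of the pitfall documented above and two ways around it (the pattern and input are arbitrary examples):
{{{
val date = """(\d{4})-(\d{2})-(\d{2})""".r
val text = "due 2014-03-17, paid 2014-04-01"

// Pitfall: MatchIterator is lazy, so querying MatchData before stepping
// the iterator throws an IllegalStateException.
// val bad = date.findAllIn(text).start

// Option 1: step the iterator first.
val it = date.findAllIn(text)
it.hasNext                  // initializes the underlying matcher
val firstStart = it.start   // 4

// Option 2: use findAllMatchIn, whose Match objects are already initialized.
val starts = date.findAllMatchIn(text).map(_.start).toList // List(4, 21)
}}}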
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 10c2def72a..3bcf751ace 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -72,6 +72,8 @@ private[reflect] trait BuildUtils { self: Universe =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+ def toStats(tree: Tree): List[Tree]
+
def mkAnnotation(tree: Tree): Tree
def mkAnnotation(trees: List[Tree]): List[Tree]
@@ -94,6 +96,13 @@ private[reflect] trait BuildUtils { self: Universe =>
def freshTypeName(prefix: String): TypeName
+ val ImplicitParams: ImplicitParamsExtractor
+
+ trait ImplicitParamsExtractor {
+ def apply(paramss: List[List[ValDef]], implparams: List[ValDef]): List[List[ValDef]]
+ def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])]
+ }
+
val ScalaDot: ScalaDotExtractor
trait ScalaDotExtractor {
@@ -126,8 +135,8 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
- constrMods: Modifiers, vparamss: List[List[Tree]], earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
+ constrMods: Modifiers, vparamss: List[List[Tree]],
+ earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -145,7 +154,7 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree
+ parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -153,7 +162,7 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticPackageObjectDefExtractor {
def apply(name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree
+ parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef
def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -197,9 +206,10 @@ private[reflect] trait BuildUtils { self: Universe =>
val SyntacticDefDef: SyntacticDefDefExtractor
trait SyntacticDefDefExtractor {
- def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+ vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef
- def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)]
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
}
val SyntacticValDef: SyntacticValDefExtractor
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index e255d305f7..1a8885e6b5 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -214,6 +214,14 @@ trait StandardDefinitions {
/** The module symbol of module `scala.Some`. */
def SomeModule: ModuleSymbol
+ /** Function-like API that lets you access the symbol
+ * of the definition with the given arity and also look
+ * through all known symbols via `seq`.
+ */
+ abstract class VarArityClassApi extends (Int => Symbol) {
+ def seq: Seq[ClassSymbol]
+ }
+
/** Function-like object that maps arity to symbols for classes `scala.ProductX`.
* - 0th element is `Unit`
* - 1st element is `Product1`
@@ -222,7 +230,7 @@ trait StandardDefinitions {
* - 23rd element is `NoSymbol`
* - ...
*/
- def ProductClass: Int => Symbol
+ def ProductClass: VarArityClassApi
/** Function-like object that maps arity to symbols for classes `scala.FunctionX`.
* - 0th element is `Function0`
@@ -232,17 +240,17 @@ trait StandardDefinitions {
* - 23rd element is `NoSymbol`
* - ...
*/
- def FunctionClass: Int => Symbol
+ def FunctionClass: VarArityClassApi
/** Function-like object that maps arity to symbols for classes `scala.TupleX`.
* - 0th element is `NoSymbol`
- * - 1st element is `Product1`
+ * - 1st element is `Tuple1`
* - ...
- * - 22nd element is `Product22`
+ * - 22nd element is `Tuple22`
* - 23rd element is `NoSymbol`
* - ...
*/
- def TupleClass: Int => Symbol
+ def TupleClass: VarArityClassApi
/** Contains Scala primitive value classes:
* - Byte
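With the bound widened to `VarArityClassApi`, callers keep the `Int => Symbol` view and additionally get `seq` to enumerate every known arity. A hedged runtime-reflection sketch, assuming a 2.11 universe with this change applied:
{{{
import scala.reflect.runtime.universe._

// Look up a single arity, as before.
val tuple2Sym = definitions.TupleClass(2)          // symbol of scala.Tuple2

// New: walk all arities via `seq`.
val functionArities = definitions.FunctionClass.seq.map(_.typeParams.length - 1)
// Function0..Function22 take 0..22 value parameters (the extra type
// parameter is the result type), so this yields 0 to 22.
}}}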
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 1c9b77581a..85a8ee0185 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -81,14 +81,14 @@ abstract class Universe extends Symbols
with Liftables
with Quasiquotes
{
- /** Use `refiy` to produce the abstract syntax tree representing a given Scala expression.
+ /** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
*
* For example:
*
* {{{
- * val five = reify{ 5 } // Literal(Constant(5))
- * reify{ 2 + 4 } // Apply( Select( Literal(Constant(2)), newTermName("\$plus")), List( Literal(Constant(4)) ) )
- * reify{ five.splice + 4 } // Apply( Select( Literal(Constant(5)), newTermName("\$plus")), List( Literal(Constant(4)) ) )
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 5.toString } // Apply(Select(Literal(Constant(5)), TermName("toString")), List())
+ * reify{ five.splice.toString } // Apply(Select(five, TermName("toString")), List())
* }}}
*
* The produced tree is path dependent on the Universe `reify` was called from.
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 4fde57ed02..d634034fe9 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -48,7 +48,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
- dropOtherAnnotations(annotations, cls).nonEmpty
+ dropOtherAnnotations(annotations, cls) ne Nil
def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
//OPT inlined from exists to save on #closures; was: annotations find (_ matches cls)
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 9b19dc11cb..6106339324 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -61,6 +61,8 @@ trait BuildUtils { self: SymbolTable =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
+ def toStats(tree: Tree): List[Tree] = SyntacticBlock.unapply(tree).get
+
def mkAnnotation(tree: Tree): Tree = tree match {
case SyntacticNew(Nil, SyntacticApplied(SyntacticTypeApplied(_, _), _) :: Nil, noSelfType, Nil) =>
tree
@@ -71,18 +73,25 @@ trait BuildUtils { self: SymbolTable =>
def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation)
- def mkVparamss(argss: List[List[Tree]]): List[List[ValDef]] = argss.map(_.map(mkParam))
+ def mkParam(argss: List[List[Tree]], extraFlags: FlagSet = NoFlags): List[List[ValDef]] =
+ argss.map { args => args.map { mkParam(_, extraFlags) } }
- def mkParam(tree: Tree): ValDef = tree match {
+ def mkParam(tree: Tree, extraFlags: FlagSet): ValDef = tree match {
+ case Typed(Ident(name: TermName), tpt) =>
+ mkParam(ValDef(NoMods, name, tpt, EmptyTree), extraFlags)
case vd: ValDef =>
- var newmods = (vd.mods | PARAM) & (~DEFERRED)
+ var newmods = vd.mods & (~DEFERRED)
if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
- copyValDef(vd)(mods = newmods)
+ copyValDef(vd)(mods = newmods | extraFlags)
case _ =>
- throw new IllegalArgumentException(s"$tree is not valid represenation of function parameter, " +
+ throw new IllegalArgumentException(s"$tree is not a valid representation of a parameter, " +
"""consider reformatting it into q"val $name: $T = $default" shape""")
}
+ def mkImplicitParam(args: List[Tree]): List[ValDef] = args.map(mkImplicitParam)
+
+ def mkImplicitParam(tree: Tree): ValDef = mkParam(tree, IMPLICIT | PARAM)
+
def mkTparams(tparams: List[Tree]): List[TypeDef] =
tparams.map {
case td: TypeDef => copyTypeDef(td)(mods = (td.mods | PARAM) & (~DEFERRED))
@@ -140,6 +149,16 @@ trait BuildUtils { self: SymbolTable =>
protected implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
+ object ImplicitParams extends ImplicitParamsExtractor {
+ def apply(paramss: List[List[ValDef]], implparams: List[ValDef]): List[List[ValDef]] =
+ if (implparams.nonEmpty) paramss :+ mkImplicitParam(implparams) else paramss
+
+ def unapply(vparamss: List[List[ValDef]]): Some[(List[List[ValDef]], List[ValDef])] = vparamss match {
+ case init :+ (last @ (initlast :: _)) if initlast.mods.isImplicit => Some((init, last))
+ case _ => Some((vparamss, Nil))
+ }
+ }
+
object FlagsRepr extends FlagsReprExtractor {
def apply(bits: Long): FlagSet = bits
def unapply(flags: Long): Some[Long] = Some(flags)
@@ -239,14 +258,10 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticClassDef extends SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[Tree],
- constrMods: Modifiers, vparamss: List[List[Tree]], earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
+ constrMods: Modifiers, vparamss: List[List[Tree]],
+ earlyDefs: List[Tree], parents: List[Tree], selfType: Tree, body: List[Tree]): ClassDef = {
val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
- val vparamss0 = vparamss.map { _.map {
- case vd: ValDef => copyValDef(vd)(mods = (vd.mods | extraFlags) & (~DEFERRED))
- case tree => throw new IllegalArgumentException(s"$tree is not valid representation of class parameter, " +
- """consider reformatting it into q"val $name: $T = $default" shape""")
- } }
+ val vparamss0 = mkParam(vparamss, extraFlags)
val tparams0 = mkTparams(tparams)
val parents0 = gen.mkParents(mods,
if (mods.isCase) parents.filter {
@@ -289,7 +304,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticObjectDef extends SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]) =
+ parents: List[Tree], selfType: Tree, body: List[Tree]): ModuleDef =
ModuleDef(mods, name, gen.mkTemplate(parents, mkSelfType(selfType), NoMods, Nil, earlyDefs ::: body))
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
@@ -302,7 +317,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
def apply(name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfType: Tree, body: List[Tree]): Tree =
+ parents: List[Tree], selfType: Tree, body: List[Tree]): PackageDef =
gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
@@ -381,19 +396,47 @@ trait BuildUtils { self: SymbolTable =>
}
}
+ object SyntheticUnit {
+ def unapply(tree: Tree): Boolean = tree match {
+ case Literal(Constant(())) if tree.hasAttachment[SyntheticUnitAttachment.type] => true
+ case _ => false
+ }
+ }
+
+ /** Syntactic combinator that abstracts over Block tree.
+ *
+ * Apart from providing a more straightforward API that exposes
+ * a block as a list of elements rather than a (stats, expr) pair,
+ * it also:
+ *
+ * 1. Treats q"" (the empty tree) as a zero-element block.
+ *
+ * 2. Strips trailing synthetic units which are inserted by the
+ * compiler if the block ends with a definition rather
+ * than an expression.
+ *
+ * 3. Matches non-block term trees and recognizes them as
+ * single-element blocks, for consistency with the compiler's
+ * default of treating a block that consists of a single
+ * expression as just that expression.
+ */
object SyntacticBlock extends SyntacticBlockExtractor {
- def apply(stats: List[Tree]): Tree = gen.mkBlock(stats)
+ def apply(stats: List[Tree]): Tree =
+ if (stats.isEmpty) EmptyTree
+ else gen.mkBlock(stats)
def unapply(tree: Tree): Option[List[Tree]] = tree match {
- case self.Block(stats, expr) => Some(stats :+ expr)
- case _ if tree.isTerm => Some(tree :: Nil)
- case _ => None
+ case self.Block(stats, SyntheticUnit()) => Some(stats)
+ case self.Block(stats, expr) => Some(stats :+ expr)
+ case EmptyTree => Some(Nil)
+ case _ if tree.isTerm => Some(tree :: Nil)
+ case _ => None
}
}
object SyntacticFunction extends SyntacticFunctionExtractor {
def apply(params: List[Tree], body: Tree): Tree = {
- val params0 :: Nil = mkVparamss(params :: Nil)
+ val params0 :: Nil = mkParam(params :: Nil, PARAM)
require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values")
Function(params0, body)
}
@@ -420,11 +463,15 @@ trait BuildUtils { self: SymbolTable =>
}
object SyntacticDefDef extends SyntacticDefDefExtractor {
- def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef =
- DefDef(mods, name, mkTparams(tparams), mkVparamss(vparamss), tpt, rhs)
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree],
+ vparamss: List[List[Tree]], tpt: Tree, rhs: Tree): DefDef = {
+ val vparamss0 = mkParam(vparamss, PARAM)
+ DefDef(mods, name, mkTparams(tparams), vparamss0, tpt, rhs)
+ }
- def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)] = tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) => Some((mods, name, tparams, vparamss, tpt, rhs))
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] = tree match {
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ Some((mods, name, tparams, vparamss, tpt, rhs))
case _ => None
}
}
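The practical effect of `SyntacticBlock` (and of `toStats` above) shows up through quasiquotes: a `..$stats` pattern sees a block as a flat list of statements, with the compiler-inserted trailing unit (see gen.mkSyntheticUnit later in this diff) stripped and the empty tree treated as a zero-element block. A hedged REPL-style sketch, assuming a 2.11 universe with this change applied:
{{{
import scala.reflect.runtime.universe._

val block = q"{ val x = 1; def f = 2 }"

// The tree builder appends a synthetic `()` after the trailing definition;
// SyntacticBlock.unapply strips it, so only the user-written statements match.
val q"{ ..$stats }" = block
// stats: roughly List(val x = 1, def f = 2)

// q"" (the empty tree) is matched as a zero-element block.
val q"{ ..$empty }" = q""
// empty: List()
}}}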
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index c2939e69b5..4d24f0b219 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -522,7 +522,7 @@ trait Definitions extends api.StandardDefinitions {
def hasJavaMainMethod(sym: Symbol): Boolean =
(sym.tpe member nme.main).alternatives exists isJavaMainMethod
- class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends (Int => Symbol) {
+ class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends VarArityClassApi {
private val offset = countFrom - init.size
private def isDefinedAt(i: Int) = i < seq.length + offset && i >= offset
val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map { i => getRequiredClass("scala." + name + i) }).toVector
@@ -625,6 +625,7 @@ trait Definitions extends api.StandardDefinitions {
def isBlackboxMacroBundleType(tp: Type) =
isMacroBundleType(tp) && (macroBundleParamInfo(tp) <:< BlackboxContextClass.tpe)
+ def isListType(tp: Type) = tp <:< classExistentialType(ListClass)
def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass)
// These "direct" calls perform no dealiasing. They are most needed when
diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala
index bb488aa2a8..1de8d425ad 100644
--- a/src/reflect/scala/reflect/internal/FreshNames.scala
+++ b/src/reflect/scala/reflect/internal/FreshNames.scala
@@ -9,6 +9,10 @@ package internal
import scala.reflect.internal.util.FreshNameCreator
trait FreshNames { self: Names =>
+ // SI-6879 Keeps track of counters that are supposed to be globally unique
+ // as opposed to traditional freshers that are unique to compilation units.
+ val globalFreshNameCreator = new FreshNameCreator
+
// default fresh name creator used to abstract over currentUnit.fresh and runtime fresh name creator
def currentFreshNameCreator: FreshNameCreator
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 09fd996f39..139a79ffe1 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -36,6 +36,10 @@ trait StdAttachments {
*/
case object ForAttachment extends PlainAttachment
+ /** Identifies unit constants which were inserted by the compiler (e.g. gen.mkBlock)
+ */
+ case object SyntheticUnitAttachment extends PlainAttachment
+
/** Untyped list of subpatterns attached to selector dummy. */
case class SubpatternsAttachment(patterns: List[Tree])
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index ed3e7dbc4c..28d799ea0c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -53,14 +53,17 @@ trait StdNames {
*
* We obtain the formula:
*
- * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
+ * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + suffixLength
*
- * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+ * (+suffixLength for ".class" and a potential module class suffix that is added *after* this transform).
+ *
+ * MaxNameLength can therefore be computed as follows:
*/
val marker = "$$$$"
+ val maxSuffixLength = "$.class".length + 1 // potential module class suffix and file extension
val MaxNameLength = math.min(
- settings.maxClassfileName.value - 6,
- 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
+ settings.maxClassfileName.value - maxSuffixLength,
+ 2 * (settings.maxClassfileName.value - maxSuffixLength - 2*marker.length - 32)
)
def toMD5(s: String, edge: Int): String = {
val prefix = s take edge
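Plugging in the defaults makes the new bound concrete; a worked example, assuming the 255 default for maxClassfileName (visible in the runtime Settings change later in this diff):
{{{
val maxClassfileName = 255
val marker           = "$$$$"                 // length 4
val maxSuffixLength  = "$.class".length + 1   // 8
val MaxNameLength = math.min(
  maxClassfileName - maxSuffixLength,                               // 247
  2 * (maxClassfileName - maxSuffixLength - 2 * marker.length - 32) // 414
)
// MaxNameLength == 247: names longer than that get the md5-based mangling.
}}}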
@@ -250,12 +253,13 @@ trait StdNames {
final val Quasiquote: NameType = "Quasiquote"
// quasiquote-specific names
- final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
- final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
- final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
- final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
final val QUASIQUOTE_PACKAGE_STAT: NameType = "$quasiquote$package$stat$"
+ final val QUASIQUOTE_PARAM: NameType = "$quasiquote$param$"
+ final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -305,6 +309,8 @@ trait StdNames {
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
val SUPER_PREFIX_STRING = "super$"
val WHILE_PREFIX = "while$"
+ val FRESH_PREFIX = "fresh"
+ val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names
// Compiler internal names
val ANYname: NameType = "<anyname>"
@@ -575,6 +581,7 @@ trait StdNames {
val Flag : NameType = "Flag"
val FlagsRepr: NameType = "FlagsRepr"
val Ident: NameType = "Ident"
+ val ImplicitParams: NameType = "ImplicitParams"
val Import: NameType = "Import"
val Literal: NameType = "Literal"
val LiteralAnnotArg: NameType = "LiteralAnnotArg"
@@ -703,9 +710,9 @@ trait StdNames {
val materializeTypeTag: NameType = "materializeTypeTag"
val moduleClass : NameType = "moduleClass"
val mkAnnotation: NameType = "mkAnnotation"
- val mkRefineStat: NameType = "mkRefineStat"
val mkEarlyDef: NameType = "mkEarlyDef"
val mkPackageStat: NameType = "mkPackageStat"
+ val mkRefineStat: NameType = "mkRefineStat"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
@@ -750,6 +757,7 @@ trait StdNames {
val toArray: NameType = "toArray"
val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
+ val toStats: NameType = "toStats"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index b538648b36..c088e8f57c 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -125,7 +125,7 @@ abstract class SymbolPairs {
* considered as a (lo, high) pair? Types always match. Term symbols
* match if their member types relative to `self` match.
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean
+ protected def matches(lo: Symbol, high: Symbol): Boolean
/** The parents and base classes of `base`. Can be refined in subclasses.
*/
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index f49ddaf6ca..26914fecc1 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3404,8 +3404,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the new list of info-adjusted symbols
*/
def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = {
- val syms1 = syms map symFn
- syms1 map (_ substInfo (syms, syms1))
+ val syms1 = mapList(syms)(symFn)
+ mapList(syms1)(_ substInfo (syms, syms1))
}
/** Derives a new Type by first deriving new symbols as in deriveSymbols,
@@ -3445,9 +3445,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @return the newly created, info-adjusted symbols
*/
def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
- cloneSymbols(syms) map (_ modifyInfo infoFn)
+ mapList(cloneSymbols(syms))(_ modifyInfo infoFn)
def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
- cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
+ mapList(cloneSymbolsAtOwner(syms, owner))(_ modifyInfo infoFn)
/** Functions which perform the standard clone/substituting on the given symbols and type,
* then call the creator function with the new symbols and type as arguments.
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index b16cbd8325..29fdba2781 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -49,10 +49,10 @@ abstract class TreeGen extends macros.TreeBuilder {
mkMethodCall(Select(receiver, method), targs, args)
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
- Apply(mkTypeApply(target, targs map TypeTree), args)
+ Apply(mkTypeApply(target, mapList(targs)(TypeTree)), args)
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
- mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+ mkTypeApply(mkAttributedRef(method), mapList(targs)(TypeTree))
/** Builds a reference to value whose type is given stable prefix.
* The type must be suitable for this. For example, it
@@ -281,7 +281,7 @@ abstract class TreeGen extends macros.TreeBuilder {
case tree :: Nil if flattenUnary =>
tree
case _ =>
- Apply(scalaDot(TupleClass(elems.length).companionModule.name), elems)
+ Apply(scalaDot(TupleClass(elems.length).name.toTermName), elems)
}
def mkTupleType(elems: List[Tree], flattenUnary: Boolean = true): Tree = elems match {
@@ -342,7 +342,7 @@ abstract class TreeGen extends macros.TreeBuilder {
}
param
}
-
+
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
val gvdefs = evdefs map {
@@ -381,11 +381,11 @@ abstract class TreeGen extends macros.TreeBuilder {
}
constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
// Field definitions for the class - remove defaults.
-
+
val fieldDefs = vparamss.flatten map (vd => {
val field = copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree)
// Prevent overlapping of `field` end's position with default argument's start position.
- // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
+ // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when
// the `pos` is a point position with all its values equal to `vd.rhs.pos.start`.
if(field.pos.isRange && vd.rhs.pos.isRange) field.pos = field.pos.withEnd(vd.rhs.pos.start - 1)
field
@@ -444,13 +444,23 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ /** Create a literal unit tree that is inserted by the compiler but not
+ * written by the end user. It's important to distinguish the two so that
+ * quasiquotes can strip the synthetic ones away.
+ */
+ def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment)
+
/** Create block of statements `stats` */
def mkBlock(stats: List[Tree]): Tree =
if (stats.isEmpty) Literal(Constant(()))
- else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
+ else if (!stats.last.isTerm) Block(stats, mkSyntheticUnit())
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
+ /** Create a block that wraps multiple statements but doesn't
+ * do any wrapping if there is just one statement. Used by
+ * quasiquotes, the macro c.parse API and the toolbox.
+ */
def mkTreeOrBlock(stats: List[Tree]) = stats match {
case Nil => EmptyTree
case head :: Nil => head
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 497a7c91b1..7bab15b0f4 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -284,17 +284,17 @@ abstract class TreeInfo {
/** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
* same object?
*/
- def isSelfConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
- case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSelfConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Ident(nme.CONSTRUCTOR) => true
+ case Select(This(_), nme.CONSTRUCTOR) => true
+ case _ => false
}
/** Is tree a super constructor call?
*/
- def isSuperConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ def isSuperConstrCall(tree: Tree): Boolean = dissectApplied(tree).core match {
+ case Select(Super(_, _), nme.CONSTRUCTOR) => true
+ case _ => false
}
/**
@@ -848,8 +848,10 @@ abstract class TreeInfo {
case _ => false
})
- def isMacroApplication(tree: Tree): Boolean =
- !tree.isDef && tree.symbol != null && tree.symbol.isTermMacro && !tree.symbol.isErroneous
+ def isMacroApplication(tree: Tree): Boolean = !tree.isDef && {
+ val sym = tree.symbol
+ sym != null && sym.isTermMacro && !sym.isErroneous
+ }
def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
case Block(_, expr) => isMacroApplicationOrBlock(expr)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 6dac20459e..53c528a2bb 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -1977,7 +1977,7 @@ trait Types
def pendingVolatiles = _pendingVolatiles
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
- require(args0.nonEmpty, this)
+ require(args0 ne Nil, this)
/** No unapplied type params since it has (should have) equally as many args. */
override def isHigherKinded = false
@@ -2062,7 +2062,7 @@ trait Types
// to a java or scala symbol, but it does matter whether it occurs in java or scala code.
// TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
// represented as existential types.
- override def isHigherKinded = typeParams.nonEmpty
+ override def isHigherKinded = (typeParams ne Nil)
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
@@ -2248,7 +2248,7 @@ trait Types
//OPT specialize hashCode
override final def computeHashCode = {
import scala.util.hashing.MurmurHash3._
- val hasArgs = args.nonEmpty
+ val hasArgs = args ne Nil
var h = productSeed
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
@@ -2439,7 +2439,7 @@ trait Types
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
- if (args.nonEmpty) {
+ if (args ne Nil) {
if (sym.isAliasType) new AliasArgsTypeRef(pre, sym, args)
else if (sym.isAbstractType) new AbstractArgsTypeRef(pre, sym, args)
else new ClassArgsTypeRef(pre, sym, args)
@@ -2512,7 +2512,7 @@ trait Types
true
}
- def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isImplicit = (params ne Nil) && params.head.isImplicit
def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
@@ -2520,7 +2520,7 @@ trait Types
override def paramss: List[List[Symbol]] = params :: resultType.paramss
- override def paramTypes = params map (_.tpe)
+ override def paramTypes = mapList(params)(symTpe) // OPT use mapList rather than .map
override def boundSyms = resultType.boundSyms ++ params
@@ -4147,7 +4147,7 @@ trait Types
&& (variance.isCovariant || isSubType(t2, t1, depth))
)
- corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
+ corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
}
def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
@@ -4331,7 +4331,7 @@ trait Types
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
- tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
+ mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
@@ -4580,7 +4580,10 @@ trait Types
private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
- private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+ private[scala] val treeTpe = (t: Tree) => t.tpe
+ private[scala] val symTpe = (sym: Symbol) => sym.tpe
+ private[scala] val symInfo = (sym: Symbol) => sym.info
+ private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations ne Nil
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 28afd18fe0..816916787e 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -51,6 +51,8 @@ abstract class MutableSettings extends AbsSettings {
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
+
+ def isScala211: Boolean
}
object MutableSettings {
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 3c4e93f11d..f9b10c90be 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -173,11 +173,11 @@ trait TypeComparers {
// SI-2066 This prevents overrides with incompatible variance in higher order type parameters.
private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = {
def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod)
- ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
+ !settings.isScala211 || ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance
}
private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) =
- methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
+ !settings.isScala211 || methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant
def isSameType2(tp1: Type, tp2: Type): Boolean = {
def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 09f4389b82..f427813c01 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -861,7 +861,7 @@ private[internal] trait TypeMaps {
class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
private val actuals = actuals0.toIndexedSeq
private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+ def existentialsNeeded: List[Symbol] = existentials.iterator.filter(_ ne null).toList
private object StableArg {
def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 7cc2952c96..d128521be8 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -47,6 +47,30 @@ trait Collections {
final def mforeach[A](xss: List[List[A]])(f: A => Unit) = xss foreach (_ foreach f)
final def mforeach[A](xss: Traversable[Traversable[A]])(f: A => Unit) = xss foreach (_ foreach f)
+ /** A version of List#map, specialized for List, and optimized to avoid allocation if `as` is empty */
+ final def mapList[A, B](as: List[A])(f: A => B): List[B] = if (as eq Nil) Nil else {
+ val head = new ::[B](f(as.head), Nil)
+ var tail: ::[B] = head
+ var rest = as.tail
+ while (rest ne Nil) {
+ val next = new ::(f(rest.head), Nil)
+ tail.tl = next
+ tail = next
+ rest = rest.tail
+ }
+ head
+ }
+
+ final def collectFirst[A, B](as: List[A])(pf: PartialFunction[A, B]): Option[B] = {
+ @tailrec
+ def loop(rest: List[A]): Option[B] = rest match {
+ case Nil => None
+ case a :: as if pf.isDefinedAt(a) => Some(pf(a))
+ case a :: as => loop(as)
+ }
+ loop(as)
+ }
+
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
var ys1 = xs1
@@ -99,15 +123,19 @@ trait Collections {
else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
}
final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
- val lb = new ListBuffer[C]
+ var lb: ListBuffer[C] = null
var ys1 = xs1
var ys2 = xs2
while (!ys1.isEmpty && !ys2.isEmpty) {
- lb ++= f(ys1.head, ys2.head)
+ val cs = f(ys1.head, ys2.head)
+ if (cs ne Nil) {
+ if (lb eq null) lb = new ListBuffer[C]
+ lb ++= cs
+ }
ys1 = ys1.tail
ys2 = ys2.tail
}
- lb.toList
+ if (lb eq null) Nil else lb.result
}
final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
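A hedged standalone sketch of the flatMap2 change above: the ListBuffer is allocated only once a non-empty chunk appears, so the common all-empty case returns Nil with no allocation at all (zipFlat is a hypothetical name):
{{{
import scala.collection.mutable.ListBuffer

def zipFlat[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
  var lb: ListBuffer[C] = null      // allocated lazily
  var ys1 = xs1
  var ys2 = xs2
  while (ys1.nonEmpty && ys2.nonEmpty) {
    val cs = f(ys1.head, ys2.head)
    if (cs.nonEmpty) {
      if (lb eq null) lb = new ListBuffer[C]
      lb ++= cs
    }
    ys1 = ys1.tail
    ys2 = ys2.tail
  }
  if (lb eq null) Nil else lb.result()
}

zipFlat(List(1, 2, 3), List(0, 1, 2))((a, b) => List.fill(b)(a)) // List(2, 3, 3)
zipFlat(List(1, 2, 3), List(0, 0, 0))((_, _) => Nil)             // Nil, no buffer allocated
}}}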
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index 9866b043bb..8791d8eb23 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -40,8 +40,8 @@ abstract class SourceFile {
def lineToString(index: Int): String = {
val start = lineToOffset(index)
var end = start
- while (!isEndOfLine(end)) end += 1
- content.slice(start, end) mkString ""
+ while (!isEndOfLine(end) && end <= length) end += 1
+ new String(content, start, end - start)
}
@tailrec
@@ -136,14 +136,20 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
private def charAtIsEOL(idx: Int)(p: Char => Boolean) = {
// don't identify the CR in CR LF as a line break, since LF will do.
- def notCRLF0 = content(idx) != CR || idx + 1 >= length || content(idx + 1) != LF
+ def notCRLF0 = content(idx) != CR || !content.isDefinedAt(idx + 1) || content(idx + 1) != LF
idx < length && notCRLF0 && p(content(idx))
}
def isLineBreak(idx: Int) = charAtIsEOL(idx)(isLineBreakChar)
- def isEndOfLine(idx: Int) = charAtIsEOL(idx) {
+ /** True if the index is included by an EOL sequence. */
+ def isEndOfLine(idx: Int) = (content isDefinedAt idx) && PartialFunction.cond(content(idx)) {
+ case CR | LF => true
+ }
+
+ /** True if the index is end of an EOL sequence. */
+ def isAtEndOfLine(idx: Int) = charAtIsEOL(idx) {
case CR | LF => true
case _ => false
}
@@ -151,7 +157,7 @@ class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extend
def calculateLineIndices(cs: Array[Char]) = {
val buf = new ArrayBuffer[Int]
buf += 0
- for (i <- 0 until cs.length) if (isEndOfLine(i)) buf += i + 1
+ for (i <- 0 until cs.length) if (isAtEndOfLine(i)) buf += i + 1
buf += cs.length // sentinel, so that findLine below works smoother
buf.toArray
}
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 039e75fbee..5ccdc15a03 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -43,7 +43,7 @@ abstract class Attachments { self =>
/** Check underlying payload contains an instance of type `T`. */
def contains[T: ClassTag]: Boolean =
- all exists matchesTag[T]
+ !isEmpty && (all exists matchesTag[T])
/** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
* Replaces an existing payload of the same type, if exists.
@@ -57,6 +57,8 @@ abstract class Attachments { self =>
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
else new NonemptyAttachments[Pos](this.pos, newAll)
}
+
+ def isEmpty: Boolean = true
}
// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
@@ -64,4 +66,5 @@ abstract class Attachments { self =>
private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
type Pos = P
def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+ override def isEmpty: Boolean = false
}
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index af60dffbfc..4f3448e1ed 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -6,33 +6,51 @@ package macros
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
* A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that
- * provides functions that generate unique names.
+ * provides functions that generate fresh names.
+ *
+ * In the current implementation, fresh names are more or less unique in the sense that
+ * within the same compilation run they are guaranteed not to clash with:
+ * 1) Results of past and future invocations of functions of `freshName` family
+ * 2) User-defined or macro-generated names that don't contain dollar symbols
+ * 3) Macro-generated names that are created by concatenating names from the first, second and third categories
+ *
+ * Uniqueness of fresh names across compilation runs is not guaranteed, but that's something
+ * that we would like to improve upon in future releases. See [[https://issues.scala-lang.org/browse/SI-6879]] for more information.
+ *
+ * @define freshNameNoParams
+ * Creates a string that represents a more or less unique name.
+ * Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
+ *
+ * @define freshNameStringParam
+ * Creates a string that represents a more or less unique name having a given prefix.
+ * Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
+ *
+ * @define freshNameNameParam
+ * Creates a more or less unique name having a given name as a prefix and
+ * having the same flavor (term name or type name) as the given name.
+ * Consult [[scala.reflect.macros.Names]] for more information on uniqueness of such names.
*/
trait Names {
self: blackbox.Context =>
- /** Creates a unique string. */
+ /** $freshNameNoParams */
@deprecated("Use freshName instead", "2.11.0")
def fresh(): String
- /** Creates a unique string having a given prefix. */
+ /** $freshNameStringParam */
@deprecated("Use freshName instead", "2.11.0")
def fresh(name: String): String
- /** Creates a unique name having a given name as a prefix and
- * having the same flavor (term name or type name) as the given name.
- */
+ /** $freshNameNameParam */
@deprecated("Use freshName instead", "2.11.0")
def fresh[NameType <: Name](name: NameType): NameType
- /** Creates a unique string. */
+ /** $freshNameNoParams */
def freshName(): String
- /** Creates a unique string having a given prefix. */
+ /** $freshNameStringParam */
def freshName(name: String): String
- /** Creates a unique name having a given name as a prefix and
- * having the same flavor (term name or type name) as the given name.
- */
+ /** $freshNameNameParam */
def freshName[NameType <: Name](name: NameType): NameType
}
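A hedged macro sketch of the typical use of `freshName`: generated temporaries get names that cannot capture or clash with identifiers inside the inlined user expression. The object and method names are hypothetical, the exact shape of the generated names (e.g. the `fresh`/`macro$` affixes in StdNames above) is an implementation detail, and the macro must be compiled separately from its call sites:
{{{
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

object Timing {
  def timed[T](body: T): T = macro timedImpl[T]

  def timedImpl[T](c: Context)(body: c.Tree): c.Tree = {
    import c.universe._
    // Fresh, collision-free names for the temporaries; hard-coding "t0"
    // or "res" could shadow identifiers used inside `body`.
    val t0  = c.freshName(TermName("t0"))
    val res = c.freshName(TermName("res"))
    q"""
      val $t0  = System.nanoTime()
      val $res = $body
      println("took " + (System.nanoTime() - $t0) + " ns")
      $res
    """
  }
}
}}}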
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 54b75b8e5b..cfd66744ff 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -21,7 +21,7 @@ class JavaUniverse extends internal.SymbolTable with JavaUniverseForce with Refl
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
- val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
+ def currentFreshNameCreator = globalFreshNameCreator
// can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
object treeInfo extends {
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index b9b171c7ed..8811b5513e 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -58,6 +58,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.CompoundTypeTreeOriginalAttachment
this.BackquotedIdentifierAttachment
this.ForAttachment
+ this.SyntheticUnitAttachment
this.SubpatternsAttachment
this.noPrint
this.typeDebug
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 11db83d7d5..de5ba99900 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -48,4 +48,5 @@ private[reflect] class Settings extends MutableSettings {
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
+ def isScala211 = true
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index e5dbaa3fd5..5ea1443a19 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -122,6 +122,11 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
)
+ val docAuthor = BooleanSetting (
+ "-author",
+ "Include authors."
+ )
+
val docDiagrams = BooleanSetting (
"-diagrams",
"Create inheritance diagrams for classes, traits and packages."
@@ -207,7 +212,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
- docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docAuthor, docDiagrams, docDiagramsDebug, docDiagramsDotPath,
docDiagramsDotTimeout, docDiagramsDotRestart,
docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 119d4e0143..26ee005d3e 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -351,6 +351,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
+ val authorComment =
+ if (! s.docAuthor || mbr.comment.isEmpty ||
+ mbr.comment.isDefined && mbr.comment.get.authors.isEmpty) NodeSeq.Empty
+ else <div class="comment cmt">
+ {if (mbr.comment.get.authors.size > 1) <h6>Authors:</h6> else <h6>Author:</h6>}
+ { mbr.comment.get.authors map bodyToHtml}
+ </div>
+
val paramComments = {
val prs: List[ParameterEntity] = mbr match {
case cls: Class => cls.typeParams ::: cls.valueParams.flatten
@@ -681,7 +689,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
+ memberComment ++ authorComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 8f217e087c..ef84ac42ba 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -10,6 +10,7 @@ import diagram._
import scala.collection._
import scala.util.matching.Regex
+import scala.reflect.macros.internal.macroImpl
import symtab.Flags
import io._
@@ -80,7 +81,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- def annotations = sym.annotations.map(makeAnnotation)
+ def annotations = sym.annotations.filterNot(_.tpe =:= typeOf[macroImpl]).map(makeAnnotation)
def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
def isType = sym.name.isTypeName
}
@@ -145,6 +146,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
+ if (sym.isMacro) fgs += Paragraph(Text("macro"))
fgs.toList
}
def deprecation =