-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Typers.scala |   2
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala |  23
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Metalevels.scala |   4
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala |   5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala |   2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala |   9
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala |  10
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala |   3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala |   2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala |   2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala |   2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala |   2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala |   4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala |   8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala |   4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala |   6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 151
-rw-r--r--  src/library/scala/collection/BitSetLike.scala |  16
-rw-r--r--  src/library/scala/collection/mutable/BitSet.scala |   5
-rw-r--r--  src/library/scala/collection/mutable/Map.scala |   2
-rw-r--r--  src/library/scala/math/BigDecimal.scala |   8
-rw-r--r--  src/reflect/scala/reflect/internal/ClassfileConstants.scala |   1
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala |   4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala |   4
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala |  79
-rw-r--r--  test/files/neg/macro-without-xmacros-a.check |   6
-rw-r--r--  test/files/neg/macro-without-xmacros-b.check |   6
-rw-r--r--  test/files/neg/t6040.check |   2
-rw-r--r--  test/files/neg/t6952.check |   4
-rw-r--r--  test/files/neg/t7324.check |   4
-rw-r--r--  test/files/neg/t7324.scala |  57
-rw-r--r--  test/files/neg/t7388.check |   4
-rw-r--r--  test/files/neg/t7388.scala |   1
-rw-r--r--  test/files/pos/t7329.scala |   1
-rw-r--r--  test/files/pos/t7377/Client_2.scala |  11
-rw-r--r--  test/files/pos/t7377/Macro_1.scala |   7
-rw-r--r--  test/files/pos/t7377b.scala |  13
-rw-r--r--  test/files/run/bitsets.check |   4
-rw-r--r--  test/files/run/bitsets.scala |  14
-rw-r--r--  test/files/run/t4023.scala |  31
-rw-r--r--  test/files/run/t7319.check |  38
-rw-r--r--  test/files/run/t7319.scala |  13
-rw-r--r--  test/files/run/t7337.check |   1
-rw-r--r--  test/files/run/t7337.scala |  19
-rw-r--r--  test/scaladoc/run/t5527.check |  40
-rw-r--r--  test/scaladoc/run/t5527.scala |  48
46 files changed, 499 insertions, 183 deletions
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/runtime/Typers.scala
index 7e268247dd..070b288c3a 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Typers.scala
@@ -11,7 +11,7 @@ trait Typers {
def openImplicits: List[(Type, Tree)] = callsiteTyper.context.openImplicits
/**
- * @see [[scala.tools.reflect.Toolbox.typeCheck]]
+ * @see [[scala.tools.reflect.ToolBox.typeCheck]]
*/
def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 11857e2172..74949fce6d 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -8,8 +8,9 @@ import scala.reflect.reify.utils.Utils
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
* See more info in the comments to `reify` in scala.reflect.api.Universe.
*
- * @author Martin Odersky
- * @version 2.10
+ * @author Martin Odersky
+ * @version 2.10
+ * @since 2.10
*/
abstract class Reifier extends States
with Phases
@@ -31,20 +32,20 @@ abstract class Reifier extends States
this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
override def hasReifier = true
- /**
- * For `reifee` and other reification parameters, generate a tree of the form
- *
+ /** For `reifee` and other reification parameters, generate a tree of the form
+ * {{{
* {
- * val $u: universe.type = <[ universe ]>
- * val $m: $u.Mirror = <[ mirror ]>
- * $u.Expr[T](rtree) // if data is a Tree
- * $u.TypeTag[T](rtree) // if data is a Type
+ * val \$u: universe.type = <[ universe ]>
+ * val \$m: \$u.Mirror = <[ mirror ]>
+ * \$u.Expr[T](rtree) // if data is a Tree
+ * \$u.TypeTag[T](rtree) // if data is a Type
* }
+ * }}}
*
* where
*
- * - `universe` is the tree that represents the universe the result will be bound to
- * - `mirror` is the tree that represents the mirror the result will be initially bound to
+ * - `universe` is the tree that represents the universe the result will be bound to.
+ * - `mirror` is the tree that represents the mirror the result will be initially bound to.
* - `rtree` is code that generates `reifee` at runtime.
* - `T` is the type that corresponds to `data`.
*
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 18ea908cdf..ea6b5886cc 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -11,7 +11,7 @@ trait Metalevels {
/**
* Makes sense of cross-stage bindings.
*
- * ================
+ * ----------------
*
* Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
* Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
@@ -53,7 +53,7 @@ trait Metalevels {
* 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath
* 2) Runtime eval incurs a severe performance penalty, so it'd better to be explicit about it
*
- * ================
+ * ----------------
*
* As we can see, the only problem is the fact that lhs'es of `splice` can be code blocks that can capture variables from the outside.
* Code inside the lhs of an `splice` is not reified, while the code from the enclosing reify is.
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 5ad494177c..f2e5c9b1eb 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -366,7 +366,10 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+ case None =>
+ val pos = docCommentPos(sym)
+ val loc = pos withPoint (pos.start + vstart + 1)
+ reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 2f981d23f6..bd3af5f9a2 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -361,7 +361,7 @@ self =>
if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
- /** Here we are building an AST representing the following source fiction,
+ /* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 66a58870cc..1b183ddd3f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -25,6 +25,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import icodes.opcodes._
import definitions._
+ // Strangely I can't find this in the asm code
+ // 255, but reserving 1 for "this"
+ final val MaximumJvmParameters = 254
+
val phaseName = "jvm"
/** Create a new phase */
@@ -1480,6 +1484,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+ if (m.params.size > MaximumJvmParameters) {
+ getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
debuglog("Generating method " + m.symbol.fullName)
method = m
computeLocalVarsIndex(m)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 2ad474cf3f..59df8e56c1 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -8,11 +8,11 @@ package backend.jvm
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-/** Code shared between the legagy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
+/** Code shared between the erstwhile legacy backend (aka GenJVM)
+ * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ * more here, but for now I'm starting with the refactorings that are either
+ * straightforward to review or necessary for maintenance.
+ */
trait GenJVMASM {
val global: Global
import global._
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 4d086e787e..cd23ad74e4 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -251,8 +251,7 @@ class MutableSettings(val errorFn: String => Unit)
else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
new PlainFile(Path(name))
else
-// throw new FatalError(name + " does not exist or is not a directory")
- dir
+ throw new FatalError(name + " does not exist or is not a directory")
)
/** Set the single output directory. From now on, all files will
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a8a47205dd..7cec57968c 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1179,7 +1179,7 @@ abstract class ClassfileParser {
else enclosing.info member name
)
enteringTyperIfPossible(getMember)
- /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ /* There used to be an assertion that this result is not NoSymbol; changing it to an error
* revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
* in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
* thought it wasn't triggering, I removed it entirely.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 50487ad123..7aaeb56deb 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -73,7 +73,7 @@ abstract class ICodeReader extends ClassfileParser {
private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
val jflags = JavaAccFlags(u2)
val name = pool getName u2
- /** If we're parsing a scala module, the owner of members is always
+ /* If we're parsing a scala module, the owner of members is always
* the module symbol.
*/
val owner = (
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index ac18e5ba4f..2ece06c801 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -266,7 +266,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
)
- /** The Tree => Tree function in the return is necessary to prevent the original qual
+ /* The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
* so all the test logic is routed through gen.evalOnce, which creates a block like
* { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 367825c251..1aa5f10738 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -378,7 +378,7 @@ abstract class ExplicitOuter extends InfoTransform
if (outerAcc.isDeferred) EmptyTree
else This(currentClass) DOT outerField(currentClass)
- /** If we don't re-type the tree, we see self-type related crashes like #266.
+ /* If we don't re-type the tree, we see self-type related crashes like #266.
*/
localTyper typed {
(DEF(outerAcc) withPos currentClass.pos withType null) === rhs
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 35df63b246..8774390c8c 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -807,7 +807,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
}
- /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+ /* return a 'lazified' version of rhs. It uses double-checked locking to ensure
* initialization is performed at most once. For performance reasons the double-checked
* locking is split into two parts, the first (fast) path checks the bitmap without
* synchronizing, and if that fails it initializes the lazy val within the
@@ -1145,7 +1145,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
qual
case Apply(Select(qual, _), args) =>
- /** Changes `qual.m(args)` where m refers to an implementation
+ /* Changes `qual.m(args)` where m refers to an implementation
* class method to Q.m(S, args) where Q is the implementation module of
* `m` and S is the self parameter for the call, which
* is determined as follows:
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 565dfde11a..4b00c0efe9 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -808,7 +808,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
- else {
+ else if (sym.hasDefault) {
+ /* Specializing default getters is useless, also see SI-7329 . */
+ sym.resetFlag(SPECIALIZED)
+ Nil
+ } else {
// debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
@@ -1385,7 +1389,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def transform1(tree: Tree) = {
val symbol = tree.symbol
- /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
+ /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
def specSym(qual: Tree): Symbol = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
def isMatch(member: Symbol) = (
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index f135f7f6ae..949cd13520 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -110,6 +110,8 @@ trait Contexts { self: Analyzer =>
case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
+ sc.flushAndReturnBuffer()
+ sc.flushAndReturnWarningsBuffer()
sc = sc.outer
}
}
@@ -518,7 +520,7 @@ trait Contexts { self: Analyzer =>
(linked ne NoSymbol) && accessWithin(linked)
}
- /** Are we inside definition of `ab`? */
+ /* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8d7830897d..4913034d3f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -287,7 +287,11 @@ trait Infer extends Checkable {
// println("set error: "+tree);
// throw new Error()
// }
- def name = newTermName("<error: " + tree.symbol + ">")
+ def name = {
+ val sym = tree.symbol
+ val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
+ newTermName(s"<error: $nameStr>")
+ }
def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 8cf47b500d..60b6248a07 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -490,14 +490,6 @@ trait Typers extends Adaptations with Tags {
}
@inline
- final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- val res = f(newTyper(c))
- if (c.hasErrors)
- context.issue(c.errBuffer.head)
- res
- }
-
- @inline
final def withSavedContext[T](c: Context)(f: => T) = {
val savedErrors = c.flushAndReturnBuffer()
val res = f
@@ -740,16 +732,19 @@ trait Typers extends Adaptations with Tags {
if (!OK) {
val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
featureTrait getAnnotation LanguageFeatureAnnot
- val req = if (required) "needs to" else "should"
- var raw = featureDesc + " " + req + " be enabled\n" +
- "by making the implicit value language." + featureName + " visible."
- if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
- "or by setting the compiler option -language:" + featureName + ".\n" +
- "See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
- "why the feature " + req + " be explicitly enabled."
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (currentRun.reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
currentRun.reportedFeature += featureTrait
- val msg = raw replace ("#", construct)
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
if (required) unit.error(pos, msg)
else currentRun.featureWarnings.warn(pos, msg)
}
@@ -1802,9 +1797,7 @@ trait Typers extends Adaptations with Tags {
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
- _.typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
- }
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
@@ -1845,17 +1838,16 @@ trait Typers extends Adaptations with Tags {
|| !linkedClass.isSerializable
|| clazz.isSerializable
)
- val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
- _.typedTemplate(mdef.impl, {
- typedParentTypes(mdef.impl) ++ (
- if (noSerializable) Nil
- else {
- clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
- }
- )
- })
- }
+ val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
+ typedParentTypes(mdef.impl) ++ (
+ if (noSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (mdef.symbol == PredefModule)
@@ -4435,63 +4427,58 @@ trait Typers extends Adaptations with Tags {
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && isPatternMode) {
- // treat stable function applications f() as expressions.
- typed1(tree, (mode &~ PATTERNmode) | EXPRmode, pt)
- } else {
- val funpt = if (isPatternMode) pt else WildcardType
- val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
-
- def onError(reportError: => Tree): Tree = {
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- case _ =>
+ val funpt = if (isPatternMode) pt else WildcardType
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+ def onError(reportError: => Tree): Tree = {
+ fun match {
+ case Select(qual, name)
+ if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ } else {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- }
- val silentResult = silent(
- op = _.typed(fun, mode.forFunMode, funpt),
- reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
- newtree = if (mode.inExprMode) tree else context.tree
- )
- silentResult match {
- case SilentResultValue(fun1) =>
- val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
- val noSecondTry = (
- isPastTyper
- || (fun2.symbol ne null) && fun2.symbol.isConstructor
- || (fun2.tpe match { case mt: MethodType => mt.isImplicit case _ => false })
- )
- val isFirstTry = !noSecondTry && (
- fun2 match {
- case Select(_, _) => mode inExprModeButNot SNDTRYmode
- case _ => false
+ reportError
}
- )
- if (isFirstTry)
- tryTypedApply(fun2, args)
- else
- doTypedApply(tree, fun2, args, mode, pt)
+ case _ =>
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ }
+ val silentResult = silent(
+ op = _.typed(fun, mode.forFunMode, funpt),
+ reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
+ newtree = if (mode.inExprMode) tree else context.tree
+ )
+ silentResult match {
+ case SilentResultValue(fun1) =>
+ val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
+ val noSecondTry = (
+ isPastTyper
+ || (fun2.symbol ne null) && fun2.symbol.isConstructor
+ || (fun2.tpe match { case mt: MethodType => mt.isImplicit case _ => false })
+ )
+ val isFirstTry = !noSecondTry && (
+ fun2 match {
+ case Select(_, _) => mode inExprModeButNot SNDTRYmode
+ case _ => false
+ }
+ )
+ if (isFirstTry)
+ tryTypedApply(fun2, args)
+ else
+ doTypedApply(tree, fun2, args, mode, pt)
- case SilentTypeError(err) =>
- onError({issue(err); setError(tree)})
- }
+ case SilentTypeError(err) =>
+ onError({issue(err); setError(tree)})
}
}
- // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
// convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
// where Array HK gets applied (N-1) times
object ArrayInstantiation {
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 034ed2b24a..9c4e710558 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -106,8 +106,8 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
private var current = start
private val end = nwords * WordLength
def hasNext: Boolean = {
- while (current < end && !self.contains(current)) current += 1
- current < end
+ while (current != end && !self.contains(current)) current += 1
+ current != end
}
def next(): Int =
if (hasNext) { val r = current; current += 1; r }
@@ -117,7 +117,10 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
override def foreach[B](f: Int => B) {
for (i <- 0 until nwords) {
val w = word(i)
- for (j <- i * WordLength until (i + 1) * WordLength) {
+ /* NOTE: `until` instead of `to` will not work here because
+ the maximum value of `(i + 1) * WordLength` could be
+ `Int.MaxValue + 1` (i.e. `Int.MinValue`). */
+ for (j <- i * WordLength to (i + 1) * WordLength - 1) {
if ((w & (1L << j)) != 0L) f(j)
}
}
@@ -197,11 +200,15 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
- for (i <- 0 until nwords * WordLength)
+ val max = nwords * WordLength
+ var i = 0
+ while(i != max) {
if (contains(i)) {
sb append pre append i
pre = sep
}
+ i += 1
+ }
sb append end
}
@@ -212,6 +219,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
object BitSetLike {
private[collection] val LogWL = 6
private val WordLength = 64
+ private[collection] val MaxSize = (Int.MaxValue >> LogWL) + 1
private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = {
var len = elems.length
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 397f8099eb..29c341590d 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import BitSetLike.{LogWL, updateArray}
+import BitSetLike.{LogWL, MaxSize, updateArray}
/** A class for mutable bitsets.
*
@@ -63,9 +63,10 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
}
private def ensureCapacity(idx: Int) {
+ require(idx < MaxSize)
if (idx >= nwords) {
var newlen = nwords
- while (idx >= newlen) newlen = newlen * 2
+ while (idx >= newlen) newlen = (newlen * 2) min MaxSize
val elems1 = new Array[Long](newlen)
Array.copy(elems, 0, elems1, 0, nwords)
elems = elems1
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index f72e1fc4e7..f68e4004f2 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -47,7 +47,7 @@ trait Map[A, B]
*/
def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
- /** Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
+ /* Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
def readOnly : scala.collection.Map[A, B] = new scala.collection.Map[A, B] {
override def size = self.size
override def update(key: A, value: B) = self.update(key, value)
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index d8f4337b8f..c5e5699468 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -333,21 +333,21 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
override def byteValue = intValue.toByte
/** Converts this BigDecimal to a Short.
- * If the BigDecimal is too big to fit in a Byte, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value as well as return a result with the opposite sign.
*/
override def shortValue = intValue.toShort
/** Converts this BigDecimal to a Char.
- * If the BigDecimal is too big to fit in a char, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value and that it always returns a positive result.
*/
def charValue = intValue.toChar
/** Converts this BigDecimal to an Int.
- * If the BigDecimal is too big to fit in a char, only the low-order 32 bits
+ * If the BigDecimal is too big to fit in an Int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
@@ -355,7 +355,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
def intValue = this.bigDecimal.intValue
/** Converts this BigDecimal to a Long.
- * If the BigDecimal is too big to fit in a char, only the low-order 64 bits
+ * If the BigDecimal is too big to fit in a Long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index faf61e5205..b682b7d0ca 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -9,7 +9,6 @@ package internal
import scala.annotation.switch
object ClassfileConstants {
-
final val JAVA_MAGIC = 0xCAFEBABE
final val JAVA_MAJOR_VERSION = 45
final val JAVA_MINOR_VERSION = 3
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 60d9e1c3cd..cd41f533bb 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3062,7 +3062,7 @@ trait Types
else lhs <:< rhs
}
- /** Simple case: type arguments can be ignored, because either this typevar has
+ /* Simple case: type arguments can be ignored, because either this typevar has
* no type parameters, or we are comparing to Any/Nothing.
*
* The latter condition is needed because HK unification is limited to constraints of the shape
@@ -3089,7 +3089,7 @@ trait Types
} else false
}
- /** Full case: involving a check of the form
+ /* Full case: involving a check of the form
* {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
* }}}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index da8e64ea16..c1fb0c0107 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -186,7 +186,7 @@ trait TypeComparers {
}
def isSameType2(tp1: Type, tp2: Type): Boolean = {
- /** Here we highlight those unfortunate type-like constructs which
+ /* Here we highlight those unfortunate type-like constructs which
* are hidden bundles of mutable state, cruising the type system picking
* up any type constraints naive enough to get into their hot rods.
*/
@@ -215,7 +215,7 @@ trait TypeComparers {
}
case _ => false
}
- /** Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
+ /* Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
* https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA
*/
def sameTypeAndSameCaseClass = tp1 match {
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index bf6d6ffed7..003c439f65 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -9,7 +9,6 @@ package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
import scala.reflect.internal.Chars._
import symtab._
-import reporters.Reporter
import typechecker.Analyzer
import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
@@ -150,20 +149,54 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
- private var docBuffer: StringBuilder = null // buffer for comments
- private var docPos: Position = NoPosition // last doc comment position
- private var inDocComment = false
+ private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning)
+ private var inDocComment = false // if buffer contains double-star doc comment
+ private var lastDoc: DocComment = null // last comment if it was double-star doc
+ private lazy val unmooredParser = { // minimalist comment parser
+ import scala.tools.nsc.doc.base.{comment => _, _}
+ new {
+ val global: Global = ScaladocSyntaxAnalyzer.this.global
+ } with CommentFactoryBase with MemberLookupBase {
+ import global.{ settings, Symbol }
+ def parseComment(comment: DocComment) = {
+ val nowarnings = settings.nowarn.value
+ settings.nowarn.value = true
+ try parseAtSymbol(comment.raw, comment.raw, comment.pos)
+ finally settings.nowarn.value = nowarnings
+ }
+
+ override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
+ override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption orNull
+ override def toString(link: LinkTo): String = "No link"
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
+ override def warnNoLink: Boolean = false
+ }
+ }
+
+ /**
+ * Warn when discarding buffered doc at the end of a block.
+ * This mechanism doesn't warn about arbitrary unmoored doc.
+ * Also warn under -Xlint, but otherwise only warn in the presence of suspicious
+ * tags that appear to be documenting API. Warnings are suppressed while parsing
+ * the local comment so that comments of the form `[at] Martin` will not trigger a warning.
+ * By omission, tags for `see`, `todo`, `note` and `example` are ignored.
+ */
override def discardDocBuffer() = {
+ import scala.tools.nsc.doc.base.comment.Comment
val doc = flushDoc
- if (doc ne null)
- unit.warning(docPos, "discarding unmoored doc comment")
+ // tags that make a local double-star comment look unclean, as though it were API
+ def unclean(comment: Comment): Boolean = {
+ import comment._
+ authors.nonEmpty || result.nonEmpty || throws.nonEmpty || valueParams.nonEmpty ||
+ typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
+ }
+ def isDirty = unclean(unmooredParser parseComment doc)
+ if ((doc ne null) && (settings.lint || isDirty))
+ unit.warning(doc.pos, "discarding unmoored doc comment")
}
- override def flushDoc(): DocComment = {
- if (docBuffer eq null) null
- else try DocComment(docBuffer.toString, docPos) finally docBuffer = null
- }
+ override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
override protected def putCommentChar() {
if (inDocComment)
@@ -182,23 +215,19 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
super.skipBlockComment()
}
override def skipComment(): Boolean = {
- super.skipComment() && {
- if (docBuffer ne null) {
- if (inDocComment)
- foundDocComment(docBuffer.toString, offset, charOffset - 2)
- else
- try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null
- }
+ // emit a block comment; if it's double-star, make Doc at this pos
+ def foundStarComment(start: Int, end: Int) = try {
+ val str = docBuffer.toString
+ val pos = new RangePosition(unit.source, start, start, end)
+ unit.comment(pos, str)
+ if (inDocComment)
+ lastDoc = DocComment(str, pos)
true
+ } finally {
+ docBuffer = null
+ inDocComment = false
}
- }
- def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
- def foundDocComment(value: String, start: Int, end: Int) {
- docPos = new RangePosition(unit.source, start, start, end)
- unit.comment(docPos, value)
+ super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
}
}
class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check
index ae6c6c695a..ec194be3a9 100644
--- a/test/files/neg/macro-without-xmacros-a.check
+++ b/test/files/neg/macro-without-xmacros-a.check
@@ -1,5 +1,5 @@
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro foo_impl
^
Macros_2.scala:7: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro bar_impl
^
Macros_2.scala:11: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro quux_impl
^
three errors found
diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check
index c3cadcf36a..c97850f0a9 100644
--- a/test/files/neg/macro-without-xmacros-b.check
+++ b/test/files/neg/macro-without-xmacros-b.check
@@ -1,5 +1,5 @@
Macros_2.scala:3: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro Impls.foo_impl
^
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro Impls.bar_impl
^
Macros_2.scala:9: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro Impls.quux_impl
^
three errors found
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f91df0c46d..16c90ede7e 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,5 +1,5 @@
t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
index f1e1881404..1a591d02c6 100644
--- a/test/files/neg/t6952.check
+++ b/test/files/neg/t6952.check
@@ -1,5 +1,5 @@
t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
@@ -7,7 +7,7 @@ why the feature needs to be explicitly enabled.
trait B extends Dynamic
^
t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
trait C extends A with Dynamic
^
two errors found
diff --git a/test/files/neg/t7324.check b/test/files/neg/t7324.check
new file mode 100644
index 0000000000..586947d5e7
--- /dev/null
+++ b/test/files/neg/t7324.check
@@ -0,0 +1,4 @@
+t7324.scala:2: error: Platform restriction: a parameter list's length cannot exceed 254.
+class Bar(
+ ^
+one error found
diff --git a/test/files/neg/t7324.scala b/test/files/neg/t7324.scala
new file mode 100644
index 0000000000..81d7674d68
--- /dev/null
+++ b/test/files/neg/t7324.scala
@@ -0,0 +1,57 @@
+object Bar extends App
+class Bar(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int, _255: Int
+)
+
+class BarOK(
+_1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int,
+_11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int,
+_21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int,
+_31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int,
+_41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int,
+_51: Int, _52: Int, _53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int,
+_61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int,
+_71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int,
+_81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int,
+_91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int,
+_101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int,
+_111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int,
+_121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int,
+_131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int,
+_141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int, _150: Int,
+_151: Int, _152: Int, _153: Int, _154: Int, _155: Int, _156: Int, _157: Int, _158: Int, _159: Int, _160: Int,
+_161: Int, _162: Int, _163: Int, _164: Int, _165: Int, _166: Int, _167: Int, _168: Int, _169: Int, _170: Int,
+_171: Int, _172: Int, _173: Int, _174: Int, _175: Int, _176: Int, _177: Int, _178: Int, _179: Int, _180: Int,
+_181: Int, _182: Int, _183: Int, _184: Int, _185: Int, _186: Int, _187: Int, _188: Int, _189: Int, _190: Int,
+_191: Int, _192: Int, _193: Int, _194: Int, _195: Int, _196: Int, _197: Int, _198: Int, _199: Int, _200: Int,
+_201: Int, _202: Int, _203: Int, _204: Int, _205: Int, _206: Int, _207: Int, _208: Int, _209: Int, _210: Int,
+_211: Int, _212: Int, _213: Int, _214: Int, _215: Int, _216: Int, _217: Int, _218: Int, _219: Int, _220: Int,
+_221: Int, _222: Int, _223: Int, _224: Int, _225: Int, _226: Int, _227: Int, _228: Int, _229: Int, _230: Int,
+_231: Int, _232: Int, _233: Int, _234: Int, _235: Int, _236: Int, _237: Int, _238: Int, _239: Int, _240: Int,
+_241: Int, _242: Int, _243: Int, _244: Int, _245: Int, _246: Int, _247: Int, _248: Int, _249: Int, _250: Int,
+_251: Int, _252: Int, _253: Int, _254: Int)
diff --git a/test/files/neg/t7388.check b/test/files/neg/t7388.check
new file mode 100644
index 0000000000..0a29e04896
--- /dev/null
+++ b/test/files/neg/t7388.check
@@ -0,0 +1,4 @@
+t7388.scala:1: error: doesnotexist is not an enclosing class
+class Test private[doesnotexist]()
+ ^
+one error found
diff --git a/test/files/neg/t7388.scala b/test/files/neg/t7388.scala
new file mode 100644
index 0000000000..9ce9ea11b3
--- /dev/null
+++ b/test/files/neg/t7388.scala
@@ -0,0 +1 @@
+class Test private[doesnotexist]()
diff --git a/test/files/pos/t7329.scala b/test/files/pos/t7329.scala
new file mode 100644
index 0000000000..76bf1fb9f5
--- /dev/null
+++ b/test/files/pos/t7329.scala
@@ -0,0 +1 @@
+class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B))
\ No newline at end of file
diff --git a/test/files/pos/t7377/Client_2.scala b/test/files/pos/t7377/Client_2.scala
new file mode 100644
index 0000000000..5728956cca
--- /dev/null
+++ b/test/files/pos/t7377/Client_2.scala
@@ -0,0 +1,11 @@
+object Test {
+ M.noop(List(1) match { case Nil => 0; case (x::xs) => x })
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ M.noop(Foo(0) match { case FooAlias(_) => 0 })
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ M.noop(Bar() match { case BarAlias() => 0 })
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
new file mode 100644
index 0000000000..a0ec1d84af
--- /dev/null
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -0,0 +1,7 @@
+import language.experimental._
+import reflect.macros.Context
+
+object M {
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typeCheck(c.resetLocalAttrs(expr.tree)))
+ def noop[A](expr: A): A = macro noopImpl[A]
+}
diff --git a/test/files/pos/t7377b.scala b/test/files/pos/t7377b.scala
new file mode 100644
index 0000000000..aeee800d57
--- /dev/null
+++ b/test/files/pos/t7377b.scala
@@ -0,0 +1,13 @@
+object Test {
+ List(1) match { case Nil => 0; case (x::xs) => x }
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ Foo(0) match { case FooAlias(_) => 0 }
+ Foo(0) match { case Foo(_) => 0 }
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ Bar() match { case BarAlias() => 0 }
+ Bar() match { case Bar() => 0 }
+}
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index 41c2ccdcb8..9bbc769b72 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -42,6 +42,10 @@ b2:BitSet(5)
b3:BitSet(5, 7)
b4:BitSet(7)
b0:BitSet(5, 6, 7)
+bMax:BitSet(2147483647)
+2147483647
+bLarge:BitSet(2000000001)
+false
is0 = BitSet()
is1 = BitSet()
is2 = BitSet(2)
diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala
index d55f9e4e83..c88782cab7 100644
--- a/test/files/run/bitsets.scala
+++ b/test/files/run/bitsets.scala
@@ -115,6 +115,19 @@ object TestMutable3 {
println(s"b0:$b0")
}
+object TestMutable4 {
+ import scala.collection.mutable.BitSet
+
+ val bMax = BitSet(Int.MaxValue)
+ println(s"bMax:$bMax")
+ bMax.foreach(println)
+
+ val bLarge = BitSet(2000000001)
+ println(s"bLarge:$bLarge")
+
+ println(bMax == bLarge)
+}
+
object TestImmutable {
import scala.collection.immutable.BitSet
@@ -190,6 +203,7 @@ object Test extends App {
TestMutable
TestMutable2
TestMutable3
+ TestMutable4
TestImmutable
TestImmutable2
}
diff --git a/test/files/run/t4023.scala b/test/files/run/t4023.scala
index 4846fa31b4..38190cfa5c 100644
--- a/test/files/run/t4023.scala
+++ b/test/files/run/t4023.scala
@@ -7,17 +7,28 @@ object Test {
object B5 extends B1
private object B6 extends B2
- val valuesTry1 = this.getClass.getDeclaredClasses
- val valuesTry2 = C.getClass.getDeclaredClasses
- val valuesTry3 = getClass.getDeclaredClasses
+ val classes1 = this.getClass.getDeclaredClasses
+ val classes2 = C.getClass .getDeclaredClasses
+ val classes3 = getClass .getDeclaredClasses
+ }
+
+ // sortBy(_.getName) introduces additional classes which we don't want to see in C,
+ // so we call sortBy outside of C.
+ object TestHelper {
+ val valuesTry1 = C.classes1.sortBy(_.getName)
+ val valuesTry2 = C.classes2.sortBy(_.getName)
+ val valuesTry3 = C.classes3.sortBy(_.getName)
}
def main(args: Array[String]) {
- println("Try 1: (" + C.valuesTry1.length + " classes)")
- C.valuesTry1.foreach(println)
- println("Try 2: (" + C.valuesTry2.length + " classes)")
- C.valuesTry2.foreach(println)
- println("Try 3: (" + C.valuesTry3.length + " classes)")
- C.valuesTry3.foreach(println)
+ println("Try 1: (" + TestHelper.valuesTry1.length + " classes)")
+ TestHelper.valuesTry1.foreach(println)
+ println("Try 2: (" + TestHelper.valuesTry2.length + " classes)")
+ TestHelper.valuesTry2.foreach(println)
+ println("Try 3: (" + TestHelper.valuesTry3.length + " classes)")
+ TestHelper.valuesTry3.foreach(println)
}
-}
\ No newline at end of file
+
+
+}
+
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
new file mode 100644
index 0000000000..966736915e
--- /dev/null
+++ b/test/files/run/t7319.check
@@ -0,0 +1,38 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class M[A]
+defined class M
+
+scala> implicit def ma0[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma0: [A](a: A)M[A]
+
+scala> implicit def ma1[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma1: [A](a: A)M[A]
+
+scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+warning: there were 1 feature warning(s); re-run with -feature for details
+convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
+
+scala> convert(Some[Int](0))
+<console>:12: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : Some[Int]
+ required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ convert(Some[Int](0))
+ ^
+<console>:12: error: type mismatch;
+ found : Some[Int]
+ required: F[_ <: F[_]]
+ convert(Some[Int](0))
+ ^
+
+scala> 0
+res1: Int = 0
+
+scala>
diff --git a/test/files/run/t7319.scala b/test/files/run/t7319.scala
new file mode 100644
index 0000000000..23ffeb977d
--- /dev/null
+++ b/test/files/run/t7319.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // so we can provide the ambiguities, rather than relying in Predef implicits
+ override def extraSettings = "-Yno-predef"
+ override def code = """
+class M[A]
+implicit def ma0[A](a: A): M[A] = null
+implicit def ma1[A](a: A): M[A] = null
+def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+convert(Some[Int](0))
+0""" // before the fix, this line, and all that followed, re-issued the implicit ambiguity error.
+}
diff --git a/test/files/run/t7337.check b/test/files/run/t7337.check
new file mode 100644
index 0000000000..dd2b31f23c
--- /dev/null
+++ b/test/files/run/t7337.check
@@ -0,0 +1 @@
+doesnotexist does not exist or is not a directory
diff --git a/test/files/run/t7337.scala b/test/files/run/t7337.scala
new file mode 100644
index 0000000000..d878182ed0
--- /dev/null
+++ b/test/files/run/t7337.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+import util.{CommandLineParser}
+
+object Test extends DirectTest {
+ override def code = "class C"
+ override def newCompiler(args: String*): Global = {
+ val settings = newSettings((CommandLineParser tokenize ("-d doesnotexist " + extraSettings)) ++ args.toList)
+ newCompiler(settings)
+ }
+
+ override def show() {
+ try {
+ newCompiler()
+ } catch {
+ case fe: FatalError => println(fe.getMessage)
+ }
+ }
+}
diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check
index ce649a013e..8483460d04 100644
--- a/test/scaladoc/run/t5527.check
+++ b/test/scaladoc/run/t5527.check
@@ -1,12 +1,12 @@
-newSource1:17: warning: discarding unmoored doc comment
- /** Testing 123 */
- ^
-newSource1:27: warning: discarding unmoored doc comment
- /** Calculate this result. */
+newSource1:47: warning: discarding unmoored doc comment
+ /** Document this crucial constant for posterity.
^
-newSource1:34: warning: discarding unmoored doc comment
- /** Another digit is a giveaway. */
+newSource1:64: warning: discarding unmoored doc comment
+ /*************************\
^
+newSource1:73: warning: discarding unmoored doc comment
+ val i = 10 */** Important!
+ ^
[[syntax trees at end of parser]] // newSource1
package <empty> {
object UselessComments extends scala.AnyRef {
@@ -48,6 +48,26 @@ package <empty> {
val test4 = 'a' match {
case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
case _ => false
+ };
+ def test5: scala.Unit = if (true)
+ $qmark$qmark$qmark
+ else
+ ();
+ def test6 = {
+ val u = 4;
+ 0.to(u).foreach(((i) => println(i)))
+ };
+ def test7 = {
+ val u = 4;
+ 0.to(u).foreach(((i) => println(i)))
+ };
+ def test8 = {
+ val z = "fancy";
+ z.replace("fanc", "arts")
+ };
+ def test9 = {
+ val i = 10.$times(10);
+ assert(i.$eq$eq(100))
}
};
/** comments that we should keep */
@@ -102,7 +122,11 @@ package <empty> {
super.<init>();
()
}
- }
+ };
+ /** Get the simple value.
+ * @return the default value
+ */
+ def value: Int = 7
}
}
diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala
index 2449ff60c3..60ae23c1a7 100644
--- a/test/scaladoc/run/t5527.scala
+++ b/test/scaladoc/run/t5527.scala
@@ -49,6 +49,47 @@ object Test extends DirectTest {
case _ =>
false
}
+
+ def test5 {
+ /** @martin is this right? It shouldn't flag me as scaladoc. */
+ if (true) ???
+ }
+
+ def test6 = {
+ /** Document this crucial constant for posterity.
+ * Don't forget to dedoc this comment if you refactor to a local.
+ * @author Paul Phillips
+ */
+ val u = 4
+ for (i <- 0 to u)
+ println(i)
+ }
+ def test7 = {
+ /** Some standard tags are tolerated locally and shouldn't trigger a warning.
+ * @note Don't change this unless you know what you're doing. This means you.
+ */
+ val u = 4
+ for (i <- 0 to u)
+ println(i)
+ }
+ def test8 = {
+ /*************************\
+ * Fancy ASCII Art Block *
+ * @author som-snytt *
+ \*************************/
+ // this is just a local
+ val z = "fancy"
+ z replace ("fanc", "arts")
+ }
+ def test9 = {
+ val i = 10 */** Important!
+ * We have to multiply here!
+ * @author community
+ * @see SI-1234
+ */
+ 10
+ assert(i == 100)
+ }
}
/** comments that we should keep */
@@ -85,6 +126,13 @@ object Test extends DirectTest {
/** class D */
@deprecated("use ... instead", "2.10.0")
class D
+
+ /** Get the simple value.
+ * @return the default value
+ */
+ // an intervening line comment
+ /* I had more to say, but didn't want to pollute the scaladoc. */
+ def value: Int = 7
}
""".trim