summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/build/genprod.scala2
-rw-r--r--src/compiler/scala/reflect/internal/BaseTypeSeqs.scala16
-rw-r--r--src/compiler/scala/reflect/internal/NameManglers.scala6
-rw-r--r--src/compiler/scala/reflect/internal/Names.scala94
-rw-r--r--src/compiler/scala/reflect/internal/SymbolTable.scala22
-rw-r--r--src/compiler/scala/reflect/internal/Symbols.scala98
-rw-r--r--src/compiler/scala/reflect/internal/TreeGen.scala6
-rw-r--r--src/compiler/scala/reflect/internal/TreeInfo.scala6
-rw-r--r--src/compiler/scala/reflect/internal/Trees.scala26
-rw-r--r--src/compiler/scala/reflect/internal/Types.scala326
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala176
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/AbstractFile.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala18
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala36
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala40
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala52
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala14
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala8
-rw-r--r--src/compiler/scala/tools/nsc/util/Position.scala4
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala2
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala2
-rw-r--r--src/library/scala/Array.scala28
-rw-r--r--src/library/scala/DelayedInit.scala5
-rw-r--r--src/library/scala/Math.scala70
-rw-r--r--src/library/scala/collection/IterableLike.scala13
-rw-r--r--src/library/scala/collection/Iterator.scala73
-rw-r--r--src/library/scala/collection/MapLike.scala2
-rw-r--r--src/library/scala/collection/SeqLike.scala4
-rw-r--r--src/library/scala/collection/TraversableOnce.scala16
-rw-r--r--src/library/scala/collection/TraversableView.scala2
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala2
-rw-r--r--src/library/scala/collection/immutable/List.scala42
-rw-r--r--src/library/scala/collection/immutable/Map.scala2
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala26
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala8
-rw-r--r--src/library/scala/collection/immutable/Set.scala2
-rw-r--r--src/library/scala/collection/immutable/Stream.scala6
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala8
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala2
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala2
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala2
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala2
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala8
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala4
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala6
-rw-r--r--src/library/scala/concurrent/ops.scala4
-rw-r--r--src/library/scala/math/BigInt.scala2
-rw-r--r--src/library/scala/reflect/ClassManifest.scala18
-rw-r--r--src/library/scala/reflect/Manifest.scala14
-rwxr-xr-xsrc/library/scala/reflect/generic/Flags.scala6
-rwxr-xr-xsrc/library/scala/reflect/generic/PickleFormat.scala2
-rwxr-xr-xsrc/library/scala/reflect/generic/StdNames.scala2
-rwxr-xr-xsrc/library/scala/reflect/generic/Symbols.scala2
-rwxr-xr-xsrc/library/scala/reflect/generic/Trees.scala2
-rwxr-xr-xsrc/library/scala/reflect/generic/Types.scala2
-rwxr-xr-xsrc/library/scala/reflect/generic/UnPickler.scala4
-rw-r--r--src/library/scala/util/control/Breaks.scala2
-rw-r--r--src/library/scala/util/matching/Regex.scala2
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala80
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala14
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala332
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala16
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Lexical.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Scanners.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala10
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/token/StdTokens.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/token/Tokens.scala4
-rw-r--r--src/library/scala/util/parsing/input/CharArrayReader.scala6
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala14
-rw-r--r--src/library/scala/util/parsing/input/Position.scala18
-rw-r--r--src/library/scala/util/parsing/input/Positional.scala2
-rw-r--r--src/library/scala/util/parsing/input/StreamReader.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala2
121 files changed, 1033 insertions, 1092 deletions
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 315af55d41..c0543cdcf8 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -238,7 +238,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
curryComment +
" def curried: %s => R = {\n %s\n }\n".format(
targs mkString " => ", body
- ) + """ @deprecated("Use 'curried' instead", "2.8.0")""" + "\n def curry = curried\n"
+ ) + """ @deprecated("Use `curried` instead", "2.8.0")""" + "\n def curry = curried\n"
}
override def moreMethods = curryMethod + tupleMethod
diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
index a27aea035a..bb342cf0e1 100644
--- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
@@ -13,8 +13,8 @@ import util.Statistics._
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
*
- * (1) Each element of `tp.baseTypeSeq' is a basetype of `tp'
- * (2) For each basetype `bt1' of `tp' there is an element `bt' in `tp.baseTypeSeq' such that
+ * (1) Each element of `tp.baseTypeSeq` is a basetype of `tp`
+ * (2) For each basetype `bt1` of `tp` there is an element `bt` in `tp.baseTypeSeq` such that
*
* bt.typeSymbol = bt1.typeSymbol
* bt <: bt1
@@ -96,16 +96,16 @@ trait BaseTypeSeqs {
new BaseTypeSeq(parents, arr)
}
- /** Compute new base type sequence with `tp' prepended to this sequence */
+ /** Compute new base type sequence with `tp` prepended to this sequence */
def prepend(tp: Type): BaseTypeSeq = copy(tp, 1)
- /** Compute new base type sequence with `tp' replacing the head of this sequence */
+ /** Compute new base type sequence with `tp` replacing the head of this sequence */
def updateHead(tp: Type): BaseTypeSeq = copy(tp, 0)
/** Compute new base type sequence where every element is mapped
- * with function `f'. Lazy types are mapped but not evaluated */
+ * with function `f`. Lazy types are mapped but not evaluated */
def map(f: Type => Type): BaseTypeSeq = {
- // inlined `elems map f' for performance
+ // inlined `elems map f` for performance
val len = length
var arr = new Array[Type](len)
var i = 0
@@ -139,7 +139,7 @@ trait BaseTypeSeqs {
d
}
- /** The maximum depth of type `tp' */
+ /** The maximum depth of type `tp` */
protected def maxDpth(tp: Type): Int = tp match {
case TypeRef(pre, sym, args) =>
max(maxDpth(pre), maxDpth(args) + 1)
@@ -159,7 +159,7 @@ trait BaseTypeSeqs {
1
}
- /** The maximum depth of all types `tps' */
+ /** The maximum depth of all types `tps` */
private def maxDpth(tps: Seq[Type]): Int = {
var d = 0
for (tp <- tps) d = max(d, maxDpth(tp))
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index dea811dad1..571b2ba248 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -94,13 +94,13 @@ trait NameManglers {
name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
}
- /** The expanded setter name of `name' relative to this class `base`
+ /** The expanded setter name of `name` relative to this class `base`
*/
def expandedSetterName(name: TermName, base: Symbol): TermName =
expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
- /** If `name' is an expandedName name, the original name.
- * Otherwise `name' itself.
+ /** If `name` is an expandedName name, the original name.
+ * Otherwise `name` itself.
*/
def originalName(name: Name): Name = {
var i = name.length
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index 2c5f2f9fcd..3ae15ebdec 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -25,21 +25,17 @@ trait Names /*extends reflect.generic.Names*/ {
final val nameDebug = false
- /** memory to store all names sequentially
- */
+ /** Memory to store all names sequentially. */
var chrs: Array[Char] = new Array[Char](NAME_SIZE)
private var nc = 0
- /** hashtable for finding term names quickly
- */
+ /** Hashtable for finding term names quickly. */
private val termHashtable = new Array[TermName](HASH_SIZE)
- /** hashtable for finding type names quickly
- */
+ /** Hashtable for finding type names quickly. */
private val typeHashtable = new Array[TypeName](HASH_SIZE)
- /** the hashcode of a name
- */
+ /** The hashcode of a name. */
private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
if (len > 0)
(len * (41 * 41 * 41) +
@@ -58,8 +54,7 @@ trait Names /*extends reflect.generic.Names*/ {
i == len
}
- /** enter characters into chrs array
- */
+ /** Enter characters into chrs array. */
private def enterChars(cs: Array[Char], offset: Int, len: Int) {
var i = 0
while (i < len) {
@@ -75,8 +70,7 @@ trait Names /*extends reflect.generic.Names*/ {
else nc = nc + len
}
- /** Create a term name from the characters in cs[offset..offset+len-1].
- */
+ /** Create a term name from the characters in cs[offset..offset+len-1]. */
def newTermName(cs: Array[Char], offset: Int, len: Int): TermName = {
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
@@ -89,30 +83,25 @@ trait Names /*extends reflect.generic.Names*/ {
n
}
- /** create a term name from string
- */
+ /** Create a term name from string. */
def newTermName(s: String): TermName =
newTermName(s.toCharArray(), 0, s.length())
- /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1].
- */
+ /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
val chars = Codec fromUTF8 bs.slice(offset, offset + len)
newTermName(chars, 0, chars.length)
}
- /** Create a type name from the characters in cs[offset..offset+len-1].
- */
+ /** Create a type name from the characters in cs[offset..offset+len-1]. */
def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
newTermName(cs, offset, len).toTypeName
- /** Create a type name from string
- */
+ /** Create a type name from string. */
def newTypeName(s: String): TypeName =
newTermName(s).toTypeName
- /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1].
- */
+ /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
@@ -124,8 +113,7 @@ trait Names /*extends reflect.generic.Names*/ {
def nameChars: Array[Char] = chrs
@deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
- /** An implicit conversion from names to term names
- */
+ /** An implicit conversion from names to term names. */
implicit def promoteTermNamesAsNecessary(name: Name): TermName = mkTermName(name)
@@ -136,12 +124,10 @@ trait Names /*extends reflect.generic.Names*/ {
/** Index into name table */
def start: Int = index
- /** next name in the same hash bucket
- */
+ /** The next name in the same hash bucket. */
def next: Name
- /** return the length of this name
- */
+ /** The length of this name. */
final def length: Int = len
final def isEmpty = length == 0
final def nonEmpty = !isEmpty
@@ -153,26 +139,23 @@ trait Names /*extends reflect.generic.Names*/ {
def companionName: Name
def bothNames: List[Name] = List(toTermName, toTypeName)
- /** Copy bytes of this name to buffer cs, starting at position `offset`.
- */
+ /** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
compat.Platform.arraycopy(chrs, index, cs, offset, len)
- /** return the ascii representation of this name
- */
+ /** @return the ascii representation of this name */
final def toChars: Array[Char] = {
val cs = new Array[Char](len)
copyChars(cs, 0)
cs
}
- /** return the string representation of this name
- */
+ /** @return the string representation of this name */
final override def toString(): String = new String(chrs, index, len)
def debugString() = NameTransformer.decode(toString) + (if (isTypeName) "!" else "")
/** Write to UTF8 representation of this name to given character array.
- * Start copying to index `to'. Return index of next free byte in array.
+ * Start copying to index `to`. Return index of next free byte in array.
* Array must have enough remaining space for all bytes
* (i.e. maximally 3*length bytes).
*/
@@ -182,8 +165,7 @@ trait Names /*extends reflect.generic.Names*/ {
offset + bytes.length
}
- /** return the hash value of this name
- */
+ /** @return the hash value of this name */
final override def hashCode(): Int = index
// Presently disabled.
@@ -209,17 +191,16 @@ trait Names /*extends reflect.generic.Names*/ {
false
}
- /** return the i'th Char of this name
- */
+ /** @return the i'th Char of this name */
final def apply(i: Int): Char = chrs(index + i)
- /** return the index of first occurrence of char c in this name, length if not found */
+ /** @return the index of first occurrence of char c in this name, length if not found */
final def pos(c: Char): Int = pos(c, 0)
- /** return the index of first occurrence of char c in this name, length if not found */
+ /** @return the index of first occurrence of char c in this name, length if not found */
final def pos(s: String): Int = pos(s, 0)
- /** return the index of the first occurrence of character c in
+ /** Returns the index of the first occurrence of character c in
* this name from start, length if not found.
*
* @param c the character
@@ -232,7 +213,7 @@ trait Names /*extends reflect.generic.Names*/ {
i
}
- /** return the index of the first occurrence of nonempty string s
+ /** Returns the index of the first occurrence of nonempty string s
* in this name from start, length if not found.
*
* @param s the string
@@ -252,7 +233,7 @@ trait Names /*extends reflect.generic.Names*/ {
len
}
- /** return the index of last occurrence of char c in this
+ /** Returns the index of last occurrence of char c in this
* name, -1 if not found.
*
* @param c the character
@@ -262,7 +243,7 @@ trait Names /*extends reflect.generic.Names*/ {
final def lastPos(s: String): Int = lastPos(s, len - s.length)
- /** return the index of the last occurrence of char c in this
+ /** Returns the index of the last occurrence of char c in this
* name from start, -1 if not found.
*
* @param c the character
@@ -275,7 +256,7 @@ trait Names /*extends reflect.generic.Names*/ {
i
}
- /** return the index of the last occurrence of string s in this
+ /** Returns the index of the last occurrence of string s in this
* name from start, -1 if not found.
*
* @param s the string
@@ -295,12 +276,10 @@ trait Names /*extends reflect.generic.Names*/ {
-s.length()
}
- /** does this name start with prefix?
- */
+ /** Does this name start with prefix? */
final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
- /** does this name start with prefix at given start index?
- */
+ /** Does this name start with prefix at given start index? */
final def startsWith(prefix: Name, start: Int): Boolean = {
var i = 0
while (i < prefix.length && start + i < len &&
@@ -309,12 +288,10 @@ trait Names /*extends reflect.generic.Names*/ {
i == prefix.length
}
- /** does this name end with suffix?
- */
+ /** Does this name end with suffix? */
final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
- /** does this name end with suffix just before given end index?
- */
+ /** Does this name end with suffix just before given end index? */
final def endsWith(suffix: Name, end: Int): Boolean = {
var i = 1
while (i <= suffix.length && i <= end &&
@@ -347,8 +324,7 @@ trait Names /*extends reflect.generic.Names*/ {
def lastIndexOf(ch: Char) = toChars lastIndexOf ch
- /** Return the subname with characters from start to end-1.
- */
+ /** Return the subname with characters from start to end-1. */
def subName(from: Int, to: Int): Name
/** Replace all occurrences of `from` by `to` in
@@ -365,8 +341,7 @@ trait Names /*extends reflect.generic.Names*/ {
newTermName(cs, 0, len)
}
- /** Replace operator symbols by corresponding $op_name.
- */
+ /** Replace operator symbols by corresponding $op_name. */
def encode: Name = {
val str = toString()
val res = NameTransformer.encode(str)
@@ -378,8 +353,7 @@ trait Names /*extends reflect.generic.Names*/ {
def append(suffix: String): Name
def append(suffix: Name): Name
- /** Replace $op_name by corresponding operator symbol.
- */
+ /** Replace $op_name by corresponding operator symbol. */
def decode: String = (
NameTransformer.decode(toString()) +
(if (nameDebug && isTypeName) "!" else ""))//debug
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index bb0824c5c1..5c4d44f735 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -31,10 +31,10 @@ abstract class SymbolTable extends /*reflect.generic.Universe
def abort(msg: String): Nothing = throw new Error(msg)
def abort(): Nothing = throw new Error()
- /** Are we compiling for Java SE ? */
+ /** Are we compiling for Java SE? */
// def forJVM: Boolean
- /** Are we compiling for .NET ? */
+ /** Are we compiling for .NET? */
def forMSIL: Boolean = false
/** A period is an ordinal number for a phase in a run.
@@ -63,28 +63,28 @@ abstract class SymbolTable extends /*reflect.generic.Universe
/** The current compiler run identifier. */
def currentRunId: RunId
- /** The run identifier of the given period */
+ /** The run identifier of the given period. */
final def runId(period: Period): RunId = period >> 8
- /** The phase identifier of the given period */
+ /** The phase identifier of the given period. */
final def phaseId(period: Period): Phase#Id = period & 0xFF
- /** The period at the start of run that includes `period' */
+ /** The period at the start of run that includes `period`. */
final def startRun(period: Period): Period = period & 0xFFFFFF00
- /** The current period */
+ /** The current period. */
final def currentPeriod: Period = {
//assert(per == (currentRunId << 8) + phase.id)
per
}
- /** The phase associated with given period */
+ /** The phase associated with given period. */
final def phaseOf(period: Period): Phase = phaseWithId(phaseId(period))
final def period(rid: RunId, pid: Phase#Id): Period =
(currentRunId << 8) + pid
- /** Perform given operation at given phase */
+ /** Perform given operation at given phase. */
final def atPhase[T](ph: Phase)(op: => T): T = {
val current = phase
phase = ph
@@ -113,18 +113,18 @@ abstract class SymbolTable extends /*reflect.generic.Universe
}
}
- /** Break into repl debugger if assertion is true */
+ /** Break into repl debugger if assertion is true. */
// def breakIf(assertion: => Boolean, args: Any*): Unit =
// if (assertion)
// ILoop.break(args.toList)
- /** The set of all installed infotransformers */
+ /** The set of all installed infotransformers. */
var infoTransformers = new InfoTransformer {
val pid = NoPhase.id
val changesBaseClasses = true
def transform(sym: Symbol, tpe: Type): Type = tpe
}
- /** The phase which has given index as identifier */
+ /** The phase which has given index as identifier. */
val phaseWithId: Array[Phase]
}
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 5cdd979a3e..858152870b 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -184,10 +184,10 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
final def newSyntheticValueParam(argtype: Type): Symbol =
newSyntheticValueParams(List(argtype)).head
- /** Type skolems are type parameters ``seen from the inside''
+ /** Type skolems are type parameters ''seen from the inside''
* Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
- * with name `T' in its typeParams list. While type checking the parameters, result type and
- * body of the method, there's a local copy of `T' which is a TypeSkolem.
+ * with name `T` in its typeParams list. While type checking the parameters, result type and
+ * body of the method, there's a local copy of `T` which is a TypeSkolem.
*/
final def newTypeSkolem: Symbol =
new TypeSkolem(owner, pos, name.toTypeName, this)
@@ -503,7 +503,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
final def isEffectivelyFinal: Boolean = isFinal || isTerm && (
hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
- /** Is this symbol locally defined? I.e. not accessed from outside `this' instance */
+ /** Is this symbol locally defined? I.e. not accessed from outside `this` instance */
final def isLocal: Boolean = owner.isTerm
/** Is this symbol a constant? */
@@ -533,14 +533,13 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
(isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
- /** Is this symbol a member of class `clazz'
- */
+ /** Is this symbol a member of class `clazz`? */
def isMemberOf(clazz: Symbol) =
clazz.info.member(name).alternatives contains this
- /** A a member of class `base' is incomplete if
+ /** A a member of class `base` is incomplete if
* (1) it is declared deferred or
- * (2) it is abstract override and its super symbol in `base' is
+ * (2) it is abstract override and its super symbol in `base` is
* nonexistent or incomplete.
*
* @param base ...
@@ -1100,7 +1099,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
/** A total ordering between symbols that refines the class
* inheritance graph (i.e. subclass.isLess(superclass) always holds).
- * the ordering is given by: (_.isType, -_.baseTypeSeq.length) for type symbols, followed by `id'.
+ * the ordering is given by: (_.isType, -_.baseTypeSeq.length) for type symbols, followed by `id`.
*/
final def isLess(that: Symbol): Boolean = {
def baseTypeSeqLength(sym: Symbol) =
@@ -1163,11 +1162,11 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
// ------ cloneing -------------------------------------------------------------------
- /** A clone of this symbol */
+ /** A clone of this symbol. */
final def cloneSymbol: Symbol =
cloneSymbol(owner)
- /** A clone of this symbol, but with given owner */
+ /** A clone of this symbol, but with given owner. */
final def cloneSymbol(owner: Symbol): Symbol = {
val newSym = cloneSymbolImpl(owner)
newSym.privateWithin = privateWithin
@@ -1175,19 +1174,18 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
.setFlag(this.rawflags).setAnnotations(this.annotations)
}
- /** Internal method to clone a symbol's implementation without flags or type
- */
+ /** Internal method to clone a symbol's implementation without flags or type. */
def cloneSymbolImpl(owner: Symbol): Symbol
// ------ access to related symbols --------------------------------------------------
- /** The next enclosing class */
+ /** The next enclosing class. */
def enclClass: Symbol = if (isClass) this else owner.enclClass
- /** The next enclosing method */
+ /** The next enclosing method. */
def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
- /** The primary constructor of a class */
+ /** The primary constructor of a class. */
def primaryConstructor: Symbol = {
var c = info.decl(
if (isTrait || isImplClass) nme.MIXIN_CONSTRUCTOR
@@ -1202,7 +1200,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
*/
def thisSym: Symbol = this
- /** The type of `this' in a class, or else the type of the symbol itself. */
+ /** The type of `this` in a class, or else the type of the symbol itself. */
def typeOfThis = thisSym.tpe
/** If symbol is a class, the type <code>this.type</code> in this class,
@@ -1251,23 +1249,23 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
/** The symbol accessed by this accessor (getter or setter) function. */
final def accessed: Symbol = accessed(owner.info)
- /** The symbol accessed by this accessor function, but with given owner type */
+ /** The symbol accessed by this accessor function, but with given owner type. */
final def accessed(ownerTp: Type): Symbol = {
assert(hasAccessorFlag)
ownerTp.decl(nme.getterToLocal(if (isSetter) nme.setterToGetter(name) else name))
}
/** The module corresponding to this module class (note that this
- * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
+ * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass.
*/
def sourceModule: Symbol = NoSymbol
- /** The implementation class of a trait */
+ /** The implementation class of a trait. */
final def implClass: Symbol = owner.info.decl(nme.implClassName(name))
- /** The class that is logically an outer class of given `clazz'.
+ /** The class that is logically an outer class of given `clazz`.
* This is the enclosing class, except for classes defined locally to constructors,
- * where it is the outer class of the enclosing class
+ * where it is the outer class of the enclosing class.
*/
final def outerClass: Symbol =
if (owner.isClass) owner
@@ -1275,11 +1273,11 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
else owner.outerClass
/** For a paramaccessor: a superclass paramaccessor for which this symbol
- * is an alias, NoSymbol for all others
+ * is an alias, NoSymbol for all others.
*/
def alias: Symbol = NoSymbol
- /** For a lazy value, its lazy accessor. NoSymbol for all others */
+ /** For a lazy value, its lazy accessor. NoSymbol for all others. */
def lazyAccessor: Symbol = NoSymbol
/** If this is a lazy value, the lazy accessor; otherwise this symbol. */
@@ -1290,7 +1288,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
*/
def outerSource: Symbol = NoSymbol
- /** The superclass of this class */
+ /** The superclass of this class. */
def superClass: Symbol = if (info.parents.isEmpty) NoSymbol else info.parents.head.typeSymbol
/** The directly or indirectly inherited mixins of this class
@@ -1302,8 +1300,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
ancestors takeWhile (sc ne)
}
- /** All directly or indirectly inherited classes.
- */
+ /** All directly or indirectly inherited classes. */
def ancestors: List[Symbol] = info.baseClasses drop 1
/** The package class containing this symbol, or NoSymbol if there
@@ -1346,7 +1343,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
*
* def f() { val x = { def g() = ...; g() } }
*
- * In this case the owner chain of `g' is `x', followed by `f' and
+ * In this case the owner chain of `g` is `x`, followed by `f` and
* `g.logicallyEnclosingMember == f`.
*
* Example 2:
@@ -1356,9 +1353,9 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
* val x = { def g() = ...; g() } }
* }
*
- * In this case the owner chain of `g' is `x', followed by `C' but
- * g.logicallyEnclosingMember is the primary constructor symbol `<init>'
- * (or, for traits: `$init') of `C'.
+ * In this case the owner chain of `g` is `x`, followed by `C` but
+ * g.logicallyEnclosingMember is the primary constructor symbol `<init>`
+ * (or, for traits: `$init`) of `C`.
*
*/
def logicallyEnclosingMember: Symbol =
@@ -1366,14 +1363,13 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
else if (isMethod || isClass) this
else owner.logicallyEnclosingMember
- /** The top-level class containing this symbol */
+ /** The top-level class containing this symbol. */
def toplevelClass: Symbol =
if (owner.isPackageClass) {
if (isClass) this else moduleClass
} else owner.toplevelClass
- /** Is this symbol defined in the same scope and compilation unit as `that' symbol?
- */
+ /** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
def isCoDefinedWith(that: Symbol) = (
(this.rawInfo ne NoType) &&
(this.owner == that.owner) && {
@@ -1513,40 +1509,40 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
ofclazz.info.nonPrivateDecl(name).filter(sym =>
!sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
- /** The non-private member of `site' whose type and name match the type of this symbol
- */
+ /** The non-private member of `site` whose type and name match the type of this symbol. */
final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
!sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
- /** The symbol overridden by this symbol in given class `ofclazz'.
- * @pre 'ofclazz' is a base class of this symbol's owner.
+ /** The symbol overridden by this symbol in given class `ofclazz`.
+ *
+ * @param ofclazz a base class of this symbol's owner.
*/
final def overriddenSymbol(ofclazz: Symbol): Symbol =
if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
- /** The symbol overriding this symbol in given subclass `ofclazz'
- * @pre: `ofclazz' is a subclass of this symbol's owner
+ /** The symbol overriding this symbol in given subclass `ofclazz`.
+ *
+ * @param ofclazz a subclass of this symbol's owner
*/
final def overridingSymbol(ofclazz: Symbol): Symbol =
if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
- /** Returns all symbols overriden by this symbol
- */
+ /** Returns all symbols overridden by this symbol. */
final def allOverriddenSymbols: List[Symbol] =
if (!owner.isClass) Nil
else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
/** Returns all symbols overridden by this symbol, plus all matching symbols
- * defined in parents of the selftype
+ * defined in parents of the selftype.
*/
final def extendedOverriddenSymbols: List[Symbol] =
if (!owner.isClass) Nil
else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
/** The symbol accessed by a super in the definition of this symbol when
- * seen from class `base'. This symbol is always concrete.
- * pre: `this.owner' is in the base class sequence of `base'.
+ * seen from class `base`. This symbol is always concrete.
+ * pre: `this.owner` is in the base class sequence of `base`.
*/
final def superSymbol(base: Symbol): Symbol = {
var bcs = base.info.baseClasses.dropWhile(owner !=).tail
@@ -1559,7 +1555,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
sym
}
- /** The getter of this value or setter definition in class `base', or NoSymbol if
+ /** The getter of this value or setter definition in class `base`, or NoSymbol if
* none exists.
*/
final def getter(base: Symbol): Symbol = {
@@ -1594,7 +1590,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
* where it was unpacked. Resulttype is AnyRef because trees are not visible here. */
def unpackLocation: AnyRef = null
- /** Remove private modifier from symbol `sym's definition. If `sym' is a
+ /** Remove private modifier from symbol `sym`'s definition. If `sym` is a
* term symbol rename it by expanding its name to avoid name clashes
*/
final def makeNotPrivate(base: Symbol) {
@@ -1608,7 +1604,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
}
}
- /** change name by appending $$<fully-qualified-name-of-class `base'>
+ /** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters
*/
def expandName(base: Symbol) {
@@ -1999,12 +1995,16 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
/** Let's say you have a type definition
*
+ * {{{
* type T <: Number
+ * }}}
*
* and tsym is the symbol corresponding to T. Then
*
+ * {{{
* tsym.info = TypeBounds(Nothing, Number)
* tsym.tpe = TypeRef(NoPrefix, T, List())
+ * }}}
*/
override def tpe: Type = {
if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
@@ -2077,7 +2077,7 @@ trait Symbols /* extends reflect.generic.Symbols*/ { self: SymbolTable =>
* the type parameter from which the skolem was created. If it got created from
* skolemizeExistential, origin is either null or a Tree. If it is a Tree, it indicates
* where the skolem was introduced (this is important for knowing when to pack it
- * again into ab Existential). origin is `null' only in skolemizeExistentials called
+ * again into an Existential). origin is `null` only in skolemizeExistentials called
* from <:< or isAsSpecific, because here its value does not matter.
* I elieve the following invariant holds:
*
diff --git a/src/compiler/scala/reflect/internal/TreeGen.scala b/src/compiler/scala/reflect/internal/TreeGen.scala
index faf793527e..379782c615 100644
--- a/src/compiler/scala/reflect/internal/TreeGen.scala
+++ b/src/compiler/scala/reflect/internal/TreeGen.scala
@@ -121,7 +121,7 @@ abstract class TreeGen {
None
}
- /** Cast `tree' to type `pt' */
+ /** Cast `tree` to type `pt` */
def mkCast(tree: Tree, pt: Type): Tree = {
if (settings.debug.value) log("casting " + tree + ":" + tree.tpe + " to " + pt)
assert(!tree.tpe.isInstanceOf[MethodType], tree)
@@ -178,7 +178,7 @@ abstract class TreeGen {
def mkAsInstanceOf(value: Tree, tpe: Type, any: Boolean = true): Tree =
mkTypeApply(value, tpe, (if (any) Any_asInstanceOf else Object_asInstanceOf))
- /** Cast `tree' to 'pt', unless tpe is a subtype of pt, or pt is Unit. */
+ /** Cast `tree` to `pt`, unless tpe is a subtype of pt, or pt is Unit. */
def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
if ((pt == UnitClass.tpe) || (tpe <:< pt)) {
log("no need to cast from " + tpe + " to " + pt)
@@ -237,4 +237,4 @@ abstract class TreeGen {
// tree1 OR tree2
def mkOr(tree1: Tree, tree2: Tree): Tree =
Apply(Select(tree1, Boolean_or), List(tree2))
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 2ccd122a6c..44913699b4 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -141,13 +141,13 @@ abstract class TreeInfo {
case _ => false
}
- /** The first constructor definitions in `stats' */
+ /** The first constructor definitions in `stats` */
def firstConstructor(stats: List[Tree]): Tree = stats find {
case x: DefDef => nme.isConstructorName(x.name)
case _ => false
} getOrElse EmptyTree
- /** The arguments to the first constructor in `stats'. */
+ /** The arguments to the first constructor in `stats`. */
def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
case DefDef(_, _, _, args :: _, _, _) => args
case _ => Nil
@@ -204,7 +204,7 @@ abstract class TreeInfo {
((first.isLower && first.isLetter) || first == '_') && !reserved(name)
}
- /** Is tree a this node which belongs to `enclClass'? */
+ /** Is tree a this node which belongs to `enclClass`? */
def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
case This(_) => tree.symbol == enclClass
case _ => false
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 2ee1a59355..4d202c6c60 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -27,7 +27,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
/** @param privateWithin the qualifier for a private (a type name)
* or tpnme.EMPTY, if none is given.
* @param annotations the annotations for the definition.
- * <strong>Note:</strong> the typechecker drops these annotations,
+ * '''Note:''' the typechecker drops these annotations,
* use the AnnotationInfo's (Symbol.annotations) in later phases.
*/
case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) extends HasFlags {
@@ -132,22 +132,22 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
def isErroneous = (this.tpe ne null) && this.tpe.isErroneous
def isTyped = (this.tpe ne null) && !this.tpe.isErroneous
- /** Apply `f' to each subtree */
+ /** Apply `f` to each subtree */
def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
- /** If 'pf' is defined for a given subtree, call super.traverse(pf(tree)),
+ /** If `pf` is defined for a given subtree, call super.traverse(pf(tree)),
* otherwise super.traverse(tree).
*/
def foreachPartial(pf: PartialFunction[Tree, Tree]) { new ForeachPartialTreeTraverser(pf).traverse(this) }
- /** Find all subtrees matching predicate `p' */
+ /** Find all subtrees matching predicate `p` */
def filter(f: Tree => Boolean): List[Tree] = {
val ft = new FilterTreeTraverser(f)
ft.traverse(this)
ft.hits.toList
}
- /** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p',
+ /** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
*/
def find(p: Tree => Boolean): Option[Tree] = {
@@ -162,7 +162,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
}
}
- /** Is there part of this tree which satisfies predicate `p'? */
+ /** Is there part of this tree which satisfies predicate `p`? */
def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
def equalsStructure(that : Tree) = equalsStructure0(that)(_ eq _)
@@ -491,7 +491,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
*/
case class ApplyDynamic(qual: Tree, args: List[Tree])
extends TermTree with SymTree
- // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
+ // The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
/** Super reference, qual = corresponding this reference */
case class Super(qual: Tree, mix: TypeName) extends TermTree {
@@ -564,7 +564,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
/** A synthetic tree holding an arbitrary type. Not to be confused with
* with TypTree, the trait for trees that are only used for type trees.
* TypeTree's are inserted in several places, but most notably in
- * <code>RefCheck</code>, where the arbitrary type trees are all replaced by
+ * `RefCheck`, where the arbitrary type trees are all replaced by
* TypeTree's. */
case class TypeTree() extends TypTree {
private var orig: Tree = null
@@ -628,9 +628,9 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
* supercall arguments and template body.
*
* @param sym the class symbol
- * @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)'
+ * @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
- * should be owned by `sym'
+ * should be owned by `sym`
* @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
@@ -689,13 +689,13 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
DefDef(sym, rhs(sym.info.paramss))
}
- /** A TypeDef node which defines given `sym' with given tight hand side `rhs'. */
+ /** A TypeDef node which defines given `sym` with given right hand side `rhs`. */
def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
atPos(sym.pos) {
TypeDef(Modifiers(sym.flags), sym.name.toTypeName, sym.typeParams map TypeDef, rhs) setSymbol sym
}
- /** A TypeDef node which defines abstract type or type parameter for given `sym' */
+ /** A TypeDef node which defines abstract type or type parameter for given `sym` */
def TypeDef(sym: Symbol): TypeDef =
TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
@@ -1561,7 +1561,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: SymbolTable =>
override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to)
}
- /** Substitute symbols in 'from' with symbols in 'to'. Returns a new
+ /** Substitute symbols in `from` with symbols in `to`. Returns a new
* tree using the new symbols and whose Ident and Select nodes are
* name-consistent with the new symbols.
*/
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index cc5c2fa5c1..408c287d24 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -56,7 +56,7 @@ import util.Statistics._
case PolyType(tparams, result) =>
// [tparams]result where result is a (Nullary)MethodType or ClassInfoType
- // The remaining types are not used after phase `typer'.
+ // The remaining types are not used after phase `typer`.
case OverloadedType(pre, tparams, alts) =>
// all alternatives of an overloaded ident
case AntiPolyType(pre, targs) =>
@@ -85,16 +85,16 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
private final val LogPendingBaseTypesThreshold = 50
private final val LogVolatileThreshold = 50
- /** A don't care value for the depth parameter in lubs/glbs and related operations */
+ /** A don't care value for the depth parameter in lubs/glbs and related operations. */
private final val AnyDepth = -3
- /** Decrement depth unless it is a don't care */
+ /** Decrement depth unless it is a don't care. */
private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
private final val printLubs = false
/** The current skolemization level, needed for the algorithms
- * in isSameType, isSubType that do constraint solving under a prefix
+ * in isSameType, isSubType that do constraint solving under a prefix.
*/
var skolemizationLevel = 0
@@ -105,8 +105,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
private type UndoLog = List[(TypeVar, TypeConstraint)]
private[scala] var log: UndoLog = List()
- /** Undo all changes to constraints to type variables upto `limit'
- */
+ /** Undo all changes to constraints to type variables up to `limit`. */
private def undoTo(limit: UndoLog) {
while ((log ne limit) && log.nonEmpty) {
val (tv, constr) = log.head
@@ -158,7 +157,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
//import gen._
- /** A proxy for a type (identified by field `underlying') that forwards most
+ /** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
* every operation that is overridden for some kind of types should be forwarded.
*/
@@ -199,7 +198,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
override def baseClasses = underlying.baseClasses
}
- /** A proxy for a type (identified by field `underlying') that forwards most
+ /** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it. Every operation that is overridden for some kind of types is
* forwarded here. Some operations are rewrapped again.
*/
@@ -257,21 +256,21 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def isVolatile: Boolean = false
- /** Is this type guaranteed not to have `null' as a value? */
+ /** Is this type guaranteed not to have `null` as a value? */
def isNotNull: Boolean = false
- /** Is this type a structural refinement type (it 'refines' members that have not been inherited) */
+ /** Is this type a structural refinement type (it ''refines'' members that have not been inherited) */
def isStructuralRefinement: Boolean = false
/** Does this type depend immediately on an enclosing method parameter?
- * i.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)
+ * I.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)?
*/
def isImmediatelyDependent: Boolean = false
/** Does this depend on an enclosing method parameter? */
def isDependent: Boolean = IsDependentCollector.collect(this)
- /** True for WildcardType or BoundedWildcardType */
+ /** True for WildcardType or BoundedWildcardType. */
def isWildcard = false
/** Is this type produced as a repair for an error? */
@@ -290,11 +289,10 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
def isFinalType =
typeSymbol.isFinal && (typeSymbol.typeParams forall (_.variance == 0))
- /** Is this type completed (i.e. not a lazy type)?
- */
+ /** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
- /** If this is a lazy type, assign a new type to `sym'. */
+ /** If this is a lazy type, assign a new type to `sym`. */
def complete(sym: Symbol) {}
/** The term symbol associated with the type
@@ -307,20 +305,17 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def typeSymbol: Symbol = NoSymbol
- /** The term symbol *directly* associated with the type
- */
+ /** The term symbol ''directly'' associated with the type. */
def termSymbolDirect: Symbol = termSymbol
- /** The type symbol *directly* associated with the type
- */
+ /** The type symbol ''directly'' associated with the type. */
def typeSymbolDirect: Symbol = typeSymbol
- /** The base type underlying a type proxy,
- * identity on all other types */
+ /** The base type underlying a type proxy, identity on all other types */
def underlying: Type = this
/** Widen from singleton type to its underlying non-singleton
- * base type by applying one or more `underlying' dereferences,
+ * base type by applying one or more `underlying` dereferences,
* identity for all other types.
*
* class Outer { class C ; val x: C }
@@ -334,13 +329,14 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def deconst: Type = this
- /** The type of `this' of a class type or reference type
- */
+ /** The type of `this` of a class type or reference type. */
def typeOfThis: Type = typeSymbol.typeOfThis
/** Map to a singleton type which is a subtype of this type.
- * The fallback implemented here gives
+ * The fallback implemented here gives:
+ * {{{
* T.narrow = (T {}).this.type
+ * }}}
* Overridden where we know more about where types come from.
*
* todo: change to singleton type of an existentially defined variable
@@ -474,13 +470,13 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def decls: Scope = EmptyScope
- /** The defined or declared members with name `name' in this type;
+ /** The defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
- /** The non-private defined or declared members with name `name' in this type;
+ /** The non-private defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Alternatives of overloaded symbol appear in the order they are declared.
*/
@@ -540,9 +536,9 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def baseType(clazz: Symbol): Type = NoType
- /** This type as seen from prefix `pre' and class `clazz'. This means:
- * Replace all thistypes of `clazz' or one of its subclasses
- * by `pre' and instantiate all parameters by arguments of `pre'.
+ /** This type as seen from prefix `pre` and class `clazz`. This means:
+ * Replace all thistypes of `clazz` or one of its subclasses
+ * by `pre` and instantiate all parameters by arguments of `pre`.
* Proceed analogously for thistypes referring to outer classes.
*
* Example:
@@ -562,18 +558,20 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
result
} else this
- /** The info of `sym', seen as a member of this type.
+ /** The info of `sym`, seen as a member of this type.
*
* Example:
+ * {{{
* class D[T] { def m: T }
* class C extends p.D[Int]
* ThisType(C).memberType(m) = Int
+ * }}}
*/
def memberInfo(sym: Symbol): Type = {
sym.info.asSeenFrom(this, sym.owner)
}
- /** The type of `sym', seen as a member of this type. */
+ /** The type of `sym`, seen as a member of this type. */
def memberType(sym: Symbol): Type = sym match {
case meth: MethodSymbol =>
meth.typeAsMemberOf(this)
@@ -588,15 +586,15 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
tp.asSeenFrom(this, sym.owner)
}
- /** Substitute types `to' for occurrences of references to
- * symbols `from' in this type.
+ /** Substitute types `to` for occurrences of references to
+ * symbols `from` in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
if (from.isEmpty) this
else new SubstTypeMap(from, to) apply this
- /** Substitute symbols `to' for occurrences of symbols
- * `from' in this type.
+ /** Substitute symbols `to` for occurrences of symbols `from` in this type.
+ *
* !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
* first, as otherwise symbols will immediately get rebound in typeRef to the old
* symbol.
@@ -605,8 +603,8 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
if (from eq to) this
else new SubstSymMap(from, to) apply this
- /** Substitute all occurrences of `ThisType(from)' in this type
- * by `to'.
+ /** Substitute all occurrences of `ThisType(from)` in this type by `to`.
+ *
* !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
* first, as otherwise symbols will immediately get rebound in typeRef to the old
* symbol.
@@ -617,23 +615,23 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
def substSuper(from: Type, to: Type): Type =
new SubstSuperMap(from, to) apply this
- /** Returns all parts of this type which satisfy predicate `p' */
+ /** Returns all parts of this type which satisfy predicate `p` */
def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p).collect(this).toList
- /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p',
+ /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
*/
def find(p: Type => Boolean): Option[Type] = new FindTypeCollector(p).collect(this)
- /** Apply `f' to each part of this type */
+ /** Apply `f` to each part of this type */
def foreach(f: Type => Unit) { new ForEachTypeTraverser(f).traverse(this) }
- /** Apply `f' to each part of this type; children get mapped before their parents */
+ /** Apply `f` to each part of this type; children get mapped before their parents */
def map(f: Type => Type): Type = new TypeMap {
def apply(x: Type) = f(mapOver(x))
} apply this
- /** Is there part of this type which satisfies predicate `p'? */
+ /** Is there part of this type which satisfies predicate `p`? */
def exists(p: Type => Boolean): Boolean = !find(p).isEmpty
/** Does this type contain a reference to this symbol? */
@@ -704,8 +702,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
else isSameType(this, that))
);
- /** Does this type implement symbol `sym' with same or stronger type?
- */
+ /** Does this type implement symbol `sym` with same or stronger type? */
def specializes(sym: Symbol): Boolean =
if (explainSwitch) explain("specializes", specializesSym, this, sym)
else specializesSym(this, sym)
@@ -724,8 +721,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
- /** Same as matches, except that non-method types are always assumed to match.
- */
+ /** Same as matches, except that non-method types are always assumed to match. */
def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
/** The shortest sorted upwards closed array of types that contains
@@ -780,12 +776,11 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
}
/** If this is a poly- or methodtype, a copy with cloned type / value parameters
- * owned by `owner'. Identity for all other types.
+ * owned by `owner`. Identity for all other types.
*/
def cloneInfo(owner: Symbol) = this
- /** Make sure this type is correct as the info of given owner; clone it if not.
- */
+ /** Make sure this type is correct as the info of given owner; clone it if not. */
def atOwner(owner: Symbol) = this
protected def objectPrefix = "object "
@@ -816,14 +811,14 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
*/
def safeToString: String = super.toString
- /** The string representation of this type, with singletypes explained */
+ /** The string representation of this type, with singletypes explained. */
def toLongString = {
val str = toString
if (str endsWith ".type") str + " (with underlying type " + widen + ")"
else str
}
- /** A test whether a type contains any unification type variables */
+ /** A test whether a type contains any unification type variables. */
def isGround: Boolean = this match {
case TypeVar(_, constr) =>
constr.instValid && constr.inst.isGround
@@ -837,9 +832,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
typeVarToOriginMap(this) eq this
}
- /** If this is a symbol loader type, load and assign a new type to
- * `sym'.
- */
+ /** If this is a symbol loader type, load and assign a new type to `sym`. */
def load(sym: Symbol) {}
private def findDecl(name: Name, excludedFlags: Int): Symbol = {
@@ -864,7 +857,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
* Find member(s) in this type. If several members matching criteria are found, they are
* returned in an OverloadedSymbol
*
- * @param name The member's name, where nme.ANYNAME means `unspecified'
+ * @param name The member's name, where nme.ANYNAME means `unspecified`
* @param excludedFlags Returned members do not have these flags
* @param requiredFlags Returned members do have these flags
* @param stableOnly If set, return only members that are types or stable values
@@ -1171,9 +1164,8 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
}
}
- /** A class for singleton types of the form <prefix>.<sym.name>.type.
- * Cannot be created directly; one should always use
- * `singleType' for creation.
+ /** A class for singleton types of the form `<prefix>.<sym.name>.type`.
+ * Cannot be created directly; one should always use `singleType` for creation.
*/
abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
override val isTrivial: Boolean = pre.isTrivial
@@ -1421,9 +1413,9 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
}
/** A class representing intersection types with refinements of the form
- * `<parents_0> with ... with <parents_n> { decls }'
+ * `<parents_0> with ... with <parents_n> { decls }`
* Cannot be created directly;
- * one should always use `refinedType' for creation.
+ * one should always use `refinedType` for creation.
*/
case class RefinedType(override val parents: List[Type],
override val decls: Scope) extends CompoundType {
@@ -1468,7 +1460,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
* either i > 1, or decls or a following parent Pj, j > 1, contributes
* an abstract member.
* A type contributes an abstract member if it has an abstract member which
- * is also a member of the whole refined type. A scope `decls' contributes
+ * is also a member of the whole refined type. A scope `decls` contributes
* an abstract member if it has an abstract definition which is also
* a member of the whole type.
*/
@@ -1532,7 +1524,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
getRefs(Expansive, tparam)
}
- /* The rest of this class is auxiliary code for `expansiveRefs'
+ /* The rest of this class is auxiliary code for `expansiveRefs`
*/
/** The type parameters which are referenced type parameters of this class.
@@ -1562,7 +1554,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
}
/** Augment existing refs map with references <pre>from -> sym</pre>, for
- * all elements <pre>sym</pre> of set `to'.
+ * all elements <pre>sym</pre> of set `to`.
* @param which <- {NonExpansive, Expansive}
*/
private def addRefs(which: Int, from: Symbol, to: Set[Symbol]) {
@@ -1578,7 +1570,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
// (this can happen only for erroneous programs).
}
- /** Compute initial (one-step) references and set state to `Initializing'.
+ /** Compute initial (one-step) references and set state to `Initializing`.
*/
private def computeRefs() {
refs = Array(Map(), Map())
@@ -1662,8 +1654,8 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
}
final class UniqueConstantType(value: Constant) extends ConstantType(value) with UniqueType {
- /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
- * which might not be found after 'flatten'. */
+ /** Save the type of `value`. For Java enums, it depends on finding the linked class,
+ * which might not be found after `flatten`. */
private lazy val _tpe: Type = value.tpe
override def underlying: Type = _tpe
}
@@ -1677,8 +1669,8 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
private val pendingVolatiles = new mutable.HashSet[Symbol]
/** A class for named types of the form
- * `<prefix>.<sym.name>[args]'
- * Cannot be created directly; one should always use `typeRef'
+ * `<prefix>.<sym.name>[args]`
+ * Cannot be created directly; one should always use `typeRef`
* for creation. (@M: Otherwise hashing breaks)
*
* @M: a higher-kinded type is represented as a TypeRef with sym.info.typeParams.nonEmpty, but args.isEmpty
@@ -1747,7 +1739,7 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
override def isNotNull =
sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull
- // @M: propagate actual type params (args) to `tp', by replacing formal type parameters with actual ones
+ // @M: propagate actual type params (args) to `tp`, by replacing formal type parameters with actual ones
// if tp is higher kinded, the "actual" type arguments are types that simply reference the corresponding type parameters (unbound type variables)
def transform(tp: Type): Type = {
val res = tp.asSeenFrom(pre, sym.owner)
@@ -1787,11 +1779,11 @@ trait Types /*extends reflect.generic.Types*/ { self: SymbolTable =>
override def termSymbolDirect = super.termSymbol
/* @MAT
-whenever you see `tp.typeSymbol.isXXXX' and then act on tp based on that predicate, you're on thin ice,
-as `typeSymbol' (and `prefix') automatically normalize, but the other inspectors don't.
-In other words, even if `tp.normalize.sym.isXXX' is true, `tp.sym.isXXX' may be false (if sym were a public method to access the non-normalized typeSymbol)...
+whenever you see `tp.typeSymbol.isXXXX` and then act on tp based on that predicate, you're on thin ice,
+as `typeSymbol` (and `prefix`) automatically normalize, but the other inspectors don't.
+In other words, even if `tp.normalize.sym.isXXX` is true, `tp.sym.isXXX` may be false (if sym were a public method to access the non-normalized typeSymbol)...
-In retrospect, I think `tp.typeSymbol.isXXX' or (worse) `tp.typeSymbol==XXX' should be replaced by `val tp = tp0.asXXX'.
+In retrospect, I think `tp.typeSymbol.isXXX` or (worse) `tp.typeSymbol==XXX` should be replaced by `val tp = tp0.asXXX`.
A type's typeSymbol should never be inspected directly.
*/
@@ -1843,7 +1835,7 @@ A type's typeSymbol should never be inspected directly.
override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
override def typeConstructor = TypeRef(pre, sym, Nil)
- // note: does not go through typeRef. There's no need to because neither `pre' nor `sym' changes.
+ // note: does not go through typeRef. There's no need to because neither `pre` nor `sym` changes.
// And there's a performance advantage to call TypeRef directly.
@@ -2292,7 +2284,7 @@ A type's typeSymbol should never be inspected directly.
}
/** A class containing the alternatives and type prefix of an overloaded symbol.
- * Not used after phase `typer'.
+ * Not used after phase `typer`.
*/
case class OverloadedType(pre: Type, alternatives: List[Symbol]) extends Type {
override def prefix: Type = pre
@@ -2303,7 +2295,7 @@ A type's typeSymbol should never be inspected directly.
/** A class remembering a type instantiation for some a set of overloaded
* polymorphic symbols.
- * Not used after phase `typer'.
+ * Not used after phase `typer`.
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
@@ -2358,7 +2350,7 @@ A type's typeSymbol should never be inspected directly.
}
/** A class representing a type variable
- * Not used after phase `typer'.
+ * Not used after phase `typer`.
* A higher-kinded type variable has type arguments (a list of Type's) and type parameters (list of Symbols)
* A TypeVar whose list of args is non-empty can only be instantiated by a higher-kinded type that can be applied to these args
* a typevar is much like a typeref, except it has special logic for type equality/subtyping
@@ -2375,11 +2367,10 @@ A type's typeSymbol should never be inspected directly.
/** The variable's skolemization level */
val level = skolemizationLevel
- /**
- * two occurrences of a higher-kinded typevar, e.g. ?CC[Int] and ?CC[String], correspond to
- * *two instances* of TypeVar that share the *same* TypeConstraint
- * constr for ?CC only tracks type constructors anyway, so when ?CC[Int] <:< List[Int] and ?CC[String] <:< Iterable[String]
- * ?CC's hibounds contains List and Iterable
+ /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
+ * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`
+ * `constr` for `?CC` only tracks type constructors anyway, so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
+ * `?CC`'s hibounds contains `List` and `Iterable`
*/
def applyArgs(newArgs: List[Type]): TypeVar =
if (newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...)
@@ -2453,18 +2444,24 @@ A type's typeSymbol should never be inspected directly.
* no type parameters, or we are comparing to Any/Nothing.
*
* The latter condition is needed because HK unification is limited to constraints of the shape
+ * {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
+ * }}}
* which would preclude the following important constraints:
+ * {{{
* Nothing <: ?TC[?T]
* ?TC[?T] <: Any
+ * }}}
*/
def unifySimple = (params.isEmpty || tp.typeSymbol == NothingClass || tp.typeSymbol == AnyClass) &&
addBound(tp)
/** Full case: involving a check of the form
+ * {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
+ * }}}
* Checks subtyping of higher-order type vars, and uses variances as defined in the
- * type parameter we're trying to infer (the result will be sanity-checked later)
+ * type parameter we're trying to infer (the result will be sanity-checked later).
*/
def unifyFull(tp: Type) = sameLength(typeArgs, tp.typeArgs) && { // this is a higher-kinded type var with same arity as tp
// side effect: adds the type constructor itself as a bound
@@ -2473,8 +2470,7 @@ A type's typeSymbol should never be inspected directly.
else isSubArgs(typeArgs, tp.typeArgs, params)
}
- /** TODO: need positive/negative test cases demonstrating this is correct.
- */
+ /** TODO: need positive/negative test cases demonstrating this is correct. */
def unifyParents =
if (isLowerBound) tp.parents exists unifyFull
else tp.parents forall unifyFull
@@ -2508,10 +2504,10 @@ A type's typeSymbol should never be inspected directly.
}
/**
- * ?A.T =:= tp is rewritten as the constraint ?A <: {type T = tp}
+ * `?A.T =:= tp` is rewritten as the constraint `?A <: {type T = tp}`
*
- * TODO: make these constraints count (incorporate them into implicit search in applyImplicitArgs)
- * (T corresponds to @param sym)
+ * TODO: make these constraints count (incorporate them into implicit search in `applyImplicitArgs`)
+ * (`T` corresponds to @param sym)
*/
def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
val bound = refinedType(List(WildcardType), NoSymbol)
@@ -2521,8 +2517,8 @@ A type's typeSymbol should never be inspected directly.
registerBound(bound, false)
}
- /** Can this variable be related in a constraint to type `tp'?
- * This is not the case if `tp' contains type skolems whose
+ /** Can this variable be related in a constraint to type `tp`?
+ * This is not the case if `tp` contains type skolems whose
* skolemization level is higher than the level of this variable.
*/
def isRelatable(tp: Type): Boolean =
@@ -2563,12 +2559,12 @@ A type's typeSymbol should never be inspected directly.
}
/** A type carrying some annotations. Created by the typechecker
- * when eliminating ``Annotated'' trees (see typedAnnotated).
+ * when eliminating ''Annotated'' trees (see typedAnnotated).
*
* @param annotations the list of annotations on the type
* @param underlying the type without the annotation
- * @param selfsym a "self" symbol with type <code>underlying</code>;
- * only available if -Yself-in-annots is turned on. Can be NoSymbol
+ * @param selfsym a "self" symbol with type `underlying`;
+ * only available if -Yself-in-annots is turned on. Can be `NoSymbol`
* if it is not used.
*/
case class AnnotatedType(override val annotations: List[AnnotationInfo],
@@ -2653,8 +2649,7 @@ A type's typeSymbol should never be inspected directly.
// Creators ---------------------------------------------------------------
- /** Rebind symbol `sym' to an overriding member in type `pre'.
- */
+ /** Rebind symbol `sym` to an overriding member in type `pre`. */
private def rebind(pre: Type, sym: Symbol): Symbol = {
val owner = sym.owner
if (owner.isClass && owner != pre.typeSymbol && !sym.isEffectivelyFinal && !sym.isClass) {
@@ -2668,9 +2663,7 @@ A type's typeSymbol should never be inspected directly.
} else sym
}
- /** Convert a `super' prefix to a this-type if `sym'
- * is abstract or final.
- */
+ /** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
case SuperType(thistp, _) =>
if (sym.isEffectivelyFinal || sym.isDeferred) thistp
@@ -2845,14 +2838,14 @@ A type's typeSymbol should never be inspected directly.
}
/** A creator for type parameterizations that strips empty type parameter lists.
- * Use this factory method to indicate the type has kind * (it's a polymorphic value)
- * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty)
+ * Use this factory method to indicate the type has kind * (it's a polymorphic value)
+ * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
*/
def polyType(tparams: List[Symbol], tpe: Type): Type =
if (tparams nonEmpty) typeFun(tparams, tpe)
else tpe // it's okay to be forgiving here
- /** A creator for anonymous type functions, where the symbol for the type function still needs to be created
+ /** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
*
* TODO:
* type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion
@@ -2860,23 +2853,22 @@ A type's typeSymbol should never be inspected directly.
*/
def typeFunAnon(tps: List[Symbol], body: Type): Type = typeFun(tps, body)
- /** A creator for a type functions, assuming the type parameters tps already have the right owner
- */
+ /** A creator for a type functions, assuming the type parameters tps already have the right owner. */
def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body)
/** A creator for existential types. This generates:
*
* tpe1 where { tparams }
*
- * where `tpe1' is the result of extrapolating `tpe' wrt to `tparams'. Extrapolating means
- * that type variables in `tparams' occurring in covariant positions are replaced by upper bounds,
+ * where `tpe1` is the result of extrapolating `tpe` wrt `tparams`. Extrapolating means
+ * that type variables in `tparams` occurring in covariant positions are replaced by upper bounds,
* (minus any SingletonClass markers),
- * type variables in `tparams' occurring in contravariant positions are replaced by upper bounds,
+ * type variables in `tparams` occurring in contravariant positions are replaced by upper bounds,
* provided the resulting type is legal wrt to stability, and does not contain any
- * type variable in `tparams'.
+ * type variable in `tparams`.
* The abstraction drops all type parameters that are not directly or indirectly
- * referenced by type `tpe1'.
- * If there are no remaining type parameters, simply returns result type `tpe'.
+ * referenced by type `tpe1`.
+ * If there are no remaining type parameters, simply returns result type `tpe`.
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
@@ -3346,19 +3338,19 @@ A type's typeSymbol should never be inspected directly.
def isRaw(sym: Symbol, args: List[Type]) =
!phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
- /** Is type tp a ``raw type''? */
+ /** Is type tp a ''raw type''? */
def isRawType(tp: Type) = tp match {
case TypeRef(_, sym, args) => isRaw(sym, args)
case _ => false
}
- /** The raw to existential map converts a ``raw type'' to an existential type.
+ /** The raw to existential map converts a ''raw type'' to an existential type.
* It is necessary because we might have read a raw type of a
* parameterized Java class from a class file. At the time we read the type
* the corresponding class file might still not be read, so we do not
* know what the type parameters of the type are. Therefore
* the conversion of raw types to existential types might not have taken place
- * in ClassFileparser.sigToType (where it is usually done)
+ * in `ClassfileParser.sigToType` (where it is usually done).
*/
object rawToExistential extends TypeMap {
private var expanded = immutable.Set[Symbol]()
@@ -3428,7 +3420,7 @@ A type's typeSymbol should never be inspected directly.
qvar
}).tpe
- /** Return pre.baseType(clazz), or if that's NoType and clazz is a refinement, pre itself.
+ /** Return `pre.baseType(clazz)`, or if that's `NoType` and `clazz` is a refinement, `pre` itself.
* See bug397.scala for an example where the second alternative is needed.
* The problem is that when forming the base type sequence of an abstract type,
* any refinements in the base type list might be regenerated, and thus acquire
@@ -3526,9 +3518,7 @@ A type's typeSymbol should never be inspected directly.
val fromContains = (x: Symbol) => from.contains(x) //from.toSet <-- traversing short lists seems to be faster than allocating sets
assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
- /** Are `sym' and `sym1' the same.
- * Can be tuned by subclasses.
- */
+ /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
/** Map target to type, can be tuned by subclasses */
@@ -3581,7 +3571,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `substSym' method. */
+ /** A map to implement the `substSym` method. */
class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
@@ -3637,7 +3627,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `subst' method. */
+ /** A map to implement the `subst` method. */
class SubstTypeMap(from: List[Symbol], to: List[Type])
extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
@@ -3659,7 +3649,7 @@ A type's typeSymbol should never be inspected directly.
}
- /** A map to implement the `substThis' method. */
+ /** A map to implement the `substThis` method. */
class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
def apply(tp: Type): Type = tp match {
case ThisType(sym) if (sym == from) => to
@@ -3799,8 +3789,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to convert every occurrence of a type variable to a
- wildcard type */
+ /** A map to convert every occurrence of a type variable to a wildcard type. */
object typeVarToOriginMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeVar(origin, _) => origin
@@ -3808,7 +3797,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `contains' method */
+ /** A map to implement the `contains` method. */
class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
def traverse(tp: Type) {
if (!result) {
@@ -3830,7 +3819,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `contains' method */
+ /** A map to implement the `contains` method. */
class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
def traverse(tp: Type) {
if (!result) {
@@ -3846,7 +3835,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `filter' method */
+ /** A map to implement the `filter` method. */
class FilterTypeCollector(p: Type => Boolean) extends TypeCollector(new ListBuffer[Type]) {
def traverse(tp: Type) {
if (p(tp)) result += tp
@@ -3861,7 +3850,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `filter' method */
+ /** A map to implement the `filter` method. */
class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
def traverse(tp: Type) {
if (result.isEmpty) {
@@ -3871,7 +3860,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** A map to implement the `contains' method */
+ /** A map to implement the `contains` method. */
object ErroneousCollector extends TypeCollector(false) {
def traverse(tp: Type) {
if (!result) {
@@ -4041,9 +4030,9 @@ A type's typeSymbol should never be inspected directly.
final val LubGlbMargin = 0
- /** The maximum allowable depth of lubs or glbs over types `ts'
+ /** The maximum allowable depth of lubs or glbs over types `ts`.
* This is the maximum depth of all types in the base type sequences
- * of each of the types `ts', plus LubGlbMargin
+ * of each of the types `ts`, plus LubGlbMargin.
*/
def lubDepth(ts: List[Type]) = {
var d = 0
@@ -4091,8 +4080,8 @@ A type's typeSymbol should never be inspected directly.
check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investgate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also.
}
- /** Does a pattern of type `patType' need an outer test when executed against
- * selector type `selType' in context defined by `currentOwner'?
+ /** Does a pattern of type `patType` need an outer test when executed against
+ * selector type `selType` in context defined by `currentOwner`?
*/
def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
def createDummyClone(pre: Type): Type = {
@@ -4150,8 +4139,7 @@ A type's typeSymbol should never be inspected directly.
if (sym1 == sym2) sym1.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
- /** Do `tp1' and `tp2' denote equivalent types?
- */
+ /** Do `tp1` and `tp2` denote equivalent types? */
def isSameType(tp1: Type, tp2: Type): Boolean = try {
incCounter(sametypeCount)
subsametypeRecursions += 1
@@ -4480,9 +4468,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** Are `tps1' and `tps2' lists of pairwise equivalent
- * types?
- */
+ /** Are `tps1` and `tps2` lists of pairwise equivalent types? */
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
/** True if two lists have the same length. Since calling length on linear sequences
@@ -4618,8 +4604,7 @@ A type's typeSymbol should never be inspected directly.
def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
- /** Does type `tp1' conform to `tp2'?
- */
+ /** Does type `tp1` conform to `tp2`? */
private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
if ((tp1 eq NoType) || (tp2 eq NoType)) return false
@@ -4807,14 +4792,13 @@ A type's typeSymbol should never be inspected directly.
firstTry
}
- /** Are `tps1' and `tps2' lists of equal length such
- * that all elements of `tps1' conform to corresponding elements
- * of `tps2'?
+ /** Are `tps1` and `tps2` lists of equal length such that all elements
+ * of `tps1` conform to corresponding elements of `tps2`?
*/
def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
- /** Does type `tp' implement symbol `sym' with same or
- * stronger type? Exact only if `sym' is a member of some
+ /** Does type `tp` implement symbol `sym` with same or
+ * stronger type? Exact only if `sym` is a member of some
* refinement type, otherwise we might return false negatives.
*/
def specializesSym(tp: Type, sym: Symbol): Boolean =
@@ -4823,8 +4807,8 @@ A type's typeSymbol should never be inspected directly.
(tp.nonPrivateMember(sym.name).alternatives exists
(alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym)))
- /** Does member `sym1' of `tp1' have a stronger type
- * than member `sym2' of `tp2'?
+ /** Does member `sym1` of `tp1` have a stronger type
+ * than member `sym2` of `tp2`?
*/
private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol): Boolean = {
val info1 = tp1.memberInfo(sym1)
@@ -4840,7 +4824,7 @@ A type's typeSymbol should never be inspected directly.
sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
}
- /** A function implementing `tp1' matches `tp2' */
+ /** A function implementing `tp1` matches `tp2`. */
final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean = (
sameLength(tparams1, tparams2) &&
@@ -4944,7 +4928,7 @@ A type's typeSymbol should never be inspected directly.
}
*/
- /** Are `syms1' and `syms2' parameter lists with pairwise equivalent types? */
+ /** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
case Nil =>
syms2.isEmpty
@@ -4962,8 +4946,8 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** like map2, but returns list `xs' itself - instead of a copy - if function
- * `f' maps all elements to themselves.
+ /** like map2, but returns list `xs` itself - instead of a copy - if function
+ * `f` maps all elements to themselves.
*/
def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
if (xs.isEmpty) xs
@@ -4974,13 +4958,13 @@ A type's typeSymbol should never be inspected directly.
else x1 :: xs1
}
- /** Solve constraint collected in types `tvars'.
+ /** Solve constraint collected in types `tvars`.
*
* @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to `tvars'
+ * @param tparams The type parameters corresponding to `tvars`
* @param variances The variances of type parameters; need to reverse
* solution direction for all contravariant variables.
- * @param upper When `true' search for max solution else min.
+ * @param upper When `true` search for max solution else min.
*/
def solve(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean): Boolean =
@@ -5050,8 +5034,7 @@ A type's typeSymbol should never be inspected directly.
tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
}
- /** Do type arguments `targs' conform to formal parameters
- * `tparams'?
+ /** Do type arguments `targs` conform to formal parameters `tparams`?
*
* @param tparams ...
* @param targs ...
@@ -5335,8 +5318,8 @@ A type's typeSymbol should never be inspected directly.
val GlbFailure = new Throwable
- /** A global counter for glb calls in the `specializes' query connected to the `addMembers'
- * call in `glb'. There's a possible infinite recursion when `specializes' calls
+ /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
+ * call in `glb`. There's a possible infinite recursion when `specializes` calls
* memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
* The counter breaks this recursion after two calls.
* If the recursion is broken, no member is added to the glb.
@@ -5482,11 +5465,11 @@ A type's typeSymbol should never be inspected directly.
commonOwnerMap.result
}
- /** Compute lub (if variance == 1) or glb (if variance == -1) of given list
- * of types `tps'. All types in `tps' are typerefs or singletypes
+ /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
+ * of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
- * Return `Some(x)' if the computation succeeds with result `x'.
- * Return `None' if the computation fails.
+ * Return `Some(x)` if the computation succeeds with result `x`.
+ * Return `None` if the computation fails.
*/
def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
case List(tp) =>
@@ -5557,8 +5540,8 @@ A type's typeSymbol should never be inspected directly.
assert(false, tps); None
}
- /** Make symbol `sym' a member of scope `tp.decls'
- * where `thistp' is the narrowed owner type of the scope.
+ /** Make symbol `sym` a member of scope `tp.decls`
+ * where `thistp` is the narrowed owner type of the scope.
*/
def addMember(thistp: Type, tp: Type, sym: Symbol) {
assert(sym != NoSymbol)
@@ -5638,9 +5621,9 @@ A type's typeSymbol should never be inspected directly.
def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
- /** Check whether <arg>sym1</arg>'s variance conforms to <arg>sym2</arg>'s variance
+ /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
*
- * If <arg>sym2</arg> is invariant, <arg>sym1</arg>'s variance is irrelevant. Otherwise they must be equal.
+ * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
*/
def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
@@ -5740,9 +5723,7 @@ A type's typeSymbol should never be inspected directly.
/** The current indentation string for traces */
private var indent: String = ""
- /** Perform operation `p' on arguments `tp1',
- * `arg2' and print trace of computation.
- */
+ /** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
private def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.asInstanceOf[AnyRef].getClass+")"*/)
indent = indent + " "
@@ -5752,22 +5733,17 @@ A type's typeSymbol should never be inspected directly.
result
}
- /** If option `explaintypes' is set, print a subtype trace for
- * `found <:< required'.
- */
+ /** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */
def explainTypes(found: Type, required: Type) {
if (settings.explaintypes.value) withTypesExplained(found <:< required)
}
- /** If option `explaintypes' is set, print a subtype trace for
- * `op(found, required)'.
- */
+ /** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */
def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
if (settings.explaintypes.value) withTypesExplained(op(found, required))
}
- /** Execute `op' while printing a trace of the operations on types executed.
- */
+ /** Execute `op` while printing a trace of the operations on types executed. */
def withTypesExplained[A](op: => A): A = {
val s = explainSwitch
try { explainSwitch = true; op } finally { explainSwitch = s }
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 3b31730827..1148b4de28 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -227,7 +227,7 @@ trait DocComments { self: Global =>
}
}
- /** Expand variable occurrences in string `str', until a fix point is reached or
+ /** Expand variable occurrences in string `str`, until a fix point is reached or
* a expandLimit is exceeded.
*
* @param str The string to be expanded
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 92b55e7702..35b1fc3ee8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -108,7 +108,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
else arg
}
- /** Make forwarder to method `target', passing all parameters in `params' */
+ /** Make forwarder to method `target`, passing all parameters in `params` */
def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) =
(target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg))
@@ -194,8 +194,8 @@ abstract class TreeGen extends reflect.internal.TreeGen {
else Block(prefix, containing) setPos (prefix.head.pos union containing.pos)
}
- /** Return a double-checked locking idiom around the syncBody tree. It guards with 'cond' and
- * synchronizez on 'clazz.this'. Additional statements can be included after initialization,
+ /** Return a double-checked locking idiom around the syncBody tree. It guards with `cond` and
+ * synchronizes on `clazz.this`. Additional statements can be included after initialization,
* (outside the synchronized block).
*
* The idiom works only if the condition is using a volatile field.
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index c7134ce91a..7793299978 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -397,12 +397,12 @@ self =>
/* --------------- PLACEHOLDERS ------------------------------------------- */
- /** The implicit parameters introduced by `_' in the current expression.
+ /** The implicit parameters introduced by `_` in the current expression.
* Parameters appear in reverse order
*/
var placeholderParams: List[ValDef] = Nil
- /** The placeholderTypes introduced by `_' in the current type.
+ /** The placeholderTypes introduced by `_` in the current type.
* Parameters appear in reverse order
*/
var placeholderTypes: List[TypeDef] = Nil
@@ -547,7 +547,7 @@ self =>
offset
}
- /** semi = nl {nl} | `;'
+ /** semi = nl {nl} | `;`
* nl = `\n' // where allowed
*/
def acceptStatSep(): Unit = in.token match {
@@ -838,8 +838,8 @@ self =>
ExistentialTypeTree(t, whereClauses)
}
- /** Type ::= InfixType `=>' Type
- * | `(' [`=>' Type] `)' `=>' Type
+ /** Type ::= InfixType `=>` Type
+ * | `(` [`=>` Type] `)` `=>` Type
* | InfixType [ExistentialClause]
* ExistentialClause ::= forSome `{' ExistentialDcl {semi ExistentialDcl}} `}'
* ExistentialDcl ::= type TypeDcl | val ValDcl
@@ -857,7 +857,7 @@ self =>
}
}
- /** TypeArgs ::= `[' ArgType {`,' ArgType} `]'
+ /** TypeArgs ::= `[` ArgType {`,` ArgType} `]`
*/
def typeArgs(): List[Tree] = inBrackets(types())
@@ -866,10 +866,10 @@ self =>
def annotType(): Tree = placeholderTypeBoundary { annotTypeRest(simpleType()) }
/** SimpleType ::= SimpleType TypeArgs
- * | SimpleType `#' Id
+ * | SimpleType `#` Id
* | StableId
- * | Path `.' type
- * | `(' Types `)'
+ * | Path `.` type
+ * | `(` Types `)`
* | WildcardType
*/
def simpleType(): Tree = {
@@ -952,7 +952,7 @@ self =>
def infixType(mode: InfixMode.Value): Tree =
placeholderTypeBoundary { infixTypeRest(compoundType(), mode) }
- /** Types ::= Type {`,' Type}
+ /** Types ::= Type {`,` Type}
*/
def types(): List[Tree] = commaSeparated(argType())
def functionTypes(): List[Tree] = commaSeparated(functionArgType())
@@ -984,8 +984,8 @@ self =>
}
/** Path ::= StableId
- * | [Ident `.'] this
- * AnnotType ::= Path [`.' type]
+ * | [Ident `.`] this
+ * AnnotType ::= Path [`.` type]
*/
def path(thisOK: Boolean, typeOK: Boolean): Tree = {
val start = in.offset
@@ -1040,20 +1040,20 @@ self =>
else t1
}
- /** MixinQualifier ::= `[' Id `]'
+ /** MixinQualifier ::= `[` Id `]`
*/
def mixinQualifierOpt(): TypeName =
if (in.token == LBRACKET) inBrackets(identForType())
else tpnme.EMPTY
/** StableId ::= Id
- * | Path `.' Id
- * | [id '.'] super [`[' id `]']`.' id
+ * | Path `.` Id
+ * | [id `.`] super [`[` id `]`]`.` id
*/
def stableId(): Tree =
path(false, false)
- /** QualId ::= Id {`.' Id}
+ /** QualId ::= Id {`.` Id}
*/
def qualId(): Tree = {
val start = in.offset
@@ -1128,7 +1128,7 @@ self =>
/* ------------- TYPES ---------------------------------------------------- */
- /** TypedOpt ::= [`:' Type]
+ /** TypedOpt ::= [`:` Type]
*/
def typedOpt(): Tree =
if (in.token == COLON) { in.nextToken(); typ() }
@@ -1141,7 +1141,7 @@ self =>
def annotTypeRest(t: Tree): Tree =
(t /: annotations(false)) (makeAnnotated)
- /** WildcardType ::= `_' TypeBounds
+ /** WildcardType ::= `_` TypeBounds
*/
def wildcardType(start: Int) = {
val pname = freshTypeName("_$")
@@ -1154,7 +1154,7 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** EqualsExpr ::= `=' Expr
+ /** EqualsExpr ::= `=` Expr
*/
def equalsExpr(): Tree = {
accept(EQUALS)
@@ -1178,26 +1178,26 @@ self =>
*/
def statement(location: Int): Tree = expr(location) // !!! still needed?
- /** Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
+ /** Expr ::= (Bindings | [`implicit`] Id | `_`) `=>` Expr
* | Expr1
- * ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block
+ * ResultExpr ::= (Bindings | Id `:` CompoundType) `=>` Block
* | Expr1
- * Expr1 ::= if `(' Expr `)' {nl} Expr [[semi] else Expr]
+ * Expr1 ::= if `(` Expr `)` {nl} Expr [[semi] else Expr]
* | try (`{' Block `}' | Expr) [catch `{' CaseClauses `}'] [finally Expr]
- * | while `(' Expr `)' {nl} Expr
- * | do Expr [semi] while `(' Expr `)'
- * | for (`(' Enumerators `)' | '{' Enumerators '}') {nl} [yield] Expr
+ * | while `(` Expr `)` {nl} Expr
+ * | do Expr [semi] while `(` Expr `)`
+ * | for (`(` Enumerators `)` | `{` Enumerators `}`) {nl} [yield] Expr
* | throw Expr
* | return [Expr]
- * | [SimpleExpr `.'] Id `=' Expr
- * | SimpleExpr1 ArgumentExprs `=' Expr
+ * | [SimpleExpr `.`] Id `=` Expr
+ * | SimpleExpr1 ArgumentExprs `=` Expr
* | PostfixExpr Ascription
* | PostfixExpr match `{' CaseClauses `}'
- * Bindings ::= `(' [Binding {`,' Binding}] `)'
- * Binding ::= (Id | `_') [`:' Type]
- * Ascription ::= `:' CompoundType
- * | `:' Annotation {Annotation}
- * | `:' `_' `*'
+ * Bindings ::= `(` [Binding {`,` Binding}] `)`
+ * Binding ::= (Id | `_`) [`:` Type]
+ * Ascription ::= `:` CompoundType
+ * | `:` Annotation {Annotation}
+ * | `:` `_` `*`
*/
def expr(): Tree = expr(Local)
@@ -1402,7 +1402,7 @@ self =>
reduceStack(true, base, top, 0, true)
}
- /** PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ /** PrefixExpr ::= [`-` | `+` | `~` | `!` | `&`] SimpleExpr
*/
def prefixExpr(): Tree = {
if (isUnaryOp) {
@@ -1418,12 +1418,12 @@ self =>
/* SimpleExpr ::= new (ClassTemplate | TemplateBody)
* | BlockExpr
- * | SimpleExpr1 [`_']
+ * | SimpleExpr1 [`_`]
* SimpleExpr1 ::= literal
* | xLiteral
* | Path
- * | `(' [Exprs] `)'
- * | SimpleExpr `.' Id
+ * | `(` [Exprs] `)`
+ * | SimpleExpr `.` Id
* | SimpleExpr TypeArgs
* | SimpleExpr1 ArgumentExprs
*/
@@ -1503,7 +1503,7 @@ self =>
}
}
- /** ArgumentExprs ::= `(' [Exprs] `)'
+ /** ArgumentExprs ::= `(` [Exprs] `)`
* | [nl] BlockExpr
*/
def argumentExprs(): List[Tree] = {
@@ -1543,7 +1543,7 @@ self =>
def block(): Tree = makeBlock(blockStatSeq())
/** CaseClauses ::= CaseClause {CaseClause}
- * CaseClause ::= case Pattern [Guard] `=>' Block
+ * CaseClause ::= case Pattern [Guard] `=>` Block
*/
def caseClauses(): List[CaseDef] = {
val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
@@ -1566,7 +1566,7 @@ self =>
/** Enumerators ::= Generator {semi Enumerator}
* Enumerator ::= Generator
* | Guard
- * | val Pattern1 `=' Expr
+ * | val Pattern1 `=` Expr
*/
def enumerators(): List[Enumerator] = {
val enums = new ListBuffer[Enumerator]
@@ -1579,7 +1579,7 @@ self =>
enums.toList
}
- /** Generator ::= Pattern1 (`<-' | '=') Expr [Guard]
+ /** Generator ::= Pattern1 (`<-` | `=`) Expr [Guard]
*/
def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
val start = in.offset
@@ -1626,13 +1626,13 @@ self =>
}
}
- /** Patterns ::= Pattern { `,' Pattern }
- * SeqPatterns ::= SeqPattern { `,' SeqPattern }
+ /** Patterns ::= Pattern { `,` Pattern }
+ * SeqPatterns ::= SeqPattern { `,` SeqPattern }
*/
def patterns(): List[Tree] = commaSeparated(pattern())
- /** Pattern ::= Pattern1 { `|' Pattern1 }
- * SeqPattern ::= SeqPattern1 { `|' SeqPattern1 }
+ /** Pattern ::= Pattern1 { `|` Pattern1 }
+ * SeqPattern ::= SeqPattern1 { `|` SeqPattern1 }
*/
def pattern(): Tree = {
val start = in.offset
@@ -1646,11 +1646,11 @@ self =>
}
}
- /** Pattern1 ::= varid `:' TypePat
- * | `_' `:' TypePat
+ /** Pattern1 ::= varid `:` TypePat
+ * | `_` `:` TypePat
* | Pattern2
- * SeqPattern1 ::= varid `:' TypePat
- * | `_' `:' TypePat
+ * SeqPattern1 ::= varid `:` TypePat
+ * | `_` `:` TypePat
* | [SeqPattern2]
*/
def pattern1(): Tree = pattern2() match {
@@ -1700,18 +1700,18 @@ self =>
}
/** SimplePattern ::= varid
- * | `_'
+ * | `_`
* | literal
* | XmlPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
- * | `(' [Patterns] `)'
+ * | StableId [TypeArgs] [`(` [SeqPatterns] `)`]
+ * | `(` [Patterns] `)`
* SimpleSeqPattern ::= varid
- * | `_'
+ * | `_`
* | literal
* | XmlPattern
- * | `<' xLiteralPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
- * | `(' [SeqPatterns] `)'
+ * | `<` xLiteralPattern
+ * | StableId [TypeArgs] [`(` [SeqPatterns] `)`]
+ * | `(` [SeqPatterns] `)`
*
* XXX: Hook for IDE
*/
@@ -1799,8 +1799,8 @@ self =>
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- /** Drop `private' modifier when followed by a qualifier.
- * Contract `abstract' and `override' to ABSOVERRIDE
+ /** Drop `private` modifier when followed by a qualifier.
+ * Contract `abstract` and `override` to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
@@ -1884,8 +1884,8 @@ self =>
loop(NoMods)
}
- /** Annotations ::= {`@' SimpleType {ArgumentExprs}}
- * ConsrAnnotations ::= {`@' SimpleType ArgumentExprs}
+ /** Annotations ::= {`@` SimpleType {ArgumentExprs}}
+ * ConstrAnnotations ::= {`@` SimpleType ArgumentExprs}
*/
def annotations(skipNewLines: Boolean): List[Tree] = readAnnots {
val t = annotationExpr()
@@ -1904,14 +1904,14 @@ self =>
/* -------- PARAMETERS ------------------------------------------- */
- /** ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
- * ParamClause ::= [nl] `(' [Params] ')'
- * Params ::= Param {`,' Param}
- * Param ::= {Annotation} Id [`:' ParamType] [`=' Expr]
- * ClassParamClauses ::= {ClassParamClause} [[nl] `(' implicit ClassParams `)']
- * ClassParamClause ::= [nl] `(' [ClassParams] ')'
- * ClassParams ::= ClassParam {`,' ClassParam}
- * ClassParam ::= {Annotation} [{Modifier} (`val' | `var')] Id [`:' ParamType] [`=' Expr]
+ /** ParamClauses ::= {ParamClause} [[nl] `(` implicit Params `)`]
+ * ParamClause ::= [nl] `(` [Params] `)`
+ * Params ::= Param {`,` Param}
+ * Param ::= {Annotation} Id [`:` ParamType] [`=` Expr]
+ * ClassParamClauses ::= {ClassParamClause} [[nl] `(` implicit ClassParams `)`]
+ * ClassParamClause ::= [nl] `(` [ClassParams] `)`
+ * ClassParams ::= ClassParam {`,` ClassParam}
+ * ClassParam ::= {Annotation} [{Modifier} (`val` | `var`)] Id [`:` ParamType] [`=` Expr]
*/
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
var implicitmod = 0
@@ -2002,7 +2002,7 @@ self =>
addEvidenceParams(owner, result, contextBounds)
}
- /** ParamType ::= Type | `=>' Type | Type `*'
+ /** ParamType ::= Type | `=>` Type | Type `*`
*/
def paramType(): Tree = paramType(useStartAsPosition = false)
def paramType(useStartAsPosition: Boolean): Tree = {
@@ -2023,10 +2023,10 @@ self =>
}
/** TypeParamClauseOpt ::= [TypeParamClause]
- * TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
- * VariantTypeParam ::= {Annotation} [`+' | `-'] TypeParam
+ * TypeParamClause ::= `[` VariantTypeParam {`,` VariantTypeParam} `]`
+ * VariantTypeParam ::= {Annotation} [`+` | `-`] TypeParam
* FunTypeParamClauseOpt ::= [FunTypeParamClause]
- * FunTypeParamClause ::= `[' TypeParam {`,' TypeParam} `]']
+ * FunTypeParamClause ::= `[` TypeParam {`,` TypeParam} `]`
* TypeParam ::= Id TypeParamClauseOpt TypeBounds {<% Type} {":" Type}
*/
def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree]): List[TypeDef] = {
@@ -2068,7 +2068,7 @@ self =>
else Nil
}
- /** TypeBounds ::= [`>:' Type] [`<:' Type]
+ /** TypeBounds ::= [`>:` Type] [`<:` Type]
*/
def typeBounds(): TypeBoundsTree = {
val t = TypeBoundsTree(
@@ -2085,7 +2085,7 @@ self =>
/* -------- DEFS ------------------------------------------- */
- /** Import ::= import ImportExpr {`,' ImportExpr}
+ /** Import ::= import ImportExpr {`,` ImportExpr}
*/
def importClause(): List[Tree] = {
val offset = accept(IMPORT)
@@ -2098,7 +2098,7 @@ self =>
}
}
- /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ /** ImportExpr ::= StableId `.` (Id | `_` | ImportSelectors)
*/
def importExpr(): Tree = {
val start = in.offset
@@ -2144,7 +2144,7 @@ self =>
})
}
- /** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+ /** ImportSelectors ::= `{` {ImportSelector `,`} (ImportSelector | `_`) `}`
*/
def importSelectors(): List[ImportSelector] = {
val selectors = inBracesOrNil(commaSeparated(importSelector()))
@@ -2160,7 +2160,7 @@ self =>
else ident()
}
- /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+ /** ImportSelector ::= Id [`=>` Id | `=>` `_`]
*/
def importSelector(): ImportSelector = {
val start = in.offset
@@ -2213,9 +2213,9 @@ self =>
defOrDcl(caseAwareTokenOffset, modifiers() withAnnotations annots)
}
- /** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
- * ValDcl ::= Id {`,' Id} `:' Type
- * VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
+ /** PatDef ::= Pattern2 {`,` Pattern2} [`:` Type] `=` Expr
+ * ValDcl ::= Id {`,` Id} `:` Type
+ * VarDef ::= PatDef | Id {`,` Id} `:` Type `=` `_`
*/
def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = {
var newmods = mods
@@ -2261,8 +2261,8 @@ self =>
}
/** VarDef ::= PatDef
- * | Id {`,' Id} `:' Type `=' `_'
- * VarDcl ::= Id {`,' Id} `:' Type
+ * | Id {`,` Id} `:` Type `=` `_`
+ * VarDcl ::= Id {`,` Id} `:` Type
def varDefOrDcl(mods: Modifiers): List[Tree] = {
var newmods = mods | Flags.MUTABLE
val lhs = new ListBuffer[(Int, Name)]
@@ -2286,10 +2286,10 @@ self =>
}
*/
- /** FunDef ::= FunSig `:' Type `=' Expr
+ /** FunDef ::= FunSig `:` Type `=` Expr
* | FunSig [nl] `{' Block `}'
- * | this ParamClause ParamClauses (`=' ConstrExpr | [nl] ConstrBlock)
- * FunDcl ::= FunSig [`:' Type]
+ * | this ParamClause ParamClauses (`=` ConstrExpr | [nl] ConstrBlock)
+ * FunDcl ::= FunSig [`:` Type]
* FunSig ::= id [FunTypeParamClause] ParamClauses
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
@@ -2370,7 +2370,7 @@ self =>
Block(stats, Literal(()))
}
- /** TypeDef ::= type Id [TypeParamClause] `=' Type
+ /** TypeDef ::= type Id [TypeParamClause] `=` Type
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
*/
def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
@@ -2480,7 +2480,7 @@ self =>
}
}
- /** ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
+ /** ClassParents ::= AnnotType {`(` [Exprs] `)`} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
*/
def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
@@ -2536,7 +2536,7 @@ self =>
/** ClassTemplateOpt ::= 'extends' ClassTemplate | [['extends'] TemplateBody]
* TraitTemplateOpt ::= TraitExtends TraitTemplate | [['extends'] TemplateBody] | '<:' TemplateBody
- * TraitExtends ::= 'extends' | `<:'
+ * TraitExtends ::= 'extends' | `<:`
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
val (parents0, argss, self, body) = (
@@ -2668,7 +2668,7 @@ self =>
stats.toList
}
- /** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
+ /** TemplateStatSeq ::= [id [`:` Type] `=>`] TemplateStat {semi TemplateStat}
* TemplateStat ::= Import
* | Annotations Modifiers Def
* | Annotations Modifiers Dcl
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 810d3e7497..b82ff4fb4f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -253,7 +253,7 @@ abstract class TreeBuilder {
LabelDef(lname, Nil, rhs)
}
- /** Create block of statements `stats' */
+ /** Create block of statements `stats` */
def makeBlock(stats: List[Tree]): Tree =
if (stats.isEmpty) Literal(())
else if (!stats.last.isTerm) Block(stats, Literal(()))
@@ -594,7 +594,7 @@ abstract class TreeBuilder {
def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
- /** Append implicit parameter section if `contextBounds' nonempty */
+ /** Append implicit parameter section if `contextBounds` nonempty */
def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] =
if (contextBounds.isEmpty) vparamss
else {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 70bbd85330..dc7c494c7f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -84,7 +84,7 @@ trait BasicBlocks {
var varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
/** ICode instructions, used as temporary storage while emitting code.
- * Once closed is called, only the `instrs' array should be used.
+ * Once closed is called, only the `instrs` array should be used.
*/
private var instructionList: List[Instruction] = Nil
@@ -149,7 +149,7 @@ trait BasicBlocks {
/**
* Replace the given instruction with the new one.
- * Returns `true' if it actually changed something.
+ * Returns `true` if it actually changed something.
* It retains the position of the previous instruction.
*/
def replaceInstruction(oldInstr: Instruction, newInstr: Instruction): Boolean = {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 993002dc1d..1880bdc52c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -12,10 +12,10 @@ import scala.collection.{ mutable, immutable, generic }
import util.{ Position, NoPosition }
/**
- * Exception handlers are pieces of code that `handle' exceptions on
+ * Exception handlers are pieces of code that `handle` exceptions on
* the covered basic blocks. Since Scala's exception handling uses
* pattern matching instead of just class names to identify handlers,
- * all our handlers will catch `Throwable' and rely on proper ordering
+ * all our handlers will catch `Throwable` and rely on proper ordering
* in the generated code to preserve nesting.
*/
trait ExceptionHandlers {
@@ -46,7 +46,7 @@ trait ExceptionHandlers {
this
}
- /** Is `b' covered by this exception handler? */
+ /** Is `b` covered by this exception handler? */
def covers(b: BasicBlock): Boolean = covered(b)
/** The body of this exception handler. May contain 'dead' blocks (which will not
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index dd85541780..d54100b53a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1168,7 +1168,7 @@ abstract class GenICode extends SubComponent {
}
}
- /** Load the qualifier of `tree' on top of the stack. */
+ /** Load the qualifier of `tree` on top of the stack. */
private def genLoadQualifier(tree: Tree, ctx: Context): Context =
tree match {
case Select(qualifier, _) =>
@@ -1884,7 +1884,7 @@ abstract class GenICode extends SubComponent {
/** current exception handlers */
var handlers: List[ExceptionHandler] = Nil
- /** The current monitors or finalizers, to be cleaned up upon `return'. */
+ /** The current monitors or finalizers, to be cleaned up upon `return`. */
var cleanups: List[Cleanup] = Nil
/** The exception handlers we are currently generating code for */
@@ -2178,8 +2178,8 @@ abstract class GenICode extends SubComponent {
* but with `NoSymbol` as the exception class. The covered blocks are all blocks of
* the `try { .. } catch { .. }`.
*
- * Also, TryMsil does not enter any Finalizers into the `cleanups', because the
- * CLI takes care of running the finalizer when seeing a `leave' statement inside
+ * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
+ * CLI takes care of running the finalizer when seeing a `leave` statement inside
* a try / catch.
*/
def TryMsil(body: Context => Context,
@@ -2232,7 +2232,7 @@ abstract class GenICode extends SubComponent {
* Represent a label in the current method code. In order
* to support forward jumps, labels can be created without
* having a deisgnated target block. They can later be attached
- * by calling `anchor'.
+ * by calling `anchor`.
*/
class Label(val symbol: Symbol) {
var anchored = false
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 952949f9c2..10f01a5d37 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -271,7 +271,7 @@ trait TypeKinds { self: ICodes =>
"REFERENCE to NoSymbol not allowed!")
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
@@ -307,7 +307,7 @@ trait TypeKinds { self: ICodes =>
}
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
@@ -352,7 +352,7 @@ trait TypeKinds { self: ICodes =>
override def toString = "ConcatClass"
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index 7d96c72d2b..e5eeff0d1c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -88,7 +88,7 @@ abstract class Liveness {
/** Abstract interpretation for one instruction. Very important:
* liveness is a backward DFA, so this method should be used to compute
- * liveness *before* the given instruction `i'.
+ * liveness *before* the given instruction `i`.
*/
def interpret(out: lattice.Elem, i: Instruction): lattice.Elem = {
var in = out
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 5b4e783db0..a4753800a2 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -983,8 +983,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
}
- /** Add forwarders for all methods defined in `module' that don't conflict
- * with methods in the companion class of `module'. A conflict arises when
+ /** Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
* a method with the same name is defined both in a class and its companion
* object: method signature is not taken into account.
*/
@@ -1014,7 +1014,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
lazy val membersInCommon =
memberNames(linkedModule) intersect memberNames(linkedClass)
- /** Should method `m' get a forwarder in the mirror class? */
+ /** Should method `m` get a forwarder in the mirror class? */
def shouldForward(m: Symbol): Boolean = (
m.owner != ObjectClass
&& m.isMethod
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 7abba299b5..5a4d192eff 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -626,7 +626,7 @@ abstract class GenMSIL extends SubComponent {
val endExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
/** When emitting the code (genBlock), the number of currently active try / catch
- * blocks. When seeing a `RETURN' inside a try / catch, we need to
+ * blocks. When seeing a `RETURN` inside a try / catch, we need to
* - store the result in a local (if it's not UNIT)
* - emit `Leave handlerReturnLabel` instead of the Return
* - emit code at the end: load the local and return its value
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index d485935a84..3f1e15ba0d 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -383,7 +383,7 @@ abstract class Inliners extends SubComponent {
handler
}
+ /** alpha-rename `l` in caller's context. */
+ /** alfa-rename `l` in caller's context. */
def dupLocal(l: Local): Local = {
val sym = caller.sym.newVariable(l.sym.pos, freshName(l.sym.name.toString))
// sym.setInfo(l.sym.tpe)
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index 6771c5da64..2ef34cdd96 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -33,7 +33,7 @@ trait BuildManager {
/** Load saved dependency information. */
def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
def compiler: scala.tools.nsc.Global
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index ac31b20aa5..747d74f96c 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -153,14 +153,14 @@ trait CompilerControl { self: Global =>
def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
postWorkItem(new AskLinkPosItem(sym, source, response))
- /** Sets sync var `response' to list of members that are visible
+ /** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
* @pre source is loaded
*/
def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
postWorkItem(new AskTypeCompletionItem(pos, response))
- /** Sets sync var `response' to list of members that are visible
+ /** Sets sync var `response` to list of members that are visible
* as members of the scope enclosing `pos`.
* @pre source is loaded
*/
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index 3a1a9d5bd9..a5361c056e 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -352,7 +352,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
success
}
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
compiler.dependencyAnalysis.dependenciesFile = file
compiler.dependencyAnalysis.saveDependencies(fromFile)
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
index 121f356c68..dd06e7dafa 100644
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
@@ -95,7 +95,7 @@ class SimpleBuildManager(val settings: Settings) extends BuildManager {
success
}
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
compiler.dependencyAnalysis.dependenciesFile = file
compiler.dependencyAnalysis.saveDependencies(fromFile)
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 4b196877a3..ddf5f7101f 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -190,7 +190,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
lookupPathInternal(path, directory, false)
}
- /** Return an abstract file that does not check that `path' denotes
+ /** Return an abstract file that does not check that `path` denotes
* an existing file.
*/
def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = {
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index fe9aa3b9c0..bae69b6bfa 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -7,9 +7,9 @@ import java.io.Writer
/** An abstract class for writing and reading Scala objects to and
* from a legible representation. The presesentation follows the following grammar:
* {{{
- * Pickled = `true' | `false' | `null' | NumericLit | StringLit |
- * Labelled | Pickled `,' Pickled
- * Labelled = StringLit `(' Pickled? `)'
+ * Pickled = `true` | `false` | `null` | NumericLit | StringLit |
+ * Labelled | Pickled `,` Pickled
+ * Labelled = StringLit `(` Pickled? `)`
* }}}
*
* All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index e726c21631..1ea0255515 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -340,7 +340,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
List() // don't pass on annotations for now
}
- /** Annotation ::= TypeName [`(' AnnotationArgument {`,' AnnotationArgument} `)']
+ /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
*/
def annotation() {
val pos = in.currentPos
@@ -577,7 +577,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
* This one is tricky because a comma might also appear in an
* initializer. Since we don't parse initializers we don't know
* what the comma signifies.
- * We solve this with a second list buffer `maybe' which contains
+ * We solve this with a second list buffer `maybe` which contains
* potential variable definitions.
* Once we have reached the end of the statement, we know whether
* these potential definitions are real or not.
@@ -589,7 +589,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
in.nextToken
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
val name = ident()
- if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=' or `;', we know it's a real variable definition
+ if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
maybe.clear()
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 83213c498c..e5b0de6040 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -23,9 +23,9 @@ trait Matrix extends MatrixAdditions {
/** Translation of match expressions.
*
- * `p': pattern
- * `g': guard
- * `bx': body index
+ * `p`: pattern
+ * `g`: guard
+ * `bx`: body index
*
* internal representation is (tvars:List[Symbol], rows:List[Row])
*
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index bfca609ca7..3d3698d425 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -37,7 +37,7 @@ trait PatternBindings extends ast.TreeDSL
case _ => newTree
}
- // used as argument to `EqualsPatternClass'
+ // used as argument to `EqualsPatternClass`
case class PseudoType(o: Tree) extends SimpleTypeProxy {
override def underlying: Type = o.tpe
override def safeToString: String = "PseudoType("+o+")"
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 09cdbe984f..65ef421b65 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -314,7 +314,7 @@ class MutableSettings(val errorFn: String => Unit) extends scala.reflect.interna
}
/** A base class for settings of all types.
- * Subclasses each define a `value' field of the appropriate type.
+ * Subclasses each define a `value` field of the appropriate type.
*/
abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue with Mutable {
/** Will be called after this Setting is set for any extra work. */
@@ -437,7 +437,7 @@ class MutableSettings(val errorFn: String => Unit) extends scala.reflect.interna
def unparse: List[String] = value
}
- /** A setting represented by a string, (`default' unless set) */
+ /** A setting represented by a string, (`default` unless set) */
class StringSetting private[nsc](
name: String,
val arg: String,
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index cd963585dc..ff54613d47 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -84,7 +84,7 @@ abstract class SymbolLoaders {
*/
abstract class SymbolLoader extends LazyType {
- /** Load source or class file for `root', return */
+ /** Load source or class file for `root`, return */
protected def doComplete(root: Symbol): Unit
def sourcefile: Option[AbstractFile] = None
@@ -170,7 +170,7 @@ abstract class SymbolLoaders {
/**
* Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false' for all synthetic classes
+ * scope. On .NET, this method returns `false` for all synthetic classes
* (anonymous classes, implementation classes, module classes), their
* symtab is encoded in the pickle of another class.
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a539d48150..92d86c85f3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1214,7 +1214,7 @@ abstract class ClassfileParser {
* If the given name is not an inner class, it returns the symbol found in 'definitions'.
*/
def classSymbol(externalName: Name): Symbol = {
- /** Return the symbol of `innerName', having the given `externalName'. */
+ /** Return the symbol of `innerName`, having the given `externalName`. */
def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
def getMember(sym: Symbol, name: Name): Symbol =
if (static)
@@ -1231,7 +1231,7 @@ abstract class ClassfileParser {
else entry.outerName
val sym = classSymbol(outerName)
val s =
- // if loading during initialization of `definitions' typerPhase is not yet set.
+ // if loading during initialization of `definitions` typerPhase is not yet set.
// in that case we simply load the member at the current phase
if (currentRun.typerPhase != null)
atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 673abb8639..efa2a18d5a 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -69,7 +69,7 @@ abstract class TypeParser {
override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
}
- /* the names `classTParams' and `newTParams' stem from the forJVM version (ClassfileParser.sigToType())
+ /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
* but there are differences that should be kept in mind.
* forMSIL, a nested class knows nothing about any type-params in the nesting class,
* therefore newTParams is redundant (other than for recording lexical order),
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 78541db82a..c3fb79a47f 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -256,7 +256,7 @@ abstract class AddInterfaces extends InfoTransform {
/** Add mixin constructor definition
* def $init$(): Unit = ()
- * to `stats' unless there is already one.
+ * to `stats` unless there is already one.
*/
private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
if (treeInfo.firstConstructor(stats) != EmptyTree) stats
@@ -296,7 +296,7 @@ abstract class AddInterfaces extends InfoTransform {
}
(tree: @unchecked) match {
case Block(stats, expr) =>
- // needs `hasSymbol' check because `supercall' could be a block (named / default args)
+ // needs `hasSymbol` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
//assert(supercall.symbol.isClassConstructor, supercall)
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 6ea6f9c4d6..72dcd2e1da 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -60,7 +60,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
parameterNamed(nme.getterName(acc.originalName))
// The constructor parameter with given name. This means the parameter
- // has given name, or starts with given name, and continues with a `$' afterwards.
+ // has given name, or starts with given name, and continues with a `$` afterwards.
def parameterNamed(name: Name): Symbol = {
def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + "$")
@@ -103,7 +103,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
}
- // Move tree into constructor, take care of changing owner from `oldowner' to constructor symbol
+ // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
def intoConstructor(oldowner: Symbol, tree: Tree) =
intoConstructorTransformer.transform(
new ChangeOwnerTraverser(oldowner, constr.symbol)(tree))
@@ -114,7 +114,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
case _ => false
}
- // Create an assignment to class field `to' with rhs `from'
+ // Create an assignment to class field `to` with rhs `from`
def mkAssign(to: Symbol, from: Tree): Tree =
localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index c5c03b9181..1dc36633db 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -234,7 +234,7 @@ abstract class Erasure extends AddInterfaces
private val traceSig = util.Tracer(traceProp)
/** This object is only used for sanity testing when -check:genjvm is set.
- * In that case we make sure that the erasure of the `normalized' type
+ * In that case we make sure that the erasure of the `normalized` type
* is the same as the erased type that's generated. Normalization means
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
@@ -519,7 +519,7 @@ abstract class Erasure extends AddInterfaces
private def safeToRemoveUnbox(cls: Symbol): Boolean =
(cls == definitions.NullClass) || isBoxedValueClass(cls)
- /** Box `tree' of unboxed type */
+ /** Box `tree` of unboxed type */
private def box(tree: Tree): Tree = tree match {
case LabelDef(name, params, rhs) =>
val rhs1 = box(rhs)
@@ -679,7 +679,7 @@ abstract class Erasure extends AddInterfaces
override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
- /** A replacement for the standard typer's `typed1' method.
+ /** A replacement for the standard typer's `typed1` method.
*/
override protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
val tree1 = try {
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 1ddb0bbf5a..7c3ace7637 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -66,14 +66,14 @@ abstract class LambdaLift extends InfoTransform {
private def isSameOwnerEnclosure(sym: Symbol) =
sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
- /** Mark symbol `sym' as being free in `enclosure', unless `sym'
- * is defined in `enclosure' or there is a class between `enclosure's owner
- * and the owner of `sym'.
- * Return `true' if there is no class between `enclosure' and
+ /** Mark symbol `sym` as being free in `enclosure`, unless `sym`
+ * is defined in `enclosure` or there is a class between `enclosure`'s owner
+ * and the owner of `sym`.
+ * Return `true` if there is no class between `enclosure` and
* the owner of sym.
* pre: sym.isLocal, (enclosure.isMethod || enclosure.isClass)
*
- * The idea of `markFree' is illustrated with an example:
+ * The idea of `markFree` is illustrated with an example:
*
* def f(x: int) = {
* class C {
@@ -83,8 +83,8 @@ abstract class LambdaLift extends InfoTransform {
* }
* }
*
- * In this case `x' is free in the primary constructor of class `C'.
- * but it is not free in `D', because after lambda lift the code would be transformed
+ * In this case `x` is free in the primary constructor of class `C`.
+ * but it is not free in `D`, because after lambda lift the code would be transformed
* as follows:
*
* def f(x$0: int) {
@@ -179,7 +179,7 @@ abstract class LambdaLift extends InfoTransform {
}
}
- /** Compute free variables map `fvs'.
+ /** Compute free variables map `fvs`.
* Also assign unique names to all
* value/variable/let that are free in some function or class, and to
* all class/function symbols that are owned by some function.
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 9708372ee4..857966fadd 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -4,7 +4,7 @@ package transform
import scala.collection.{ mutable, immutable }
abstract class LazyVals extends Transform with TypingTransformers with ast.TreeDSL {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index da92e80709..5ea480f4ae 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -116,7 +116,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def isConcreteAccessor(member: Symbol) =
member.hasAccessorFlag && (!member.isDeferred || (member hasFlag lateDEFERRED))
- /** Is member overridden (either directly or via a bridge) in base class sequence `bcs'? */
+ /** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */
def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = atPhase(ownPhase) {
def hasOverridingAccessor(clazz: Symbol) = {
clazz.info.nonPrivateDecl(member.name).alternatives.exists(
@@ -486,7 +486,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - For every trait, add all late interface members to the class info
* - For every static implementation method:
* - remove override flag
- * - create a new method definition that also has a `self' parameter
+ * - create a new method definition that also has a `self` parameter
* (which comes first) Iuli: this position is assumed by tail call elimination
* on a different receiver. Storing a new 'this' assumes it is located at
* index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
@@ -633,7 +633,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
addDef(position(sym), DefDef(sym, rhs(sym.paramss.head)))
}
- /** Add `newdefs' to `stats', removing any abstract method definitions
+ /** Add `newdefs` to `stats`, removing any abstract method definitions
* in <code>stats</code> that are matched by some symbol defined in
* <code>newDefs</code>.
*/
@@ -656,7 +656,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
deferredBitmaps(clazz) = typedPos(clazz.pos)(tree)::deferredBitmaps.getOrElse(clazz, List())
}
- /** If `stat' is a superaccessor, complete it by adding a right-hand side.
+ /** If `stat` is a superaccessor, complete it by adding a right-hand side.
* Note: superaccessors are always abstract until this point.
* The method to call in a superaccessor is stored in the accessor symbol's alias field.
* The rhs is:
@@ -1026,11 +1026,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case None =>
}
- // for all symbols `sym' in the class definition, which are mixed in:
+ // for all symbols `sym` in the class definition, which are mixed in:
for (sym <- clazz.info.decls.toList) {
if (sym hasFlag MIXEDIN) {
if (clazz hasFlag lateINTERFACE) {
- // if current class is a trait interface, add an abstract method for accessor `sym'
+ // if current class is a trait interface, add an abstract method for accessor `sym`
addDefDef(sym, vparamss => EmptyTree)
} else if (!clazz.isTrait) {
// if class is not a trait add accessor definitions
@@ -1144,7 +1144,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* to static calls of methods in implementation modules (@see staticCall)
* - change super calls to methods in implementation classes to static calls
* (@see staticCall)
- * - change `this' in implementation modules to references to the self parameter
+ * - change `this` in implementation modules to references to the self parameter
* - refer to fields in some implementation class vie an abstract method in the interface.
*/
private def postTransform(tree: Tree): Tree = {
@@ -1180,7 +1180,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - if qual != super, qual itself
* - if qual == super, and we are in an implementation class,
* the current self parameter.
- * - if qual == super, and we are not in an implementation class, `this'
+ * - if qual == super, and we are not in an implementation class, `this`
*/
def staticCall(target: Symbol) = {
if (target == NoSymbol)
@@ -1196,7 +1196,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Super(_, mix) =>
// change super calls to methods in implementation classes to static calls.
// Transform references super.m(args) as follows:
- // - if `m' refers to a trait, insert a static call to the corresponding static
+ // - if `m` refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
if (mix == tpnme.EMPTY && currentOwner.enclClass.isImplClass)
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index e7e394b21f..73d17458bf 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -9,7 +9,7 @@ package transform
/** A sample transform.
*/
abstract class SampleTransform extends Transform {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
@@ -24,19 +24,19 @@ abstract class SampleTransform extends Transform {
class SampleTransformer(unit: CompilationUnit) extends Transformer {
override def transform(tree: Tree): Tree = {
- val tree1 = super.transform(tree); // transformers always maintain `currentOwner'.
+ val tree1 = super.transform(tree); // transformers always maintain `currentOwner`.
tree1 match {
case Block(List(), expr) => // a simple optimization
expr
case Block(defs, sup @ Super(qual, mix)) => // A hypthothetic transformation, which replaces
// {super} by {super.sample}
- treeCopy.Block( // `copy' is the usual lazy tree copier
+ treeCopy.Block( // `copy` is the usual lazy tree copier
tree1, defs,
- typed( // `typed' assigns types to its tree argument
- atPos(tree1.pos)( // `atPos' fills in position of its tree argument
- Select( // The `Select' factory method is defined in class `Trees'
+ typed( // `typed` assigns types to its tree argument
+ atPos(tree1.pos)( // `atPos` fills in position of its tree argument
+ Select( // The `Select` factory method is defined in class `Trees`
sup,
- currentOwner.newValue( // creates a new term symbol owned by `currentowner'
+ currentOwner.newValue( // creates a new term symbol owned by `currentOwner`
tree1.pos,
newTermName("sample")))))) // The standard term name creator
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 10f35075af..487716c457 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -78,7 +78,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Is the given environment a valid specialization for sym?
* It is valid if each binding is from a @specialized type parameter in sym (or its owner)
- * to a type for which `sym' is specialized.
+ * to a type for which `sym` is specialized.
*/
def isValid(env: TypeEnv, sym: Symbol): Boolean = {
def validBinding(tvar: Symbol, tpe: Type, sym: Symbol) = (
@@ -156,7 +156,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def target = t
}
- /** Symbol is a specialized accessor for the `target' field. */
+ /** Symbol is a specialized accessor for the `target` field. */
case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
override def isAccessor = true
}
@@ -164,7 +164,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
- /** Symbol is a specialized override paired with `target'. */
+ /** Symbol is a specialized override paired with `target`. */
case class SpecialOverride(target: Symbol) extends SpecializedInfo
/** A specialized inner class that specializes original inner class `target` on a type parameter of the enclosing class, in the typeenv `env`. */
@@ -192,7 +192,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Map a symbol to additional information on specialization. */
private val info: mutable.Map[Symbol, SpecializedInfo] = new mutable.HashMap[Symbol, SpecializedInfo]
- /** Has `clazz' any type parameters that need be specialized? */
+ /** Has `clazz` any type parameters that need be specialized? */
def hasSpecializedParams(clazz: Symbol): Boolean =
!specializedParams(clazz).isEmpty
@@ -293,7 +293,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
lazy val primitiveTypes = ScalaValueClasses map (_.tpe)
- /** Return the types `sym' should be specialized at. This may be some of the primitive types
+ /** Return the types `sym` should be specialized at. This may be some of the primitive types
* or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a
* subtype of AnyRef (T <: AnyRef).
*/
@@ -321,7 +321,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** Return a list of all type environments for all specializations
- * of @specialized types in `tps'.
+ * of @specialized types in `tps`.
*/
private def specializations(tps: List[Symbol]): List[TypeEnv] = {
val stps = tps filter isSpecialized
@@ -557,7 +557,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Enter 'sym' in the scope of the current specialized class. It's type is
* mapped through the active environment, binding type variables to concrete
- * types. The existing typeEnv for `sym' is composed with the current active
+ * types. The existing typeEnv for `sym` is composed with the current active
* environment
*/
def enterMember(sym: Symbol): Symbol = {
@@ -573,9 +573,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
decls1.enter(subst(fullEnv)(sym))
}
- /** Create and enter in scope an overridden symbol m1 for `m' that forwards
- * to `om'. `om' is a fresh, special overload of m1 that is an implementation
- * of `m'. For example, for a
+ /** Create and enter in scope an overridden symbol m1 for `m` that forwards
+ * to `om`. `om` is a fresh, special overload of m1 that is an implementation
+ * of `m`. For example, for a
*
* class Foo[@specialized A] {
* def m(x: A) = <body> // m
@@ -758,7 +758,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
decls1
}
- /** Expand member `sym' to a set of normalized members. Normalized members
+ /** Expand member `sym` to a set of normalized members. Normalized members
* are monomorphic or polymorphic only in non-specialized types.
*
* Given method m[@specialized T, U](x: T, y: U) it returns
@@ -806,7 +806,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else List(sym)
}
- /** Specialize member `m' w.r.t. to the outer environment and the type
+ /** Specialize member `m` w.r.t. to the outer environment and the type
* parameters of the innermost enclosing class.
*
* Turns 'private' into 'protected' for members that need specialization.
@@ -843,7 +843,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
List()
}
- /** Return the specialized overload of `m', in the given environment. */
+ /** Return the specialized overload of `m`, in the given environment. */
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
specMember.name = specializedName(sym, env)
@@ -854,7 +854,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** For each method m that overrides inherited method m', add a special
- * overload method `om' that overrides the corresponding overload in the
+ * overload method `om` that overrides the corresponding overload in the
* superclass. For the following example:
*
* class IntFun extends Function1[Int, Int] {
@@ -1023,7 +1023,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def default(key: Symbol) = emptyEnv
}
- /** Apply type bindings in the given environment `env' to all declarations. */
+ /** Apply type bindings in the given environment `env` to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
decls map subst(env)
@@ -1137,8 +1137,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
conflicting(env, silent)
}
- /** Is any type variable in `env' conflicting with any if its type bounds, when
- * type bindings in `env' are taken into account?
+ /** Is any type variable in `env` conflicting with any of its type bounds, when
+ * type bindings in `env` are taken into account?
*
* A conflicting type environment could still be satisfiable.
*/
@@ -1727,7 +1727,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def forwardCtorCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
- /** A constructor parameter `f' initializes a specialized field
+ /** A constructor parameter `f` initializes a specialized field
* iff:
* - it is specialized itself
* - there is a getter for the original (non-specialized) field in the same class
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index b62b2aa2a8..0916cf989d 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -19,7 +19,7 @@ import scala.collection.{ mutable, immutable }
* - for every use of a def-parameter: x ==> x.apply()
* - for every argument to a def parameter `x: => T':
* if argument is not a reference to a def parameter:
- * convert argument `e' to (expansion of) `() => e'
+ * convert argument `e` to (expansion of) `() => e`
* - for every repeated Scala parameter `x: T*' --> x: Seq[T].
* - for every repeated Java parameter `x: T...' --> x: Array[T], except:
* if T is an unbounded abstract type, replace --> x: Array[Object]
@@ -106,7 +106,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
if (sym.isType) uncurryType(tp) else uncurry(tp)
/** Traverse tree omitting local method definitions.
- * If a `return' is encountered, set `returnFound' to true.
+ * If a `return` is encountered, set `returnFound` to true.
* Used for MSIL only.
*/
private object lookForReturns extends Traverser {
@@ -155,8 +155,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
throw ex
}
- /* Is tree a reference `x' to a call by name parameter that needs to be converted to
- * x.apply()? Note that this is not the case if `x' is used as an argument to another
+ /* Is tree a reference `x` to a call by name parameter that needs to be converted to
+ * x.apply()? Note that this is not the case if `x` is used as an argument to another
* call by name parameter.
*/
def isByNameRef(tree: Tree): Boolean =
@@ -482,7 +482,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
forMSIL && lookForReturns.found(tree)
- /** Transform tree `t' to { def f = t; f } where `f' is a fresh name
+ /** Transform tree `t` to { def f = t; f } where `f` is a fresh name
*/
def liftTree(tree: Tree) = {
if (settings.debug.value)
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 5b9ad616b4..021ab34c9f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -27,7 +27,7 @@ abstract class ConstantFolder {
case _ => null
})
- /** If tree is a constant value that can be converted to type `pt', perform
+ /** If tree is a constant value that can be converted to type `pt`, perform
* the conversion.
*
* @param tree ...
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 33822fbd43..3774411d51 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -369,14 +369,14 @@ trait Contexts { self: Analyzer =>
else "Context(%s@%s scope=%s)".format(owner.fullName, tree.getClass.getName split "[.$]" last, scope.##)
)
- /** Is `sub' a subclass of `base' or a companion object of such a subclass?
+ /** Is `sub` a subclass of `base` or a companion object of such a subclass?
*/
def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
sub.isNonBottomSubClass(base) ||
sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
- /** Return closest enclosing context that defines a superclass of `clazz', or a
- * companion module of a superclass of `clazz', or NoContext if none exists */
+ /** Return closest enclosing context that defines a superclass of `clazz`, or a
+ * companion module of a superclass of `clazz`, or NoContext if none exists */
def enclosingSuperClassContext(clazz: Symbol): Context = {
var c = this.enclClass
while (c != NoContext &&
@@ -386,7 +386,7 @@ trait Contexts { self: Analyzer =>
c
}
- /** Return closest enclosing context that defines a subclass of `clazz' or a companion
+ /** Return closest enclosing context that defines a subclass of `clazz` or a companion
* object thereof, or NoContext if no such context exists
*/
def enclosingSubClassContext(clazz: Symbol): Context = {
@@ -396,7 +396,7 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is <code>sym</code> accessible as a member of tree `site' with type
+ /** Is <code>sym</code> accessible as a member of tree `site` with type
* <code>pre</code> in current context?
*
* @param sym ...
@@ -416,7 +416,7 @@ trait Contexts { self: Analyzer =>
(linked ne NoSymbol) && accessWithin(linked)
}
- /** Are we inside definition of `ab'? */
+ /** Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
@@ -438,7 +438,7 @@ trait Contexts { self: Analyzer =>
c != NoContext
}
*/
- /** Is `clazz' a subclass of an enclosing class? */
+ /** Is `clazz` a subclass of an enclosing class? */
def isSubClassOfEnclosing(clazz: Symbol): Boolean =
enclosingSuperClassContext(clazz) != NoContext
diff --git a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
index 4b0846099f..aac92ccb59 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
@@ -135,10 +135,10 @@ abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
protected def factoryName(clazz: Symbol) =
atPhase(ownPhase) { newTermName("new$"+clazz.name) }
- /** Does `clazz' contain virtual classes? */
+ /** Does `clazz` contain virtual classes? */
protected def containsVirtuals(clazz: Symbol) = clazz.info.decls.toList exists (_.isVirtualClass)
- /** The inner classes that need factory methods in `clazz'
+ /** The inner classes that need factory methods in `clazz`
* This is intended to catch situations like the following
*
* abstract class C {
@@ -197,10 +197,10 @@ abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
protected def mkPolyType(tparams: List[Symbol], tp: Type) =
if (tparams.isEmpty) tp else PolyType(tparams, tp)
- /** A lazy type to complete `sym', which is is generated for virtual class
- * `clazz'.
- * The info of the symbol is computed by method `getInfo'.
- * It is wrapped in copies of the type parameters of `clazz'.
+ /** A lazy type to complete `sym`, which is generated for virtual class
+ * `clazz`.
+ * The info of the symbol is computed by method `getInfo`.
+ * It is wrapped in copies of the type parameters of `clazz`.
*/
abstract class PolyTypeCompleter(sym: Symbol, clazz: Symbol) extends LazyType {
def getInfo: Type
@@ -359,7 +359,7 @@ abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
// all code is executed at phase ownPhase.next
/** Add trees for abstract types, worker traits, and factories (@see mkFactory)
- * to template body `stats'
+ * to template body `stats`
*/
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val stats1 = stats flatMap transformStat
@@ -408,7 +408,7 @@ abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
}
- /** The factory definition for virtual class `clazz' (@see mkFactory)
+ /** The factory definition for virtual class `clazz` (@see mkFactory)
* For a virtual class
*
* attrs mods class VC[Ts] <: Ps { decls }
@@ -454,7 +454,7 @@ abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
}
}
- /** Create an override bridge for method `meth' in concrete class `cclazz'.
+ /** Create an override bridge for method `meth` in concrete class `cclazz`.
* An override bridge has the form
*
* override f(xs1)...(xsN) = super.f(xs)...(xsN)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 4a966f1fca..9b0c44155b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -118,7 +118,7 @@ abstract class Duplicators extends Analyzer {
tpe3
}
- /** Return the new symbol corresponding to `sym'. */
+ /** Return the new symbol corresponding to `sym`. */
private def updateSym(sym: Symbol): Symbol =
if (invalidSyms.isDefinedAt(sym))
invalidSyms(sym).symbol
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index eee75e1b2a..ce206b55fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -80,7 +80,7 @@ trait EtaExpansion { self: Analyzer =>
// [...]
// val x$n = argn
// qual$1.fun(x$1, ..)..(.., x$n) }
- // Eta-expansion has to be performed on `fun'
+ // Eta-expansion has to be performed on `fun`
case Block(stats, fun) =>
defs ++= stats
liftoutPrefix(fun)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 7d4d42c224..87bde02b94 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -428,12 +428,12 @@ trait Implicits {
case _ => tp.isStable
}
- /** Does type `tp' match expected type `pt'
- * This is the case if either `pt' is a unary function type with a
- * HasMethodMatching type as result, and `tp' is a unary function
+ /** Does type `tp` match expected type `pt`
+ * This is the case if either `pt` is a unary function type with a
+ * HasMethodMatching type as result, and `tp` is a unary function
* or method type whose result type has a method whose name and type
* correspond to the HasMethodMatching type,
- * or otherwise if `tp' is compatible with `pt'.
+ * or otherwise if `tp` is compatible with `pt`.
* This method is performance critical: 5-8% of typechecking time.
*/
private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
@@ -620,15 +620,15 @@ trait Implicits {
}
}
- /** Is `sym' the standard conforms method in Predef?
- * Note: DON't replace this by sym == Predef_conforms, as Predef_conforms is a `def'
+ /** Is `sym` the standard conforms method in Predef?
+ * Note: DON'T replace this by sym == Predef_conforms, as Predef_conforms is a `def`
* which does a member lookup (it can't be a lazy val because we might reload Predef
* during resident compilations).
*/
private def isConformsMethod(sym: Symbol) =
sym.name == nme.conforms && sym.owner == PredefModule.moduleClass
- /** Should implicit definition symbol `sym' be considered for applicability testing?
+ /** Should implicit definition symbol `sym` be considered for applicability testing?
* This is the case if one of the following holds:
* - the symbol's type is initialized
* - the symbol comes from a classfile
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 40b47d8a5c..03b90ab89c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -539,7 +539,7 @@ trait Infer {
*/
def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
pt: Type): List[Type] = {
- /** Map type variable to its instance, or, if `variance' is covariant/contravariant,
+ /** Map type variable to its instance, or, if `variance` is covariant/contravariant,
* to its upper/lower bound */
def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
lazy val hiBounds = tvar.constr.hiBounds
@@ -613,7 +613,7 @@ trait Infer {
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
* @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing' and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
@@ -637,7 +637,7 @@ trait Infer {
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
* If this is not possible, throw a <code>NoInstance</code> exception.
- * Undetermined type arguments are represented by `definitions.NothingClass.tpe'.
+ * Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
@@ -794,8 +794,8 @@ trait Infer {
* @param undetparams ...
* @param ftpe the type of the function (often a MethodType)
* @param argtpes the argument types; a NamedType(name, tp) for named
- * arguments. For each NamedType, if `name' does not exist in `ftpe', that
- * type is set to `Unit', i.e. the corresponding argument is treated as
+ * arguments. For each NamedType, if `name` does not exist in `ftpe`, that
+ * type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
* @param pt ...
* @return ...
@@ -1024,7 +1024,7 @@ trait Infer {
false
}
/*
- /** Is type `tpe1' a strictly better expression alternative than type `tpe2'?
+ /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
*/
def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
isMethod(tpe2) && !isMethod(tpe1) ||
@@ -1032,7 +1032,7 @@ trait Infer {
isStrictlyBetter(tpe1, tpe2)
}
- /** Is type `tpe1' a strictly better alternative than type `tpe2'?
+ /** Is type `tpe1` a strictly better alternative than type `tpe2`?
* non-methods are always strictly better than methods
* nullary methods are always strictly better than non-nullary
* if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
@@ -1130,12 +1130,12 @@ trait Infer {
}
errorMessages.toList
}
- /** Substitute free type variables `undetparams' of polymorphic argument
- * expression `tree', given two prototypes `strictPt', and `lenientPt'.
- * `strictPt' is the first attempt prototype where type parameters
- * are left unchanged. `lenientPt' is the fall-back prototype where type
- * parameters are replaced by `WildcardType's. We try to instantiate
- * first to `strictPt' and then, if this fails, to `lenientPt'. If both
+ /** Substitute free type variables `undetparams` of polymorphic argument
+ * expression `tree`, given two prototypes `strictPt`, and `lenientPt`.
+ * `strictPt` is the first attempt prototype where type parameters
+ * are left unchanged. `lenientPt` is the fall-back prototype where type
+ * parameters are replaced by `WildcardType`s. We try to instantiate
+ * first to `strictPt` and then, if this fails, to `lenientPt`. If both
* attempts fail, an error is produced.
*/
def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) {
@@ -1190,8 +1190,8 @@ trait Infer {
}
}
- /** Substitute free type variables `undetparams' of polymorphic argument
- * expression <code>tree</code> to `targs', Error if `targs' is null
+ /** Substitute free type variables `undetparams` of polymorphic argument
+ * expression `tree` to `targs`, Error if `targs` is null.
*
* @param tree ...
* @param undetparams ...
@@ -1580,8 +1580,8 @@ trait Infer {
/** Assign <code>tree</code> the symbol and type of the alternative which
* matches prototype <code>pt</code>, if it exists.
- * If several alternatives match `pt', take parameterless one.
- * If no alternative matches `pt', take the parameterless one anyway.
+ * If several alternatives match `pt`, take parameterless one.
+ * If no alternative matches `pt`, take the parameterless one anyway.
*/
def inferExprAlternative(tree: Tree, pt: Type): Unit = tree.tpe match {
case OverloadedType(pre, alts) => tryTwice {
@@ -1633,7 +1633,7 @@ trait Infer {
}
/** Assign <code>tree</code> the type of an alternative which is applicable
- * to <code>argtpes</code>, and whose result type is compatible with `pt'.
+ * to <code>argtpes</code>, and whose result type is compatible with `pt`.
* If several applicable alternatives exist, drop the alternatives which use
* default arguments, then select the most specialized one.
* If no applicable alternative exists, and pt != WildcardType, try again
@@ -1643,7 +1643,7 @@ trait Infer {
* @param argtpes contains the argument types. If an argument is named, as
* "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
* of some NamedType does not exist in an alternative's parameter names,
- * the type is replaces by `Unit', i.e. the argument is treated as an
+ * the type is replaced by `Unit`, i.e. the argument is treated as an
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
@@ -1671,7 +1671,7 @@ trait Infer {
if (allApplicable.lengthCompare(1) <= 0) allApplicable
else allApplicable filter (alt => {
val mtypes = followApply(alt.tpe) match {
- // for functional values, the `apply' method might be overloaded
+ // for functional values, the `apply` method might be overloaded
case OverloadedType(_, alts) => alts map (_.tpe)
case t => List(t)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 54054cb26c..23a7766c3e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -46,7 +46,7 @@ trait Namers { self: Analyzer =>
def newNamer(context : Context) : Namer = new NormalNamer(context)
// In the typeCompleter (templateSig) of a case class (resp it's module),
- // synthetic `copy' (reps `apply', `unapply') methods are added. To compute
+ // synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
// their signatures, the corresponding ClassDef is needed.
// During naming, for each case class module symbol, the corresponding ClassDef
// is stored in this map. The map is cleared lazily, i.e. when the new symbol
@@ -147,7 +147,7 @@ trait Namers { self: Analyzer =>
nme.isSetterName(newS.name) ||
newS.owner.isPackageClass) &&
!((newS.owner.isTypeParameter || newS.owner.isAbstractType) &&
- newS.name.length==1 && newS.name(0)=='_') //@M: allow repeated use of `_' for higher-order type params
+ newS.name.length==1 && newS.name(0)=='_') //@M: allow repeated use of `_` for higher-order type params
}
private def setInfo[Sym <: Symbol](sym : Sym)(tpe : LazyType) : Sym = sym.setInfo(tpe)
@@ -937,7 +937,7 @@ trait Namers { self: Analyzer =>
case _ =>
}
if (tpt.isEmpty) {
- // provisionally assign `meth' a method type with inherited result type
+ // provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
meth setInfo thisMethodType(resultPt)
}
@@ -1316,9 +1316,9 @@ trait Namers { self: Analyzer =>
/** Check that symbol's definition is well-formed. This means:
* - no conflicting modifiers
- * - `abstract' modifier only for classes
- * - `override' modifier never for classes
- * - `def' modifier never for parameters of case classes
+ * - `abstract` modifier only for classes
+ * - `override` modifier never for classes
+ * - `def` modifier never for parameters of case classes
* - declarations only in mixins or abstract classes (when not @native)
*/
def validate(sym: Symbol) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 8e286ca983..18840de28f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -51,7 +51,7 @@ trait NamesDefaults { self: Analyzer =>
res.toList
}
- /** returns `true' if every element is equal to its index */
+ /** returns `true` if every element is equal to its index */
def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
/**
@@ -106,7 +106,7 @@ trait NamesDefaults { self: Analyzer =>
* Transform a function into a block, and passing context.namedApplyBlockInfo to
* the new block as side-effect.
*
- * `baseFun' is typed, the resulting block must be typed as well.
+ * `baseFun` is typed, the resulting block must be typed as well.
*
* Fun is transformed in the following way:
* - Ident(f) ==> Block(Nil, Ident(f))
@@ -114,7 +114,7 @@ trait NamesDefaults { self: Analyzer =>
* - Select(qual, f) otherwise ==> Block(ValDef(qual$1, qual), Select(qual$1, f))
* - TypeApply(fun, targs) ==> Block(Nil or qual$1, TypeApply(fun, targs))
* - Select(New(TypeTree()), <init>) ==> Block(Nil, Select(New(TypeTree()), <init>))
- * - Select(New(Select(qual, typeName)), <init>) ==> Block(Nil, Select(...)) NOTE: qual must be stable in a `new'
+ * - Select(New(Select(qual, typeName)), <init>) ==> Block(Nil, Select(...)) NOTE: qual must be stable in a `new`
*/
def baseFunBlock(baseFun: Tree): Tree = {
val isConstr = baseFun.symbol.isConstructor
@@ -299,7 +299,7 @@ trait NamesDefaults { self: Analyzer =>
if (isNamedApplyBlock(tree)) {
context.namedApplyBlockInfo.get._1
} else tree match {
- // `fun' is typed. `namelessArgs' might be typed or not, if they are types are kept.
+ // `fun` is typed. `namelessArgs` might be typed or not, if they are types are kept.
case Apply(fun, namelessArgs) =>
val transformedFun = transformNamedApplication(typer, mode, pt)(fun, x => x)
if (transformedFun.isErroneous) setError(tree)
@@ -365,7 +365,7 @@ trait NamesDefaults { self: Analyzer =>
}
/**
- * Extend the argument list `givenArgs' with default arguments. Defaults are added
+ * Extend the argument list `givenArgs` with default arguments. Defaults are added
* as named arguments calling the corresponding default getter.
*
* Example: given
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 4dcc8b3c20..e760164c7e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -163,12 +163,12 @@ abstract class RefChecks extends InfoTransform {
bridges.toList
}
- /** 1. Check all members of class `clazz' for overriding conditions.
+ /** 1. Check all members of class `clazz` for overriding conditions.
* That is for overriding member M and overridden member O:
*
* 1.1. M must have the same or stronger access privileges as O.
* 1.2. O must not be final.
- * 1.3. O is deferred, or M has `override' modifier.
+ * 1.3. O is deferred, or M has `override` modifier.
* 1.4. If O is stable, then so is M.
* // @M: LIFTED 1.5. Neither M nor O are a parameterized type alias
* 1.6. If O is a type alias, then M is an alias of O.
@@ -184,7 +184,7 @@ abstract class RefChecks extends InfoTransform {
* 2. Check that only abstract classes have deferred members
* 3. Check that concrete classes do not have deferred definitions
* that are not implemented in a subclass.
- * 4. Check that every member with an `override' modifier
+ * 4. Check that every member with an `override` modifier
* overrides some other member.
*/
private def checkAllOverrides(clazz: Symbol, typesOnly: Boolean = false) {
@@ -631,7 +631,7 @@ abstract class RefChecks extends InfoTransform {
(inclazz != clazz) && (matchingSyms != NoSymbol)
}
- // 4. Check that every defined member with an `override' modifier overrides some other member.
+ // 4. Check that every defined member with an `override` modifier overrides some other member.
for (member <- clazz.info.decls.toList)
if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
!(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
@@ -911,7 +911,7 @@ abstract class RefChecks extends InfoTransform {
def onTrees[T](f: List[Tree] => T) = f(List(qual, args.head))
def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
- // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol'
+ // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
/** Symbols which limit the warnings we can issue since they may be value types */
@@ -994,7 +994,7 @@ abstract class RefChecks extends InfoTransform {
// Transformation ------------------------------------------------------------
- /* Convert a reference to a case factory of type `tpe' to a new of the class it produces. */
+ /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
var rtpe = tpe.finalResultType
assert(rtpe.typeSymbol hasFlag CASE, tpe);
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 7bdca6dbab..310113c7d5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -22,7 +22,7 @@ import symtab.Flags._
* @version 1.0
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._
import definitions.{ UnitClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
@@ -402,7 +402,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
res
}
- /** Does `sym' need an accessor when accessed from `currentOwner'?
+ /** Does `sym` need an accessor when accessed from `currentOwner`?
* A special case arises for classes with explicit self-types. If the
* self type is a Java class, and a protected accessor is needed, we issue
* an error. If the self type is a Scala class, we don't add an accessor.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a26ce862b4..8bb9312dff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -221,7 +221,7 @@ trait Typers extends Modes {
}
}
- /** Check that `tpt' refers to a non-refinement class type */
+ /** Check that `tpt` refers to a non-refinement class type */
def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean) {
def check(tpe: Type): Unit = tpe.normalize match {
case TypeRef(pre, sym, _) if sym.isClass && !sym.isRefinementClass =>
@@ -296,9 +296,9 @@ trait Typers extends Modes {
case MethodType(formals, restpe) =>
/*
if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
- error(pos, "methods with `=>'-parameter can be converted to function values only if they take no other parameters")
+ error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters")
if (formals exists (isRepeatedParamType(_)))
- error(pos, "methods with `*'-parameters cannot be converted to function values");
+ error(pos, "methods with `*`-parameters cannot be converted to function values");
*/
if (restpe.isDependent)
error(pos, "method with dependent type "+tpe+" cannot be converted to function value")
@@ -657,8 +657,8 @@ trait Typers extends Modes {
}
}
- /** Perform the following adaptations of expression, pattern or type `tree' wrt to
- * given mode `mode' and given prototype `pt':
+ /** Perform the following adaptations of expression, pattern or type `tree` wrt
+ * given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
* (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
* (1) Resolve overloading, unless mode contains FUNmode
@@ -668,9 +668,9 @@ trait Typers extends Modes {
* unless followed by explicit type application.
* (4) Do the following to unapplied methods used as values:
* (4.1) If the method has only implicit parameters pass implicit arguments
- * (4.2) otherwise, if `pt' is a function type and method is not a constructor,
+ * (4.2) otherwise, if `pt` is a function type and method is not a constructor,
* convert to function by eta-expansion,
- * (4.3) otherwise, if the method is nullary with a result type compatible to `pt'
+ * (4.3) otherwise, if the method is nullary with a result type compatible to `pt`
* and it is not a constructor, apply it to ()
* otherwise issue an error
* (5) Convert constructors in a pattern as follows:
@@ -993,12 +993,12 @@ trait Typers extends Modes {
}
}
- /** Try to apply an implicit conversion to `qual' to that it contains
- * a method `name` which can be applied to arguments `args' with expected type `pt'.
- * If `pt' is defined, there is a fallback to try again with pt = ?.
+ /** Try to apply an implicit conversion to `qual` so that it contains
+ * a method `name` which can be applied to arguments `args` with expected type `pt`.
+ * If `pt` is defined, there is a fallback to try again with pt = ?.
* This helps avoiding propagating result information too far and solves
* #1756.
- * If no conversion is found, return `qual' unchanged.
+ * If no conversion is found, return `qual` unchanged.
*
*/
def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type): Tree = {
@@ -1017,7 +1017,7 @@ trait Typers extends Modes {
doAdapt(pt)
}
- /** Try o apply an implicit conversion to `qual' to that it contains
+ /** Try to apply an implicit conversion to `qual` so that it contains
* a method `name`. If that's ambiguous try taking arguments into account using `adaptToArguments`.
*/
def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int): Tree = {
@@ -1043,9 +1043,9 @@ trait Typers extends Modes {
}
}
- /** Try to apply an implicit conversion to `qual' to that it contains a
+ /** Try to apply an implicit conversion to `qual` to that it contains a
* member `name` of arbitrary type.
- * If no conversion is found, return `qual' unchanged.
+ * If no conversion is found, return `qual` unchanged.
*/
def adaptToName(qual: Tree, name: Name) =
if (member(qual, name) != NoSymbol) qual
@@ -2224,7 +2224,7 @@ trait Typers extends Modes {
}
}
- /** Is `tree' a block created by a named application?
+ /** Is `tree` a block created by a named application?
*/
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
@@ -2305,7 +2305,7 @@ trait Typers extends Modes {
// repeat vararg as often as needed, remove by-name
val formals = formalTypes(paramTypes, args.length)
- /** Try packing all arguments into a Tuple and apply `fun'
+ /** Try packing all arguments into a Tuple and apply `fun`
* to that. This is the last thing which is tried (after
* default arguments)
*/
@@ -2780,16 +2780,16 @@ trait Typers extends Modes {
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
- /** Given a set `rawSyms' of term- and type-symbols, and a type `tp'.
+ /** Given a set `rawSyms` of term- and type-symbols, and a type `tp`.
* produce a set of fresh type parameters and a type so that it can be
* abstracted to an existential type.
- * Every type symbol `T' in `rawSyms' is mapped to a clone.
- * Every term symbol `x' of type `T' in `rawSyms' is given an
+ * Every type symbol `T` in `rawSyms` is mapped to a clone.
+ * Every term symbol `x` of type `T` in `rawSyms` is given an
* associated type symbol of the following form:
*
* type x.type <: T with <singleton>
*
- * The name of the type parameter is `x.type', to produce nice diagnostics.
+ * The name of the type parameter is `x.type`, to produce nice diagnostics.
* The <singleton> parent ensures that the type parameter is still seen as a stable type.
* Type symbols in rawSyms are fully replaced by the new symbols.
* Term symbols are also replaced, except when they are the term
@@ -2815,7 +2815,7 @@ trait Typers extends Modes {
(typeParams, tp.subst(rawSyms, typeParamTypes))
}
- /** Compute an existential type from raw hidden symbols `syms' and type `tp'
+ /** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
def packSymbols(hidden: List[Symbol], tp: Type): Type =
if (hidden.isEmpty) tp
@@ -2863,7 +2863,7 @@ trait Typers extends Modes {
mapOver(tp)
}
}
- // add all local symbols of `tp' to `localSyms'
+ // add all local symbols of `tp` to `localSyms`
// TODO: expand higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
@@ -3193,7 +3193,7 @@ trait Typers extends Modes {
}
/** If current tree <tree> appears in <val x(: T)? = <tree>>
- * return `tp with x.type' else return `tp'.
+ * return `tp with x.type` else return `tp`.
*/
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
@@ -3702,8 +3702,8 @@ trait Typers extends Modes {
}
// detect ambiguous definition/import,
- // update `defSym' to be the final resolved symbol,
- // update `pre' to be `sym's prefix type in case it is an imported member,
+ // update `defSym` to be the final resolved symbol,
+ * update `pre` to be `sym`'s prefix type in case it is an imported member,
// and compute value of:
if (defSym.exists && impSym.exists) {
@@ -4107,7 +4107,7 @@ trait Typers extends Modes {
var qual1 = checkDead(typedQualifier(qual, mode))
if (name.isTypeName) qual1 = checkStable(qual1)
- val tree1 = // temporarily use `filter' and an alternative for `withFilter'
+ val tree1 = // temporarily use `filter` and an alternative for `withFilter`
if (name == nme.withFilter)
silent(_ => typedSelect(qual1, name)) match {
case result1: Tree =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 0391188945..d2c27d6a0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -26,21 +26,21 @@ trait Variances {
private def cut(v: Int): Int =
if (v == VARIANCES) v else 0
- /** Compute variance of type parameter `tparam' in types of all symbols `sym'. */
+ /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
(VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
- /** Compute variance of type parameter `tparam' in type of symbol `sym'. */
+ /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
else varianceInType(sym.info)(tparam)
- /** Compute variance of type parameter `tparam' in all types `tps'. */
+ /** Compute variance of type parameter `tparam` in all types `tps`. */
def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
(VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
- /** Compute variance of type parameter `tparam' in all type arguments
- * <code>tps</code> which correspond to formal type parameters `tparams1'.
+ /** Compute variance of type parameter `tparam` in all type arguments
+ * <code>tps</code> which correspond to formal type parameters `tparams1`.
*/
def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
var v: Int = VARIANCES;
@@ -53,12 +53,12 @@ trait Variances {
v
}
- /** Compute variance of type parameter `tparam' in all type annotations `annots'. */
+ /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
(VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
}
- /** Compute variance of type parameter `tparam' in type annotation `annot'. */
+ /** Compute variance of type parameter `tparam` in type annotation `annot`. */
def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
varianceInType(annot.atp)(tparam)
}
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 5a0a864d11..fdb03dedaa 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -30,7 +30,7 @@ object DocStrings {
/** Returns index of string `str` after `start` skipping longest
* sequence of space and tab characters, possibly also containing
- * a single `*' character or the `/``**` sequence.
+ * a single `*` character or the `/``**` sequence.
* @pre start == str.length || str(start) == `\n'
*/
def skipLineLead(str: String, start: Int): Int =
@@ -51,7 +51,7 @@ object DocStrings {
else start
/** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
- * which satisfies predicate `p'.
+ * which satisfies predicate `p`.
*/
def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
val idx = skipLineLead(str, skipToEol(str, start))
@@ -60,7 +60,7 @@ object DocStrings {
}
/** Return first index following `start` and starting a line (i.e. after skipLineLead)
- * which satisfies predicate `p'.
+ * which satisfies predicate `p`.
*/
def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
val idx = findNext(str, start)(p)
@@ -70,7 +70,7 @@ object DocStrings {
/** Produces a string index, which is a list of ``sections'', i.e
* pairs of start/end positions of all tagged sections in the string.
- * Every section starts with a `@' and extends to the next `@', or
+ * Every section starts with a `@` and extends to the next `@`, or
* to the end of the comment string, but excluding the final two
* characters which terminate the comment.
*/
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 49e9c12eb4..68d0359dfe 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -12,7 +12,7 @@ object Position {
}
/** The Position class and its subclasses represent positions of ASTs and symbols.
* Except for NoPosition and FakePos, every position refers to a SourceFile
- * and to an offset in the sourcefile (its `point'). For batch compilation,
+ * and to an offset in the sourcefile (its `point`). For batch compilation,
* that's all. For interactive IDE's there are also RangePositions
* and TransparentPositions. A RangePosition indicates a start and an end
* in addition to its point. TransparentPositions are a subclass of RangePositions.
@@ -87,7 +87,7 @@ trait Position {
/** The point (where the ^ is) of the position */
def point: Int = throw new UnsupportedOperationException("Position.point")
- /** The point (where the ^ is) of the position, or else `default' if undefined */
+ /** The point (where the ^ is) of the position, or else `default` if undefined */
def pointOrElse(default: Int): Int = default
/** The end of the position's range, error if not a range position */
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index d40c573ffe..5f57b0ac10 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -14,7 +14,7 @@ import scala.tools.nsc.ast._
*/
abstract class SelectiveANFTransform extends PluginComponent with Transform with
TypingTransformers with CPSUtils {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 78cc8f7ff7..167ebe08c6 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -16,7 +16,7 @@ import scala.tools.nsc.ast._
*/
abstract class SelectiveCPSTransform extends PluginComponent with
InfoTransform with TypingTransformers with CPSUtils {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 5ff5778800..c8f61d0625 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -422,7 +422,7 @@ object Array extends FallbackArrayBuilding {
* @param elem the element composing the resulting array
* @return an array composed of n elements all equal to elem
*/
- @deprecated("use `Array.fill' instead", "2.8.0")
+ @deprecated("use `Array.fill` instead", "2.8.0")
def make[T: ClassManifest](n: Int, elem: T): Array[T] = {
val a = new Array[T](n)
var i = 0
@@ -436,7 +436,7 @@ object Array extends FallbackArrayBuilding {
/** Creates an array containing the values of a given function `f`
* over given range `[0..n)`
*/
- @deprecated("use `Array.tabulate' instead", "2.8.0")
+ @deprecated("use `Array.tabulate` instead", "2.8.0")
def fromFunction[T: ClassManifest](f: Int => T)(n: Int): Array[T] = {
val a = new Array[T](n)
var i = 0
@@ -450,28 +450,28 @@ object Array extends FallbackArrayBuilding {
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2)`
*/
- @deprecated("use `Array.tabulate' instead", "2.8.0")
+ @deprecated("use `Array.tabulate` instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int) => T)(n1: Int, n2: Int): Array[Array[T]] =
fromFunction(i => fromFunction(f(i, _))(n2))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3)`
*/
- @deprecated("use `Array.tabulate' instead", "2.8.0")
+ @deprecated("use `Array.tabulate` instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
fromFunction(i => fromFunction(f(i, _, _))(n2, n3))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3, 0..n4)`
*/
- @deprecated("use `Array.tabulate' instead", "2.8.0")
+ @deprecated("use `Array.tabulate` instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
fromFunction(i => fromFunction(f(i, _, _, _))(n2, n3, n4))(n1)
/** Creates an array containing the values of a given function `f`
* over given range `[0..n1, 0..n2, 0..n3, 0..n4, 0..n5)`
*/
- @deprecated("use `Array.tabulate' instead", "2.8.0")
+ @deprecated("use `Array.tabulate` instead", "2.8.0")
def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
fromFunction(i => fromFunction(f(i, _, _, _, _))(n2, n3, n4, n5))(n1)
}
@@ -485,56 +485,56 @@ object Array extends FallbackArrayBuilding {
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int) = {
this(dim1);
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int) = {
this(dim1)
throw new Error()
}
/** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
+ @deprecated("use `Array.ofDim` instead", "2.8.0")
def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int, dim9: Int) = {
this(dim1)
throw new Error()
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index ab9741fc9f..7381be7f83 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -1,8 +1,9 @@
package scala
/** Classes and traits inheriting the `DelayedInit` marker trait
- * will have their initialization code rewritten as follows.
- * <code> becomes delayedInit(<code>)
+ * will have their initialization code rewritten as follows:
+ * `code` becomes `delayedInit(code)`
+ *
* Initialization code comprises all statements and all value definitions
* that are executed during initialization.
*/
diff --git a/src/library/scala/Math.scala b/src/library/scala/Math.scala
index b8493185a2..94620a0388 100644
--- a/src/library/scala/Math.scala
+++ b/src/library/scala/Math.scala
@@ -13,83 +13,83 @@ package scala
* operations such as the elementary exponential, logarithm, square root, and
* trigonometric functions.
*/
-@deprecated("use the scala.math package object instead.\n(Example package object usage: scala.math.Pi )", "2.8.0")
+@deprecated("use the [[scala.math]] package object instead.\n(Example package object usage: `scala.math.Pi`)", "2.8.0")
object Math extends MathCommon {
- @deprecated("Use scala.Byte.MinValue instead", "2.8.0")
+ @deprecated("Use `scala.Byte.MinValue` instead", "2.8.0")
val MIN_BYTE = java.lang.Byte.MIN_VALUE
- @deprecated("Use scala.Byte.MaxValue instead", "2.8.0")
+ @deprecated("Use `scala.Byte.MaxValue` instead", "2.8.0")
val MAX_BYTE = java.lang.Byte.MAX_VALUE
- @deprecated("Use scala.Short.MinValue instead", "2.8.0")
+ @deprecated("Use `scala.Short.MinValue` instead", "2.8.0")
val MIN_SHORT = java.lang.Short.MIN_VALUE
- @deprecated("Use scala.Short.MaxValue instead", "2.8.0")
+ @deprecated("Use `scala.Short.MaxValue` instead", "2.8.0")
val MAX_SHORT = java.lang.Short.MAX_VALUE
- @deprecated("Use scala.Char.MinValue instead", "2.8.0")
+ @deprecated("Use `scala.Char.MinValue` instead", "2.8.0")
val MIN_CHAR = java.lang.Character.MIN_VALUE
- @deprecated("Use scala.Char.MaxValue instead", "2.8.0")
+ @deprecated("Use `scala.Char.MaxValue` instead", "2.8.0")
val MAX_CHAR = java.lang.Character.MAX_VALUE
- @deprecated("Use scala.Int.MinValue instead", "2.8.0")
+ @deprecated("Use `scala.Int.MinValue` instead", "2.8.0")
val MIN_INT = java.lang.Integer.MIN_VALUE
- @deprecated("Use scala.Int.MaxValue instead", "2.8.0")
+ @deprecated("Use `scala.Int.MaxValue` instead", "2.8.0")
val MAX_INT = java.lang.Integer.MAX_VALUE
- @deprecated("Use scala.Long.MinValue instead", "2.8.0")
+ @deprecated("Use `scala.Long.MinValue` instead", "2.8.0")
val MIN_LONG = java.lang.Long.MIN_VALUE
- @deprecated("Use scala.Long.MaxValue instead", "2.8.0")
+ @deprecated("Use `scala.Long.MaxValue` instead", "2.8.0")
val MAX_LONG = java.lang.Long.MAX_VALUE
- /** The smallest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinValue instead", "2.8.0")
+ /** The smallest possible value for [[scala.Float]]. */
+ @deprecated("Use `scala.Float.MinValue` instead", "2.8.0")
val MIN_FLOAT = -java.lang.Float.MAX_VALUE
- /** The smallest difference between two values of <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinPositiveValue instead", "2.8.0")
+ /** The smallest difference between two values of [[scala.Float]]. */
+ @deprecated("Use `scala.Float.MinPositiveValue` instead", "2.8.0")
val EPS_FLOAT = java.lang.Float.MIN_VALUE
- /** The greatest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MaxValue instead", "2.8.0")
+ /** The greatest possible value for [[scala.Float]]. */
+ @deprecated("Use `scala.Float.MaxValue` instead", "2.8.0")
val MAX_FLOAT = java.lang.Float.MAX_VALUE
- /** A value of type <a href="Float.html" target="_self">scala.Float</a> that represents no number. */
- @deprecated("Use scala.Float.NaN instead", "2.8.0")
+ /** A value of type [[scala.Float]] that represents no number. */
+ @deprecated("Use `scala.Float.NaN` instead", "2.8.0")
val NaN_FLOAT = java.lang.Float.NaN
- /** Negative infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.NegativeInfinity instead", "2.8.0")
+ /** Negative infinity of type [[scala.Float]]. */
+ @deprecated("Use `scala.Float.NegativeInfinity` instead", "2.8.0")
val NEG_INF_FLOAT = java.lang.Float.NEGATIVE_INFINITY
- /** Positive infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.PositiveInfinity instead", "2.8.0")
+ /** Positive infinity of type [[scala.Float]]. */
+ @deprecated("Use `scala.Float.PositiveInfinity` instead", "2.8.0")
val POS_INF_FLOAT = java.lang.Float.POSITIVE_INFINITY
- /** The smallest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinValue instead", "2.8.0")
+ /** The smallest possible value for [[scala.Double]]. */
+ @deprecated("Use `scala.Double.MinValue` instead", "2.8.0")
val MIN_DOUBLE = -java.lang.Double.MAX_VALUE
- /** The smallest difference between two values of <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinPositiveValue instead", "2.8.0")
+ /** The smallest difference between two values of [[scala.Double]]. */
+ @deprecated("Use `scala.Double.MinPositiveValue` instead", "2.8.0")
val EPS_DOUBLE = java.lang.Double.MIN_VALUE
- /** The greatest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MaxValue instead", "2.8.0")
+ /** The greatest possible value for [[scala.Double]]. */
+ @deprecated("Use `scala.Double.MaxValue` instead", "2.8.0")
val MAX_DOUBLE = java.lang.Double.MAX_VALUE
- /** A value of type <a href="Double.html" target="_self">scala.Double</a> that represents no number. */
- @deprecated("Use scala.Double.NaN instead", "2.8.0")
+ /** A value of type [[scala.Double]] that represents no number. */
+ @deprecated("Use `scala.Double.NaN` instead", "2.8.0")
val NaN_DOUBLE = java.lang.Double.NaN
- /** Negative infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.NegativeInfinity instead", "2.8.0")
+ /** Negative infinity of type [[scala.Double]]. */
+ @deprecated("Use `scala.Double.NegativeInfinity` instead", "2.8.0")
val NEG_INF_DOUBLE = java.lang.Double.NEGATIVE_INFINITY
- /** Positive infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.PositiveInfinity instead", "2.8.0")
+ /** Positive infinity of type [[scala.Double]]. */
+ @deprecated("Use `scala.Double.PositiveInfinity` instead", "2.8.0")
val POS_INF_DOUBLE = java.lang.Double.POSITIVE_INFINITY
} \ No newline at end of file
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 55c12b8e8c..b64daa8df9 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -55,8 +55,7 @@ self =>
override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]]
override protected[this] def toCollection(repr: Repr): Iterable[A] = repr.asInstanceOf[Iterable[A]]
- /** Creates a new iterator over all elements contained in this
- * iterable object.
+ /** Creates a new iterator over all elements contained in this iterable object.
*
* @return the new iterator
*/
@@ -298,20 +297,22 @@ self =>
override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until)
- @deprecated("use `iterator' instead", "2.8.0")
+ @deprecated("use `iterator` instead", "2.8.0")
def elements = iterator
- @deprecated("use `head' instead", "2.8.0") def first: A = head
+ @deprecated("use `head` instead", "2.8.0")
+ def first: A = head
/** `None` if iterable is empty.
*/
- @deprecated("use `headOption' instead", "2.8.0") def firstOption: Option[A] = headOption
+ @deprecated("use `headOption` instead", "2.8.0")
+ def firstOption: Option[A] = headOption
/**
* returns a projection that can be used to call non-strict `filter`,
* `map`, and `flatMap` methods that build projections
* of the collection.
*/
- @deprecated("use `view' instead", "2.8.0")
+ @deprecated("use `view` instead", "2.8.0")
def projection = view
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index f7f8e7a971..b334320060 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -12,8 +12,7 @@ import mutable.ArrayBuffer
import annotation.{ tailrec, migration }
import immutable.Stream
-/** The `Iterator` object provides various functions for
- * creating specialized iterators.
+/** The `Iterator` object provides various functions for creating specialized iterators.
*
* @author Martin Odersky
* @author Matthias Zenger
@@ -22,7 +21,7 @@ import immutable.Stream
*/
object Iterator {
- /** The iterator which produces no values */
+ /** The iterator which produces no values. */
val empty = new Iterator[Nothing] {
def hasNext: Boolean = false
def next(): Nothing = throw new NoSuchElementException("next on empty iterator")
@@ -30,6 +29,7 @@ object Iterator {
/** Creates an iterator which produces a single element.
* '''Note:''' Equivalent, but more efficient than Iterator(elem)
+ *
* @param elem the element
* @return An iterator which produces `elem` on the first call to `next`,
* and which has no further elements.
@@ -42,15 +42,16 @@ object Iterator {
else empty.next()
}
- /** Creates an iterator with given elements
+ /** Creates an iterator with given elements.
+ *
* @param elems The elements returned one-by-one from the iterator
* @return An iterator which produces the given elements on the
* first calls to `next`, and which has no further elements.
*/
def apply[A](elems: A*): Iterator[A] = elems.iterator
- /** Creates iterator that produces the results of some element computation
- * a number of times.
+ /** Creates iterator that produces the results of some element computation a number of times.
+ *
* @param n the number of elements returned by the iterator.
* @param elem the element computation
* @return An iterator that produces the results of `n` evaluations of `elem`.
@@ -64,6 +65,7 @@ object Iterator {
}
/** Creates an iterator producing the values of a given function over a range of integer values starting from 0.
+ *
* @param n The number of elements returned by the iterator
* @param f The function computing element values
* @return An iterator that produces the values `f(0), ..., f(n -1)`.
@@ -137,8 +139,8 @@ object Iterator {
def next(): Int = { val result = i; i += step; result }
}
- /** Creates an infinite-length iterator returning the results of evaluating
- * an expression. The expression is recomputed for every element.
+ /** Creates an infinite-length iterator returning the results of evaluating an expression.
+ * The expression is recomputed for every element.
*
* @param elem the element computation.
* @return the iterator containing an infinite number of results of evaluating `elem`.
@@ -148,13 +150,13 @@ object Iterator {
def next = elem
}
- @deprecated("use `xs.iterator' or `Iterator(xs)' instead", "2.8.0")
+ @deprecated("use `xs.iterator` or `Iterator(xs)` instead", "2.8.0")
def fromValues[a](xs: a*) = xs.iterator
/** @param xs the array of elements
* @see also: IndexedSeq.iterator and slice
*/
- @deprecated("use `xs.iterator' instead", "2.8.0")
+ @deprecated("use `xs.iterator` instead", "2.8.0")
def fromArray[a](xs: Array[a]): Iterator[a] =
fromArray(xs, 0, xs.length)
@@ -164,7 +166,7 @@ object Iterator {
* @param length the length
* @see also: IndexedSeq.iterator and slice
*/
- @deprecated("use `xs.slice(start, start + length).iterator' instead", "2.8.0")
+ @deprecated("use `xs.slice(start, start + length).iterator` instead", "2.8.0")
def fromArray[a](xs: Array[a], start: Int, length: Int): Iterator[a] =
xs.slice(start, start + length).iterator
@@ -172,7 +174,7 @@ object Iterator {
* @param n the product arity
* @return the iterator on `Product&lt;n&gt;`.
*/
- @deprecated("use product.productIterator instead", "2.8.0")
+ @deprecated("use `product.productIterator` instead", "2.8.0")
def fromProduct(n: Product): Iterator[Any] = new Iterator[Any] {
private var c: Int = 0
private val cmax = n.productArity
@@ -180,18 +182,15 @@ object Iterator {
def next() = { val a = n productElement c; c += 1; a }
}
- /** Create an iterator with elements
- * `e<sub>n+1</sub> = step(e<sub>n</sub>)`
- * where `e<sub>0</sub> = start`
- * and elements are in the range between `start` (inclusive)
- * and `end` (exclusive)
+ /** Create an iterator with elements `e<sub>n+1</sub> = step(e<sub>n</sub>)` where `e<sub>0</sub> = start`
+ * and elements are in the range between `start` (inclusive) and `end` (exclusive).
*
* @param start the start value of the iterator
* @param end the end value of the iterator
* @param step the increment function of the iterator, must be monotonically increasing or decreasing
* @return the iterator with values in range `[start;end)`.
*/
- @deprecated("use Iterator.iterate(start, end - start)(step) instead", "2.8.0")
+ @deprecated("use `Iterator.iterate(start, end - start)(step)` instead", "2.8.0")
def range(start: Int, end: Int, step: Int => Int) = new Iterator[Int] {
private val up = step(start) > start
private val down = step(start) < start
@@ -202,27 +201,25 @@ object Iterator {
else empty.next()
}
- /** Create an iterator with elements
- * `e<sub>n+1</sub> = step(e<sub>n</sub>)`
- * where `e<sub>0</sub> = start`.
+ /** Create an iterator with elements `e<sub>n+1</sub> = step(e<sub>n</sub>)` where `e<sub>0</sub> = start`.
*
* @param start the start value of the iterator
* @param step the increment function of the iterator
* @return the iterator starting at value `start`.
*/
- @deprecated("use iterate(start)(step) instead", "2.8.0")
+ @deprecated("use `iterate(start)(step)` instead", "2.8.0")
def from(start: Int, step: Int => Int): Iterator[Int] = new Iterator[Int] {
private var i = start
override def hasNext: Boolean = true
def next(): Int = { val j = i; i = step(i); j }
}
- /** Create an iterator that is the concatenation of all iterators
- * returned by a given iterator of iterators.
- * @param its The iterator which returns on each call to next
- * a new iterator whose elements are to be concatenated to the result.
+ /** Create an iterator that is the concatenation of all iterators returned by a given iterator of iterators.
+ *
+ * @param its The iterator which returns on each call to next
+ * a new iterator whose elements are to be concatenated to the result.
*/
- @deprecated("use its.flatten instead", "2.8.0")
+ @deprecated("use `its.flatten` instead", "2.8.0")
def flatten[T](its: Iterator[Iterator[T]]): Iterator[T] = new Iterator[T] {
private var cur = its.next
def hasNext: Boolean = {
@@ -255,23 +252,27 @@ trait Iterator[+A] extends TraversableOnce[A] {
def seq: Iterator[A] = this
/** Tests whether this iterator can provide another element.
+ *
* @return `true` if a subsequent call to `next` will yield an element,
* `false` otherwise.
*/
def hasNext: Boolean
/** Produces the next element of this iterator.
+ *
* @return the next element of this iterator, if `hasNext` is `true`,
* undefined behavior otherwise.
*/
def next(): A
/** Tests whether this iterator is empty.
+ *
* @return `true` if hasNext is false, `false` otherwise.
*/
def isEmpty: Boolean = !hasNext
/** Tests whether this Iterator can be repeatedly traversed.
+ *
* @return `false`
*/
def isTraversableAgain = false
@@ -283,14 +284,14 @@ trait Iterator[+A] extends TraversableOnce[A] {
def hasDefiniteSize = isEmpty
/** Selects first ''n'' values of this iterator.
+ *
* @param n the number of values to take
* @return an iterator producing only of the first `n` values of this iterator, or else the
* whole iterator, if it produces fewer than `n` values.
*/
def take(n: Int): Iterator[A] = slice(0, n)
- /** Advances this iterator past the first ''n'' elements,
- * or the length of the iterator, whichever is smaller.
+ /** Advances this iterator past the first ''n'' elements, or the length of the iterator, whichever is smaller.
*
* @param n the number of elements to drop
* @return an iterator which produces all values of the current iterator, except
@@ -299,6 +300,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def drop(n: Int): Iterator[A] = slice(n, Int.MaxValue)
/** Creates an iterator returning an interval of the values produced by this iterator.
+ *
* @param from the index of the first element in this iterator which forms part of the slice.
* @param until the index of the first element following the slice.
* @return an iterator which advances this iterator past the first `from` elements using `drop`,
@@ -326,6 +328,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates a new iterator that maps all produced values of this iterator
* to new values using a transformation function.
+ *
* @param f the transformation function
* @return a new iterator which transforms every value produced by this
* iterator by applying the function `f` to it.
@@ -336,6 +339,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
/** Concatenates this iterator with another.
+ *
* @param that the other iterator
* @return a new iterator that first yields the values produced by this
* iterator followed by the values produced by iterator `that`.
@@ -372,9 +376,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
def next(): B = (if (hasNext) cur else empty).next()
}
- /** Returns an iterator over all the elements of this iterator that
- * satisfy the predicate `p`. The order of the elements
- * is preserved.
+ /** Returns an iterator over all the elements of this iterator that satisfy the predicate `p`.
+ * The order of the elements is preserved.
*
* @param p the predicate used to test values.
* @return an iterator which produces those values of this iterator which satisfy the predicate `p`.
@@ -1033,7 +1036,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Returns a counted iterator from this iterator.
*/
- @deprecated("use zipWithIndex in Iterator", "2.8.0")
+ @deprecated("use `zipWithIndex` in `Iterator`", "2.8.0")
def counted = new CountedIterator[A] {
private var cnt = 0
def count = cnt
@@ -1050,7 +1053,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @param start the starting index.
* @param sz the maximum number of elements to be read.
*/
- @deprecated("use copyToArray instead", "2.8.0")
+ @deprecated("use `copyToArray` instead", "2.8.0")
def readInto[B >: A](xs: Array[B], start: Int, sz: Int) {
var i = start
while (hasNext && i - start < sz) {
@@ -1059,7 +1062,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
- @deprecated("use copyToArray instead", "2.8.0")
+ @deprecated("use `copyToArray` instead", "2.8.0")
def readInto[B >: A](xs: Array[B], start: Int) {
readInto(xs, start, xs.length - start)
}
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 92be57aa89..f6a5c57ddf 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -248,7 +248,7 @@ self =>
def get(key: A) = self.get(key).map(f)
}
- @deprecated("use `mapValues' instead", "2.8.0")
+ @deprecated("use `mapValues` instead", "2.8.0")
def mapElements[C](f: B => C) = mapValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 37caf82027..08db419c03 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -290,7 +290,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
*/
def reverseIterator: Iterator[A] = toCollection(reverse).iterator
- @deprecated("use `reverseIterator' instead", "2.8.0")
+ @deprecated("use `reverseIterator` instead", "2.8.0")
def reversedElements = reverseIterator
def startsWith[B](that: GenSeq[B], offset: Int): Boolean = {
@@ -687,7 +687,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
* <code>map</code>, and <code>flatMap</code> methods that build projections
* of the collection.
*/
- @deprecated("use `view' instead", "2.8.0")
+ @deprecated("use `view` instead", "2.8.0")
override def projection = view
}
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 84fe4bdf4c..01619b0ab0 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -124,7 +124,7 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
* @param pf the partial function
* @return an option value containing pf applied to the first
* value for which it is defined, or `None` if none exists.
- * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
+ * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
*/
def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
for (x <- self.toIterator) { // make sure to use an iterator or `seq`
@@ -266,10 +266,9 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def mkString: String = mkString("")
- /** Appends all elements of this $coll to a string builder using start, end,
- * and separator strings.
- * The written text begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string `end`.
+ * Inside, the string representations (w.r.t. the method `toString`)
* of all elements of this $coll are separated by the string `sep`.
*
* @param b the string builder to which elements are appended.
@@ -297,10 +296,9 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
b
}
- /** Appends all elements of this $coll to a string builder using a separator
- * string. The written text consists of the string representations (w.r.t.
- * the method `toString`) of all elements of this $coll, separated by the
- * string `sep`.
+ /** Appends all elements of this $coll to a string builder using a separator string.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll, separated by the string `sep`.
*
* @param b the string builder to which elements are appended.
* @param sep the separator string.
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index 3fad7d462d..99d3551275 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -24,7 +24,7 @@ object TraversableView {
class NoBuilder[A] extends Builder[A, Nothing] {
def +=(elem: A): this.type = this
def iterator: Iterator[A] = Iterator.empty
- @deprecated("use `iterator' instead", "2.8.0")
+ @deprecated("use `iterator` instead", "2.8.0")
def elements = iterator
def result() = throw new UnsupportedOperationException("TraversableView.Builder.result")
def clear() {}
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 353ab9980f..b36a1e297f 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -11,7 +11,7 @@ package generic
import mutable.Builder
-/** A template class for companion objects of ``regular'' collection classes
+/** A template class for companion objects of "regular" collection classes
* represent an unconstrained higher-kinded type. Typically
* such classes inherit from trait `GenericTraversableTemplate`.
* @tparam CC The type constructor representing the collection class.
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index ea0bc7523c..5446b25888 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -250,7 +250,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
@deprecated("use `span { x => !p(x) }` instead", "2.8.0")
def break(p: A => Boolean): (List[A], List[A]) = span { x => !p(x) }
- @deprecated("use `filterNot' instead", "2.8.0")
+ @deprecated("use `filterNot` instead", "2.8.0")
def remove(p: A => Boolean): List[A] = filterNot(p)
/** Computes the difference between this list and the given list
@@ -289,10 +289,10 @@ sealed abstract class List[+A] extends LinearSeq[A]
b.toList
}
- @deprecated("use `distinct' instead", "2.8.0")
+ @deprecated("use `distinct` instead", "2.8.0")
def removeDuplicates: List[A] = distinct
- @deprecated("use `sortWith' instead", "2.8.0")
+ @deprecated("use `sortWith` instead", "2.8.0")
def sort(lt : (A,A) => Boolean): List[A] = {
/** Merge two already-sorted lists */
def merge(l1: List[A], l2: List[A]): List[A] = {
@@ -446,7 +446,7 @@ object List extends SeqFactory[List] {
* or decreasing.
* @return the sorted list of all integers in range [start;end).
*/
- @deprecated("use `iterate' instead", "2.8.0")
+ @deprecated("use `iterate` instead", "2.8.0")
def range(start: Int, end: Int, step: Int => Int): List[Int] = {
val up = step(start) > start
val down = step(start) < start
@@ -468,7 +468,7 @@ object List extends SeqFactory[List] {
* @param elem the element composing the resulting list
* @return a list composed of n elements all equal to elem
*/
- @deprecated("use `fill' instead", "2.8.0")
+ @deprecated("use `fill` instead", "2.8.0")
def make[A](n: Int, elem: A): List[A] = {
val b = new ListBuffer[A]
var i = 0
@@ -484,7 +484,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists that are to be concatenated
* @return the concatenation of all the lists
*/
- @deprecated("use `xss.flatten' instead of `List.flatten(xss)'", "2.8.0")
+ @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
def flatten[A](xss: List[List[A]]): List[A] = {
val b = new ListBuffer[A]
for (xs <- xss) {
@@ -502,7 +502,7 @@ object List extends SeqFactory[List] {
* @param xs the list of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
val b1 = new ListBuffer[A]
val b2 = new ListBuffer[B]
@@ -520,7 +520,7 @@ object List extends SeqFactory[List] {
* @param xs the iterable of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
@@ -530,7 +530,7 @@ object List extends SeqFactory[List] {
* Returns the `Left` values in the given `Iterable`
* of `Either`s.
*/
- @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'", "2.8.0")
+ @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
@@ -540,7 +540,7 @@ object List extends SeqFactory[List] {
/**
* Returns the `Right` values in the given`Iterable` of `Either`s.
*/
- @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'", "2.8.0")
+ @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
@@ -565,7 +565,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the elements returned by successive
* calls to `it.next`
*/
- @deprecated("use `it.toList' instead of `List.toList(it)'", "2.8.0")
+ @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
def fromIterator[A](it: Iterator[A]): List[A] = it.toList
/** Converts an array into a list.
@@ -574,7 +574,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.toList' instead of `List.fromArray(array)'", "2.8.0")
+ @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
/** Converts a range of an array into a list.
@@ -585,7 +585,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.view(start, end).toList' instead of `List.fromArray(array, start, end)'", "2.8.0")
+ @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
var res: List[A] = Nil
var i = start + len
@@ -603,7 +603,7 @@ object List extends SeqFactory[List] {
* @param separator the separator character
* @return the list of substrings
*/
- @deprecated("use `str.split(separator).toList' instead of `List.fromString(str, separator)'", "2.8.0")
+ @deprecated("use `str.split(separator).toList` instead of `List.fromString(str, separator)`", "2.8.0")
def fromString(str: String, separator: Char): List[String] = {
var words: List[String] = Nil
var pos = str.length()
@@ -621,7 +621,7 @@ object List extends SeqFactory[List] {
* @param xs the list to convert.
* @return the list in form of a string.
*/
- @deprecated("use `xs.mkString' instead of `List.toString(xs)'", "2.8.0")
+ @deprecated("use `xs.mkString` instead of `List.toString(xs)`", "2.8.0")
def toString(xs: List[Char]): String = {
val sb = new StringBuilder()
var xc = xs
@@ -635,7 +635,7 @@ object List extends SeqFactory[List] {
/** Like xs map f, but returns `xs` unchanged if function
* `f` maps all elements to themselves.
*/
- @deprecated("use `xs.mapConserve(f)' instead of `List.mapConserve(xs, f)'", "2.8.0")
+ @deprecated("use `xs.mapConserve(f)` instead of `List.mapConserve(xs, f)`", "2.8.0")
def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
def loop(ys: List[A]): List[A] =
if (ys.isEmpty) xs
@@ -669,7 +669,7 @@ object List extends SeqFactory[List] {
* `[a0, ..., ak]`, `[b0, ..., bl]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.map(f)' instead of `List.map2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
val b = new ListBuffer[C]
var xc = xs
@@ -693,7 +693,7 @@ object List extends SeqFactory[List] {
* `[c<sub>0</sub>, ..., c<sub>m</sub>]` and
* `n = min(k,l,m)`
*/
- @deprecated("use `(xs, ys, zs).zipped.map(f)' instead of `List.map3(xs, ys, zs)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys, zs).zipped.map(f)` instead of `List.map3(xs, ys, zs)(f)`", "2.8.0")
def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
val b = new ListBuffer[D]
var xc = xs
@@ -718,7 +718,7 @@ object List extends SeqFactory[List] {
* `[b<sub>0</sub>, ..., b<sub>l</sub>]`
* and `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.forall(f)' instead of `List.forall2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -740,7 +740,7 @@ object List extends SeqFactory[List] {
* `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.exists(f)' instead of `List.exists2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -758,7 +758,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists
* @return the transposed list of lists
*/
- @deprecated("use `xss.transpose' instead of `List.transpose(xss)'", "2.8.0")
+ @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
def transpose[A](xss: List[List[A]]): List[List[A]] = {
val buf = new ListBuffer[List[A]]
var yss = xss
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index df1cfa80cd..ea16e04ebe 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -93,7 +93,7 @@ object Map extends ImmutableMapFactory[Map] {
def - (key: Any): Map[Any, Nothing] = this
}
- @deprecated("use `Map.empty' instead", "2.8.0")
+ @deprecated("use `Map.empty` instead", "2.8.0")
class EmptyMap[A,B] extends Map[A,B] with Serializable {
override def size: Int = 0
def get(key: A): Option[B] = None
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 7551238f55..377412261c 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -59,8 +59,8 @@ object PagedSeq {
fromStrings(source.iterator)
/** Constructs a character sequence from a line iterator
- * Lines do not contain trailing `\n' characters; The method inserts
- * a line separator `\n' between any two lines in the sequence.
+ * Lines do not contain trailing `\n` characters; The method inserts
+ * a line separator `\n` between any two lines in the sequence.
*/
def fromLines(source: Iterator[String]): PagedSeq[Char] = {
var isFirst = true
@@ -73,8 +73,8 @@ object PagedSeq {
}
/** Constructs a character sequence from a line iterable
- * Lines do not contain trailing `\n' characters; The method inserts
- * a line separator `\n' between any two lines in the sequence.
+ * Lines do not contain trailing `\n` characters; The method inserts
+ * a line separator `\n` between any two lines in the sequence.
*/
def fromLines(source: Iterable[String]): PagedSeq[Char] =
fromLines(source.iterator)
@@ -104,7 +104,7 @@ object PagedSeq {
import PagedSeq._
/** An implementation of lazily computed sequences, where elements are stored
- * in ``pages'', i.e. arrays of fixed size.
+ * in "pages", i.e. arrays of fixed size.
*
* @tparam T the type of the elements contained in this paged sequence, with a `ClassManifest` context bound.
*
@@ -156,14 +156,14 @@ extends scala.collection.IndexedSeq[T]
(latest.end min end) - start
}
- /** The character at position `index'.
+ /** The character at position `index`.
*/
def apply(index: Int) =
if (isDefinedAt(index)) page(index + start)(index + start)
else throw new IndexOutOfBoundsException(index.toString)
- /** Is character sequence defined at `index'?
- * Unlike `length' this operation does not force reading
+ /** Is character sequence defined at `index`?
+ * Unlike `length` this operation does not force reading
* a lazy sequence to the end.
*/
override def isDefinedAt(index: Int) =
@@ -171,8 +171,8 @@ extends scala.collection.IndexedSeq[T]
val p = page(index + start); index + start < p.end
}
- /** the subsequence from index `start' up to and excluding
- * the minimum of index `end' and the length of the current sequence.
+ /** the subsequence from index `start` up to and excluding
+ * the minimum of index `end` and the length of the current sequence.
*/
override def slice(_start: Int, _end: Int): PagedSeq[T] = {
page(start)
@@ -183,7 +183,7 @@ extends scala.collection.IndexedSeq[T]
new PagedSeq(more, f, s, e)
}
- /** the subsequence from index `start' up to the
+ /** the subsequence from index `start` up to the
* length of the current sequence.
*/
def slice(start: Int): PagedSeq[T] = slice(start, UndeterminedEnd)
@@ -212,7 +212,7 @@ private class Page[T: ClassManifest](val num: Int) {
/** The number of characters read into this page */
var filled: Int = 0
- /** Is this page the permamnently last one in the sequence? Only true once `more'
+ /** Is this page the permanently last one in the sequence? Only true once `more`
* method has returned -1 to signal end of input. */
var isLast: Boolean = false
@@ -239,7 +239,7 @@ private class Page[T: ClassManifest](val num: Int) {
data(index - start)
}
- /** produces more characters by calling `more' and appends them on the current page,
+ /** produces more characters by calling `more` and appends them on the current page,
* or fills a subsequent page if current page is full
* pre: if current page is full, it is the last one in the sequence.
*/
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 4bea8fbaf1..bac60ac4f9 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -35,11 +35,11 @@ abstract class RedBlack[A] extends Serializable {
def delete(k: A): Tree[B] = blacken(del(k))
def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
def foreach[U](f: (A, B) => U)
- @deprecated("use `foreach' instead", "2.8.0")
+ @deprecated("use `foreach` instead", "2.8.0")
def visit[T](input: T)(f: (T, A, B) => (Boolean, T)): (Boolean, T)
def toStream: Stream[(A,B)]
def iterator: Iterator[(A, B)]
- @deprecated("use `iterator' instead", "2.8.0")
+ @deprecated("use `iterator` instead", "2.8.0")
def elements = iterator
def upd[B1 >: B](k: A, v: B1): Tree[B1]
def del(k: A): Tree[B]
@@ -165,7 +165,7 @@ abstract class RedBlack[A] extends Serializable {
right foreach f
}
- @deprecated("use `foreach' instead", "2.8.0")
+ @deprecated("use `foreach` instead", "2.8.0")
def visit[T](input: T)(f: (T,A,B) => (Boolean, T)): (Boolean, T) = {
val left = this.left.visit(input)(f)
if (!left._1) return left
@@ -281,7 +281,7 @@ abstract class RedBlack[A] extends Serializable {
def foreach[U](f: (A, Nothing) => U) {}
- @deprecated("use `foreach' instead", "2.8.0")
+ @deprecated("use `foreach` instead", "2.8.0")
def visit[T](input: T)(f: (T, A, Nothing) => (Boolean, T)) = (true, input)
def rng(from: Option[A], until: Option[A]) = this
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index ce2b3b1885..cb2823aeda 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -58,7 +58,7 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: Any => U): Unit = {}
}
- @deprecated("use `Set.empty' instead", "2.8.0")
+ @deprecated("use `Set.empty` instead", "2.8.0")
class EmptySet[A] extends Set[A] with Serializable {
override def size: Int = 0
def contains(elem: A): Boolean = false
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index b6145ecaf0..95ffaf5a3e 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -498,7 +498,7 @@ self =>
override def apply(idx: Int) = self.apply(idx)
}
- /** Defines the prefix of this object's <code>toString</code> representation as ``Stream''.
+ /** Defines the prefix of this object's `toString` representation as `Stream`.
*/
override def stringPrefix = "Stream"
@@ -726,7 +726,7 @@ object Stream extends SeqFactory[Stream] {
* @param step the increment function of the stream, must be monotonically increasing or decreasing
* @return the stream starting at value <code>start</code>.
*/
- @deprecated("use `iterate' instead.", "2.8.0")
+ @deprecated("use `iterate` instead.", "2.8.0")
def range(start: Int, end: Int, step: Int => Int): Stream[Int] =
iterate(start, end - start)(step)
@@ -736,7 +736,7 @@ object Stream extends SeqFactory[Stream] {
* @param elem the element composing the resulting stream
* @return the stream containing an infinite number of elem
*/
- @deprecated("use `continually' instead", "2.8.0")
+ @deprecated("use `continually` instead", "2.8.0")
def const[A](elem: A): Stream[A] = cons(elem, const(elem))
/** Create a stream containing several copies of an element.
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 5a3a369286..02e74a3a64 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -218,10 +218,10 @@ self =>
toString.split(re)
}
- /** You can follow a string with `.r', turning
+ /** You can follow a string with `.r`, turning
* it into a Regex. E.g.
*
- * """A\w*""".r is the regular expression for identifiers starting with `A'.
+ * """A\w*""".r is the regular expression for identifiers starting with `A`.
*/
def r: Regex = new Regex(toString)
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 0445adaa23..067911f75e 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -234,26 +234,26 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* the collection itself.
*
* $compatMutate
- * You are strongly recommended to use '+=' instead.
+ * You are strongly recommended to use `+=` instead.
*
* @param elem the element to add.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.", "2.8.0")
+ "Use `clone() +=` if you intend to create a new collection.", "2.8.0")
def + (elem: A): This = { +=(elem); repr }
/** Adds two or more elements to this collection and returns
* the collection itself.
*
* $compatMutate
- * You are strongly recommended to use '++=' instead.
+ * You are strongly recommended to use `++=` instead.
*
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
+ "Use `clone() ++=` if you intend to create a new collection.", "2.8.0")
def + (elem1: A, elem2: A, elems: A*): This = {
this += elem1 += elem2 ++= elems
repr
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 03102f73d2..a06b930033 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -45,7 +45,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
* @return the updated buffer.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
+ "Use `clone() ++=` if you intend to create a new collection.", "2.8.0")
override def +(elem: A): Buffer[A] = self.+(elem)
/** Append a single element to this buffer.
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index d541ad8f52..8e01908a97 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -54,7 +54,7 @@ extends Map[A, B] with Serializable
def iterator: Iterator[(A, B)] = imap.iterator
- @deprecated("use `iterator' instead", "2.8.0")
+ @deprecated("use `iterator` instead", "2.8.0")
override def elements = iterator
override def toList: List[(A, B)] = imap.toList
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 0975b33dd0..2c1da15153 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -40,7 +40,7 @@ class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) extends Set[A]
def iterator: Iterator[A] = set.iterator
- @deprecated("use `iterator' instead", "2.8.0")
+ @deprecated("use `iterator` instead", "2.8.0")
override def elements: Iterator[A] = iterator
def +=(elem: A): this.type = { set = set + elem; this }
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 6af00d03b1..f13a6bb70a 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -19,7 +19,7 @@ import TraversableView.NoBuilder
* $viewInfo
* Some of the operations of this class will yield again a mutable indexed sequence,
* others will just yield a plain indexed sequence of type `collection.IndexedSeq`.
- * Because this is a leaf class there is no associated `Like' class.
+ * Because this is a leaf class there is no associated `Like` class.
* @author Martin Odersky
* @version 2.8
* @since 2.8
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 92c6e8c162..38de28f084 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -164,7 +164,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* If key is not present return None.
* @param key the key to be removed
*/
- @deprecated("Use `remove' instead", "2.8.0")
+ @deprecated("Use `remove` instead", "2.8.0")
def removeKey(key: A): Option[B] = remove(key)
/** Removes all bindings from the map. After this operation has completed,
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 3a0d9e0996..c931513da9 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -100,7 +100,7 @@ extends LinearSeq[A]
last0.next = new LinkedList[A]
last0 = last0.next
last0.elem = elem
- last0.next = new LinkedList[A] // for performance, use sentinel `object' instead?
+ last0.next = new LinkedList[A] // for performance, use sentinel `object` instead?
len = len + 1
}
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 31113797dd..f67aabc62d 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -92,8 +92,8 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
@deprecated(
- "Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.", "2.8.0"
+ "Use `+=` instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() +=` if you intend to create a new collection.", "2.8.0"
)
def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
@@ -103,8 +103,8 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @param kvs the remaining elements.
*/
@deprecated(
- "Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0"
+ "Use `++=` instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() ++=` if you intend to create a new collection.", "2.8.0"
)
def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 7e06199515..d306fb702d 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -32,8 +32,8 @@ trait Publisher[Evt] {
type Sub = Subscriber[Evt, Pub]
type Filter = Evt => Boolean
- /** The publisher itself of type `Pub'. Implemented by a cast from `this' here.
- * Needs to be overridden if the actual publisher is different from `this'.
+ /** The publisher itself of type `Pub`. Implemented by a cast from `this` here.
+ * Needs to be overridden if the actual publisher is different from `this`.
*/
protected val self: Pub = this.asInstanceOf[Pub]
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 8d7ed8bcd2..d6d0951594 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -89,7 +89,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*/
def capacity: Int = underlying.capacity()
- @deprecated("Use `ensureCapacity' instead. An assignment is misleading because\n"+
+ @deprecated("Use `ensureCapacity` instead. An assignment is misleading because\n"+
"it can never decrease the capacity.", "2.8.0")
def capacity_=(n: Int) { ensureCapacity(n) }
@@ -367,11 +367,11 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x))
@deprecated("Use appendAll instead. This method is deprecated because of the\n"+
- "possible confusion with `append(Any)'.", "2.8.0")
+ "possible confusion with `append(Any)`.", "2.8.0")
def append(x: Seq[Char]): StringBuilder = appendAll(x)
@deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any)'.", "2.8.0")
+ "of the possible confusion with `append(Any)`.", "2.8.0")
def append(x: Array[Char]): StringBuilder = appendAll(x)
@deprecated("use appendAll instead. This method is deprecated because\n"+
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
index 9498a62f32..db6e81cc26 100644
--- a/src/library/scala/concurrent/ops.scala
+++ b/src/library/scala/concurrent/ops.scala
@@ -49,7 +49,7 @@ object ops
runner.futureAsFunction(runner submit runner.functionAsTask(() => p))
}
- /** Evaluates two expressions in parallel. Invoking `par' blocks the current
+ /** Evaluates two expressions in parallel. Invoking `par` blocks the current
* thread until both expressions have been evaluated.
*
* @param xp the first expression to evaluate
@@ -68,7 +68,7 @@ object ops
* @param end ...
* @param p ...
*/
- @deprecated("use `collection.parallel.ParIterable.foreach' instead", "2.9.0")
+ @deprecated("use `collection.parallel.ParIterable.foreach` instead", "2.9.0")
def replicate(start: Int, end: Int)(p: Int => Unit)(implicit runner: TaskRunner = defaultRunner) {
if (start == end)
()
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 5b6fc6ffd5..b8ddd54233 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -215,7 +215,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger))
/** Returns a BigInt whose value is (this mod m).
- * This method differs from `%' in that it always returns a non-negative BigInt.
+ * This method differs from `%` in that it always returns a non-negative BigInt.
*/
def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger))
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index 3f3892e802..2189f6df82 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -49,7 +49,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
}
/** Tests whether the type represented by this manifest is a subtype
- * of the type represented by `that' manifest, subject to the limitations
+ * of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
def <:<(that: ClassManifest[_]): Boolean = {
@@ -82,7 +82,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
}
/** Tests whether the type represented by this manifest is a supertype
- * of the type represented by `that' manifest, subject to the limitations
+ * of the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
def >:>(that: ClassManifest[_]): Boolean =
@@ -94,7 +94,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
}
/** Tests whether the type represented by this manifest is equal to
- * the type represented by `that' manifest, subject to the limitations
+ * the type represented by `that` manifest, subject to the limitations
* described in the header.
*/
override def equals(that: Any): Boolean = that match {
@@ -179,7 +179,7 @@ object ClassManifest {
def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
- /** ClassManifest for the class type `clazz', where `clazz' is
+ /** ClassManifest for the class type `clazz`, where `clazz` is
* a top-level or static class.
* @note This no-prefix, no-arguments case is separate because we
* it's called from ScalaRunTime.boxArray itself. If we
@@ -189,12 +189,12 @@ object ClassManifest {
def classType[T <: AnyRef](clazz: JClass[_]): ClassManifest[T] =
new ClassTypeManifest[T](None, clazz, Nil)
- /** ClassManifest for the class type `clazz[args]', where `clazz' is
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class and `args` are its type arguments */
def classType[T <: AnyRef](clazz: JClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] =
new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
- /** ClassManifest for the class type `clazz[args]', where `clazz' is
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
* a class with non-package prefix type `prefix` and type arguments `args`.
*/
def classType[T <: AnyRef](prefix: OptManifest[_], clazz: JClass[_], args: OptManifest[_]*): ClassManifest[T] =
@@ -205,7 +205,7 @@ object ClassManifest {
case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
}
- /** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
+ /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: OptManifest[_], name: String, clazz: JClass[_], args: OptManifest[_]*): ClassManifest[T] =
@@ -215,7 +215,7 @@ object ClassManifest {
override def toString = prefix.toString+"#"+name+argString
}
- /** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
+ /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection.
* todo: remove after next boostrap
@@ -228,7 +228,7 @@ object ClassManifest {
}
}
-/** Manifest for the class type `clazz[args]', where `clazz' is
+/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T <: AnyRef](
prefix: Option[OptManifest[_]],
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 229fa728a8..a75f90ebf0 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -204,11 +204,11 @@ object Manifest {
override lazy val toString = value.toString + ".type"
}
- /** Manifest for the singleton type `value.type'. */
+ /** Manifest for the singleton type `value.type`. */
def singleType[T <: AnyRef](value: AnyRef): Manifest[T] =
new SingletonTypeManifest[T](value)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class.
* @note This no-prefix, no-arguments case is separate because we
* it's called from ScalaRunTime.boxArray itself. If we
@@ -218,18 +218,18 @@ object Manifest {
def classType[T](clazz: Predef.Class[_]): Manifest[T] =
new ClassTypeManifest[T](None, clazz, Nil)
- /** Manifest for the class type `clazz', where `clazz' is
+ /** Manifest for the class type `clazz`, where `clazz` is
* a top-level or static class and args are its type arguments. */
def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a class with non-package prefix type `prefix` and type arguments `args`.
*/
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
val erasure: Predef.Class[_],
@@ -243,7 +243,7 @@ object Manifest {
def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] =
arg.asInstanceOf[Manifest[T]].arrayManifest
- /** Manifest for the abstract type `prefix # name'. `upperBound' is not
+ /** Manifest for the abstract type `prefix # name`. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
def abstractType[T](prefix: Manifest[_], name: String, clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
@@ -253,7 +253,7 @@ object Manifest {
override def toString = prefix.toString+"#"+name+argString
}
- /** Manifest for the unknown type `_ >: L <: U' in an existential.
+ /** Manifest for the unknown type `_ >: L <: U` in an existential.
*/
def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
new Manifest[T] {
diff --git a/src/library/scala/reflect/generic/Flags.scala b/src/library/scala/reflect/generic/Flags.scala
index b56faf8934..61dd93928c 100755
--- a/src/library/scala/reflect/generic/Flags.scala
+++ b/src/library/scala/reflect/generic/Flags.scala
@@ -14,7 +14,7 @@ package generic
final val CASE = 0x00000800
final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction with abstract override.
// Note difference to DEFERRED!
- final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
+ final val DEFERRED = 0x00000010 // was `abstract` for members | trait is virtual
final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
final val MUTABLE = 0x00001000 // symbol is a mutable variable.
final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
@@ -77,8 +77,8 @@ object ModifierFlags extends ModifierFlags
final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
final val VARARGS = 0x80000000000L// symbol is a Java-style varargs method
- final val TRIEDCOOKING = 0x100000000000L // ``Cooking'' has been tried on this symbol
- // A Java method's type is ``cooked'' by transforming raw types to existentials
+ final val TRIEDCOOKING = 0x100000000000L // "Cooking" has been tried on this symbol
+ // A Java method's type is "cooked" by transforming raw types to existentials
// pickling and unpickling of flags
diff --git a/src/library/scala/reflect/generic/PickleFormat.scala b/src/library/scala/reflect/generic/PickleFormat.scala
index c6308e7db8..9c7dc72d69 100755
--- a/src/library/scala/reflect/generic/PickleFormat.scala
+++ b/src/library/scala/reflect/generic/PickleFormat.scala
@@ -110,7 +110,7 @@ package generic
* AnnotArg = Tree | Constant
* ConstAnnotArg = Constant | AnnotInfo | AnnotArgArray
*
- * len is remaining length after `len'.
+ * len is remaining length after `len`.
*/
val MajorVersion = 5
val MinorVersion = 0
diff --git a/src/library/scala/reflect/generic/StdNames.scala b/src/library/scala/reflect/generic/StdNames.scala
index 8f8d695016..de5c98886b 100755
--- a/src/library/scala/reflect/generic/StdNames.scala
+++ b/src/library/scala/reflect/generic/StdNames.scala
@@ -30,7 +30,7 @@ import scala.reflect.NameTransformer
val LOCAL_SUFFIX_STRING = " "
val ROOTPKG: NameType = "_root_"
- /** The expanded name of `name' relative to this class `base` with given `separator`
+ /** The expanded name of `name` relative to this class `base` with given `separator`
*/
def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
newTermName(base.fullName('$') + separator + name)
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
index a58711663e..7efb87676f 100755
--- a/src/library/scala/reflect/generic/Symbols.scala
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -99,7 +99,7 @@ import Flags._
* object Foo
* class Foo
*
- * Then object Foo has a `moduleClass' (invisible to the user, the backend calls it Foo$
+ * Then object Foo has a `moduleClass` (invisible to the user, the backend calls it Foo$
* linkedClassOfClass goes from class Foo$ to class Foo, and back.
*/
def linkedClassOfClass: Symbol
diff --git a/src/library/scala/reflect/generic/Trees.scala b/src/library/scala/reflect/generic/Trees.scala
index d44f6b5b8f..dace1628da 100755
--- a/src/library/scala/reflect/generic/Trees.scala
+++ b/src/library/scala/reflect/generic/Trees.scala
@@ -407,7 +407,7 @@ import Flags._
*/
case class ApplyDynamic(qual: Tree, args: List[Tree])
extends TermTree with SymTree
- // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
+ // The symbol of an ApplyDynamic is the function symbol of `qual`, or NoSymbol, if there is none.
/** Super reference, qual = corresponding this reference */
case class Super(qual: Tree, mix: TypeName) extends TermTree {
diff --git a/src/library/scala/reflect/generic/Types.scala b/src/library/scala/reflect/generic/Types.scala
index 837f5484db..b3601cf364 100755
--- a/src/library/scala/reflect/generic/Types.scala
+++ b/src/library/scala/reflect/generic/Types.scala
@@ -11,7 +11,7 @@ package generic
*/
def isComplete: Boolean = true
- /** If this is a lazy type, assign a new type to `sym'. */
+ /** If this is a lazy type, assign a new type to `sym`. */
def complete(sym: Symbol) {}
/** Convert toString avoiding infinite recursions by cutting off
diff --git a/src/library/scala/reflect/generic/UnPickler.scala b/src/library/scala/reflect/generic/UnPickler.scala
index 3bae268295..5d47195f54 100755
--- a/src/library/scala/reflect/generic/UnPickler.scala
+++ b/src/library/scala/reflect/generic/UnPickler.scala
@@ -61,7 +61,7 @@ abstract class UnPickler {
/** A map from entry numbers to symbols, types, or annotations */
private val entries = new Array[AnyRef](index.length)
- /** A map from symbols to their associated `decls' scopes */
+ /** A map from symbols to their associated `decls` scopes */
private val symScopes = new HashMap[Symbol, Scope]
//println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
@@ -97,7 +97,7 @@ abstract class UnPickler {
" in "+filename)
}
- /** The `decls' scope associated with given symbol */
+ /** The `decls` scope associated with given symbol */
protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope)
/** Does entry represent an (internal) symbol */
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index c436df9f8f..331d30e0bb 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -29,7 +29,7 @@ class Breaks {
private val breakException = new BreakControl
- /** A block from which one can exit with a `break''. */
+ /** A block from which one can exit with a `break`. */
def breakable(op: => Unit) {
try {
op
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 6a783e4594..663900dd0a 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -300,7 +300,7 @@ object Regex {
nextSeen
}
- /** The next matched substring of `source' */
+ /** The next matched substring of `source` */
def next: String = {
if (!hasNext) throw new NoSuchElementException
nextSeen = false
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index a4f457e1bf..2ecb702446 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -27,8 +27,8 @@ import scala.collection.mutable.Map
* @author Adriaan Moors
*/
trait Mappable {
- trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T' is too strict:
- sometimes we want to allow `Forall T >: precision. T => T' for some type `precision', so that,
+ trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
+ sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
beneath a certain threshold, we have some leeway.
concretely: to use gmap for substitution, we simply require that ast nodes are mapped to ast nodes,
we can't require that the type is preserved precisely: a Name may map to e.g., a MethodCall
@@ -68,27 +68,27 @@ trait Mappable {
* allowed to be left unqualified, these mechanisms would have to be
* complemented by an extra phase that resolves names that couldn't be
* resolved using the naive binding rules. (Maybe some machinery to
- * model `implicit' binders (e.g., `this' and imported qualifiers)
+ * model `implicit` binders (e.g., `this` and imported qualifiers)
* and selection on a binder will suffice?)
* </p>
*
* @author Adriaan Moors
*/
trait Binders extends AbstractSyntax with Mappable {
- /** A `Scope' keeps track of one or more syntactic elements that represent bound names.
- * The elements it contains share the same scope and must all be distinct (wrt. ==)
+ /** A `Scope` keeps track of one or more syntactic elements that represent bound names.
+ * The elements it contains share the same scope and must all be distinct, as determined by `==`.
*
- * A `NameElement' `n' in the AST that is conceptually bound by a `Scope' `s', is replaced by a
- * `BoundElement(n, s)'. (For example, in `val x:Int=x+1', the first `x' is modelled by a
- * Scope `s' that contains `x' and the second `x' is represented by a `BoundElement(`x', s)')
- * The term (`x+1') in scope of the Scope becomes an `UnderBinder(s, `x+1').
+ * A `NameElement` `n` in the AST that is conceptually bound by a `Scope` `s`, is replaced by a
+ * `BoundElement(n, s)`. (For example, in `val x:Int=x+1`, the first `x` is modelled by a
+ * Scope `s` that contains `x` and the second `x` is represented by a `BoundElement(x, s)`.)
+ * The term (`x+1`) in scope of the Scope becomes an `UnderBinder(s, x+1)`.
*
- * A `NameElement' `n' is bound by a `Scope' `s' if it is wrapped as a `BoundElement(`n', s)', and
- * `s' has a binder element that is semantically equal (`equals' or `==') to `n'.
+ * A `NameElement` `n` is bound by a `Scope` `s` if it is wrapped as a `BoundElement(n, s)`, and
+ * `s` has a binder element that is semantically equal (`equals` or `==`) to `n`.
*
- * A `Scope' is represented textually by its list of binder elements, followed by the scope's `id'.
- * For example: `[x, y]!1' represents the scope with `id' `1' and binder elements `x' and `y'.
- * (`id' is solely used for this textual representation.)
+ * A `Scope` is represented textually by its list of binder elements, followed by the scope's `id`.
+ * For example: `[x, y]!1` represents the scope with `id` `1` and binder elements `x` and `y`.
+ * (`id` is solely used for this textual representation.)
*/
class Scope[binderType <: NameElement] extends Iterable[binderType]{
private val substitution: Map[binderType, Element] =
@@ -104,10 +104,10 @@ trait Binders extends AbstractSyntax with Mappable {
*/
def iterator = substitution.keysIterator
- /** Return the `i'th binder in this scope.*/
+ /** Return the `i`th binder in this scope.*/
def apply(i: Int): binderType = this.iterator.toList(i)
- /** Returns true if this container has a binder equal (==) to `b'
+ /** Returns true if this container has a binder equal (==) to `b`
*/
def binds(b: binderType): Boolean = substitution.contains(b)
@@ -132,29 +132,29 @@ trait Binders extends AbstractSyntax with Mappable {
*/
def addBinder(b: binderType) { substitution += Pair(b, b) }
- /** `canAddElement' indicates whether `b' may be added to this scope.
+ /** `canAddElement` indicates whether `b` may be added to this scope.
*
* TODO: strengthen this condition so that no binders may be added after this scope has been
- * linked to its `UnderBinder' (i.e., while parsing, BoundElements may be added to the Scope
+ * linked to its `UnderBinder` (i.e., while parsing, BoundElements may be added to the Scope
* associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
*
- * @return true if `b' had not been added yet
+ * @return true if `b` had not been added yet
*/
def canAddBinder(b: binderType): Boolean = !binds(b)
/** ``Replaces'' the bound occurrences of a contained binder by their new value.
- * The bound occurrences of `b' are not actually replaced; the scope keeps track
- * of a substitution that maps every binder to its current value. Since a `BoundElement' is
- * a proxy for the element it is bound to by its binder, `substitute' may thus be thought of
- * as replacing all the bound occurrences of the given binder `b' by their new value `value'.
+ * The bound occurrences of `b` are not actually replaced; the scope keeps track
+ * of a substitution that maps every binder to its current value. Since a `BoundElement` is
+ * a proxy for the element it is bound to by its binder, `substitute` may thus be thought of
+ * as replacing all the bound occurrences of the given binder `b` by their new value `value`.
*
* @param b the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
- * @param value the new value for the bound occurrences of `b'
+ * @param value the new value for the bound occurrences of `b`
* @return `getElementFor(b) eq value` will hold.
*/
def substitute(b: binderType, value: Element): Unit = substitution(b) = value
- /** Returns the current value for the bound occurrences of `b'.
+ /** Returns the current value for the bound occurrences of `b`.
*
* @param b the contained binder whose current value should be returned `binds(b)` must hold.
*/
@@ -183,17 +183,17 @@ trait Binders extends AbstractSyntax with Mappable {
// def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean
}
- /** A `BoundElement' is bound in a certain scope `scope', which keeps track of the actual element that
- * `el' stands for.
+ /** A `BoundElement` is bound in a certain scope `scope`, which keeps track of the actual element that
+ * `el` stands for.
*
- * A `BoundElement' is represented textually by its bound element, followed by its scope's `id'.
- * For example: `x@1' represents the variable `x' that is bound in the scope with `id' `1'.
+ * A `BoundElement` is represented textually by its bound element, followed by its scope's `id`.
+ * For example: `x@1` represents the variable `x` that is bound in the scope with `id` `1`.
*
* @note `scope.binds(el)` holds before and after.
*/
case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
- /** Returns the element this `BoundElement' stands for.
- * The `Proxy' trait ensures `equals', `hashCode' and `toString' are forwarded to
+ /** Returns the element this `BoundElement` stands for.
+ * The `Proxy` trait ensures `equals`, `hashCode` and `toString` are forwarded to
* the result of this method.
*/
def self: Element = scope.getElementFor(el)
@@ -222,10 +222,10 @@ trait Binders extends AbstractSyntax with Mappable {
override def toString: String = "(" + scope.toString + ") in { "+element.toString+" }"
/** Alpha-equivalence -- TODO
- * Returns true if the `element' of the `other' `UnderBinder' is equal to this `element' up to alpha-conversion.
+ * Returns true if the `element` of the `other` `UnderBinder` is equal to this `element` up to alpha-conversion.
*
- * That is, regular equality is used for all elements but `BoundElement's: such an element is
- * equal to a `BoundElement' in `other' if their binders are equal. Binders are equal if they
+ * That is, regular equality is used for all elements but `BoundElement`s: such an element is
+ * equal to a `BoundElement` in `other` if their binders are equal. Binders are equal if they
* are at the same index in their respective scope.
*
* Example: UnderBinder([x, y]!1, x@1) alpha_== UnderBinder([a, b]!2, a@2)
@@ -291,10 +291,10 @@ trait Binders extends AbstractSyntax with Mappable {
def unit[bt <: NameElement, elementT <% Mappable[elementT]](x: elementT) = UnderBinder(new Scope[bt](), x)
}
- /** If a list of `UnderBinder's all have the same scope, they can be turned in to an UnderBinder
- * containing a list of the elements in the original `UnderBinder'.
+ /** If a list of `UnderBinder`s all have the same scope, they can be turned into an UnderBinder
+ * containing a list of the elements in the original `UnderBinder`.
*
- * The name `sequence' comes from the fact that this method's type is equal to the type of monadic sequence.
+ * The name `sequence` comes from the fact that this method's type is equal to the type of monadic sequence.
*
* @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
*
@@ -307,11 +307,11 @@ trait Binders extends AbstractSyntax with Mappable {
def unsequence[bt <: NameElement, st <% Mappable[st]](orig: UnderBinder[bt, List[st]]): List[UnderBinder[bt, st]] =
orig.element.map(sc => UnderBinder(orig.scope, sc))
- /** An environment that maps a `NameElement' to the scope in which it is bound.
+ /** An environment that maps a `NameElement` to the scope in which it is bound.
* This can be used to model scoping during parsing.
*
- * (This class is similar to Burak's ECOOP paper on pattern matching, except that we use `=='
- * instead of `eq', thus types can't be unified in general)
+ * (This class is similar to Burak's ECOOP paper on pattern matching, except that we use `==`
+ * instead of `eq`, thus types can't be unified in general)
*
* TODO: more documentation
*/
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index 7f173dada9..32261c102e 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -9,17 +9,17 @@
package scala.util.parsing.combinator
-/** This object contains implicit conversions that come in handy when using the `^^' combinator
+/** This object contains implicit conversions that come in handy when using the `^^` combinator
* {@see Parsers} to construct an AST from the concrete syntax.
*<p>
- * The reason for this is that the sequential composition combinator (`~') combines its constituents
- * into a ~. When several `~'s are combined, this results in nested `~'s (to the left).
- * The `flatten*' coercions makes it easy to apply an `n'-argument function to a nested ~ of
- * depth (`n-1')</p>
+ * The reason for this is that the sequential composition combinator (`~`) combines its constituents
+ * into a ~. When several `~`s are combined, this results in nested `~`s (to the left).
+ * The `flatten*` coercions make it easy to apply an `n`-argument function to a nested ~ of
+ * depth (`n-1`)</p>
*<p>
- * The `headOptionTailToFunList' converts a function that takes a List[A] to a function that
+ * The `headOptionTailToFunList` converts a function that takes a List[A] to a function that
* accepts a ~[A, Option[List[A]]] (this happens when, e.g., parsing something of the following
- * shape: p ~ opt("." ~ repsep(p, ".")) -- where `p' is a parser that yields an A)</p>
+ * shape: p ~ opt("." ~ repsep(p, ".")) -- where `p` is a parser that yields an A)</p>
*
* @author Martin Odersky, Iulian Dragos, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index a02f33ef36..b0760f42ae 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -66,19 +66,19 @@ trait Parsers {
sealed abstract class ParseResult[+T] {
/** Functional composition of ParseResults
*
- * @param `f' the function to be lifted over this result
- * @return `f' applied to the result of this `ParseResult', packaged up as a new `ParseResult'
+ * @param `f` the function to be lifted over this result
+ * @return `f` applied to the result of this `ParseResult`, packaged up as a new `ParseResult`
*/
def map[U](f: T => U): ParseResult[U]
/** Partial functional composition of ParseResults
*
- * @param `f' the partial function to be lifted over this result
- * @param error a function that takes the same argument as `f' and produces an error message
- * to explain why `f' wasn't applicable (it is called when this is the case)
- * @return <i>if `f' f is defined at the result in this `ParseResult',</i>
- * `f' applied to the result of this `ParseResult', packaged up as a new `ParseResult'.
- * If `f' is not defined, `Failure'.
+ * @param `f` the partial function to be lifted over this result
+ * @param error a function that takes the same argument as `f` and produces an error message
+ * to explain why `f` wasn't applicable (it is called when this is the case)
+ * @return <i>if `f` is defined at the result in this `ParseResult`,</i>
+ * `f` applied to the result of this `ParseResult`, packaged up as a new `ParseResult`.
+ * If `f` is not defined, `Failure`.
*/
def mapPartial[U](f: PartialFunction[T, U], error: T => String): ParseResult[U]
@@ -167,7 +167,7 @@ trait Parsers {
}
/** The fatal failure case of ParseResult: contains an error-message and the remaining input.
- * No back-tracking is done when a parser returns an `Error'
+ * No back-tracking is done when a parser returns an `Error`
*
* @param msg An error message string describing the error.
* @param next The parser's unconsumed input at the point where the error occurred.
@@ -215,43 +215,41 @@ trait Parsers {
// it's short, light (looks like whitespace), has few overloaded meaning (thanks to the recent change from ~ to unary_~)
// and we love it! (or do we like `,` better?)
- /** A parser combinator for sequential composition
+ /** A parser combinator for sequential composition.
*
- * <p> `p ~ q' succeeds if `p' succeeds and `q' succeeds on the input
- * left over by `p'.</p>
+ * `p ~ q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
*
- * @param q a parser that will be executed after `p' (this parser) succeeds -- evaluated at most once, and only when necessary
- * @return a `Parser' that -- on success -- returns a `~' (like a Pair, but easier to pattern match on)
- * that contains the result of `p' and that of `q'.
- * The resulting parser fails if either `p' or `q' fails.
+ * @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
+ * @return a `Parser` that -- on success -- returns a `~` (like a Pair, but easier to pattern match on)
+ * that contains the result of `p` and that of `q`.
+ * The resulting parser fails if either `p` or `q` fails.
*/
@migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { lazy val p = q // lazy argument
(for(a <- this; b <- p) yield new ~(a,b)).named("~")
}
- /** A parser combinator for sequential composition which keeps only the right result
+ /** A parser combinator for sequential composition which keeps only the right result.
*
- * <p> `p ~> q' succeeds if `p' succeeds and `q' succeeds on the input
- * left over by `p'.</p>
+ * `p ~> q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
*
- * @param q a parser that will be executed after `p' (this parser) succeeds -- evaluated at most once, and only when necessary
- * @return a `Parser' that -- on success -- returns the result of `q'.
+ * @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
+ * @return a `Parser` that -- on success -- returns the result of `q`.
*/
@migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
def ~> [U](q: => Parser[U]): Parser[U] = { lazy val p = q // lazy argument
(for(a <- this; b <- p) yield b).named("~>")
}
- /** A parser combinator for sequential composition which keeps only the left result
+ /** A parser combinator for sequential composition which keeps only the left result.
*
- * <p> `p &lt;~ q' succeeds if `p' succeeds and `q' succeeds on the input
- * left over by `p'.</p>
+ * <p> `p <~ q` succeeds if `p` succeeds and `q` succeeds on the input
+ * left over by `p`.</p>
*
- * <b>Note:</b> &lt;~ has lower operator precedence than ~ or ~>.
+ * '''Note:''' <~ has lower operator precedence than ~ or ~>.
*
- * @param q a parser that will be executed after `p' (this parser) succeeds -- evaluated at most once, and only when necessary
- * @return a `Parser' that -- on success -- returns the result of `p'.
+ * @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
+ * @return a `Parser` that -- on success -- returns the result of `p`.
*/
@migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
def <~ [U](q: => Parser[U]): Parser[T] = { lazy val p = q // lazy argument
@@ -263,43 +261,40 @@ trait Parsers {
def apply(in: Input) = seq(Parser.this, q)((x, y) => combine(x,y))(in)
} */
- /** A parser combinator for non-back-tracking sequential composition
+ /** A parser combinator for non-back-tracking sequential composition.
*
- *<p>`p ~! q' succeeds if `p' succeeds and `q' succeeds on the input
- * left over by `p'. In case of failure, no back-tracking is performed
- * (in an earlier parser produced by the | combinator).</p>
+ * `p ~! q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
+ * In case of failure, no back-tracking is performed (in an earlier parser produced by the `|` combinator).
*
- * @param q a parser that will be executed after `p' (this parser) succeeds
- * @return a `Parser' that -- on success -- returns a `~' (like a Pair, but easier to pattern match on)
- * that contains the result of `p' and that of `q'.
- * The resulting parser fails if either `p' or `q' fails, this failure is fatal.
+ * @param q a parser that will be executed after `p` (this parser) succeeds
+ * @return a `Parser` that -- on success -- returns a `~` (like a Pair, but easier to pattern match on)
+ * that contains the result of `p` and that of `q`.
+ * The resulting parser fails if either `p` or `q` fails, this failure is fatal.
*/
def ~! [U](p: => Parser[U]): Parser[~[T, U]]
= OnceParser{ (for(a <- this; b <- commit(p)) yield new ~(a,b)).named("~!") }
- /** A parser combinator for alternative composition
+ /** A parser combinator for alternative composition.
*
- *<p>`p | q' succeeds if `p' succeeds or `q' succeeds
- * Note that `q' is only tried if `p's failure is non-fatal (i.e., back-tracking is
- * allowed).</p>
+ * `p | q` succeeds if `p` succeeds or `q` succeeds.
+ * Note that `q` is only tried if `p`'s failure is non-fatal (i.e., back-tracking is allowed).
*
- * @param q a parser that will be executed if `p' (this parser) fails (and allows back-tracking)
- * @return a `Parser' that returns the result of the first parser to succeed (out of `p' and `q')
+ * @param q a parser that will be executed if `p` (this parser) fails (and allows back-tracking)
+ * @return a `Parser` that returns the result of the first parser to succeed (out of `p` and `q`)
* The resulting parser succeeds if (and only if) <ul>
- * <li> `p' succeeds, <i>or</i> </li>
- * <li> if `p' fails allowing back-tracking and `q' succeeds. </li> </ul>
+ * <li> `p` succeeds, <i>or</i> </li>
+ * <li> if `p` fails allowing back-tracking and `q` succeeds. </li> </ul>
*/
def | [U >: T](q: => Parser[U]): Parser[U] = append(q).named("|")
// TODO
- /** A parser combinator for alternative with longest match composition
+ /** A parser combinator for alternative with longest match composition.
*
- *<p>`p ||| q' succeeds if `p' succeeds or `q' succeeds
- * If `p' and `q' both succeed, the parser that consumed the most
- * characters accepts.</p>
+ * `p ||| q` succeeds if `p` succeeds or `q` succeeds.
+ * If `p` and `q` both succeed, the parser that consumed the most characters accepts.
*
* @param q0 a parser that accepts if p consumes less characters. -- evaluated at most once, and only when necessary
- * @return a `Parser' that returns the result of the parser consuming the most characters (out of `p' and `q').
+ * @return a `Parser` that returns the result of the parser consuming the most characters (out of `p` and `q`).
*/
@migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
def ||| [U >: T](q0: => Parser[U]): Parser[U] = new Parser[U] {
@@ -319,20 +314,21 @@ trait Parsers {
override def toString = "|||"
}
- /** A parser combinator for function application
+ /** A parser combinator for function application.
*
- *<p>`p ^^ f' succeeds if `p' succeeds; it returns `f' applied to the result of `p'.</p>
+ * `p ^^ f` succeeds if `p` succeeds; it returns `f` applied to the result of `p`.
*
- * @param f a function that will be applied to this parser's result (see `map' in `ParseResult').
+ * @param f a function that will be applied to this parser's result (see `map` in `ParseResult`).
* @return a parser that has the same behaviour as the current parser, but whose result is
- * transformed by `f'.
+ * transformed by `f`.
*/
def ^^ [U](f: T => U): Parser[U] = map(f).named(toString+"^^")
/** A parser combinator that changes a successful result into the specified value.
*
- * <p>`p ^^^ v' succeeds if `p' succeeds; discards its result, and returns `v` instead.</p>
+ * `p ^^^ v` succeeds if `p` succeeds; discards its result, and returns `v` instead.
+ *
* @param v The new result for the parser, evaluated at most once (if `p` succeeds), not evaluated at all if `p` fails.
* @return a parser that has the same behaviour as the current parser, but whose successful result is `v`
*/
@@ -342,48 +338,47 @@ trait Parsers {
def apply(in: Input) = Parser.this(in) map (x => v0)
}.named(toString+"^^^")
- /** A parser combinator for partial function application
+ /** A parser combinator for partial function application.
*
- *<p>`p ^? (f, error)' succeeds if `p' succeeds AND `f' is defined at the result of `p';
- * in that case, it returns `f' applied to the result of `p'. If `f' is not applicable,
- * error(the result of `p') should explain why.</p>
+ * `p ^? (f, error)` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
+ * in that case, it returns `f` applied to the result of `p`. If `f` is not applicable,
+ * error(the result of `p`) should explain why.
*
* @param f a partial function that will be applied to this parser's result
- * (see `mapPartial' in `ParseResult').
- * @param error a function that takes the same argument as `f' and produces an error message
- * to explain why `f' wasn't applicable
- * @return a parser that succeeds if the current parser succeeds <i>and</i> `f' is applicable
- * to the result. If so, the result will be transformed by `f'.
+ * (see `mapPartial` in `ParseResult`).
+ * @param error a function that takes the same argument as `f` and produces an error message
+ * to explain why `f` wasn't applicable
+ * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
+ * to the result. If so, the result will be transformed by `f`.
*/
def ^? [U](f: PartialFunction[T, U], error: T => String): Parser[U] = Parser{ in =>
this(in).mapPartial(f, error)}.named(toString+"^?")
- /** A parser combinator for partial function application
+ /** A parser combinator for partial function application.
*
- *<p>`p ^? f' succeeds if `p' succeeds AND `f' is defined at the result of `p';
- * in that case, it returns `f' applied to the result of `p'.</p>
+ * `p ^? f` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
+ * in that case, it returns `f` applied to the result of `p`.
*
* @param f a partial function that will be applied to this parser's result
- * (see `mapPartial' in `ParseResult').
- * @return a parser that succeeds if the current parser succeeds <i>and</i> `f' is applicable
- * to the result. If so, the result will be transformed by `f'.
+ * (see `mapPartial` in `ParseResult`).
+ * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
+ * to the result. If so, the result will be transformed by `f`.
*/
def ^? [U](f: PartialFunction[T, U]): Parser[U] = ^?(f, r => "Constructor function not defined at "+r)
- /** A parser combinator that parameterizes a subsequent parser with the result of this one
+ /** A parser combinator that parameterizes a subsequent parser with the result of this one.
*
- *<p>
- * Use this combinator when a parser depends on the result of a previous parser. `p' should be
- * a function that takes the result from the first parser and returns the second parser.</p>
+ * Use this combinator when a parser depends on the result of a previous parser. `p` should be
+ * a function that takes the result from the first parser and returns the second parser.
*
- *<p> `p into fq' (with `fq' typically `{x => q}') first applies `p', and then, if `p' successfully
- * returned result `r', applies `fq(r)' to the rest of the input. </p>
+ * `p into fq` (with `fq` typically `{x => q}`) first applies `p`, and then, if `p` successfully
+ * returned result `r`, applies `fq(r)` to the rest of the input.
*
- *<p> From: G. Hutton. Higher-order functions for parsing. J. Funct. Program., 2(3):323--343, 1992. </p>
+ * ''From: G. Hutton. Higher-order functions for parsing. J. Funct. Program., 2(3):323--343, 1992.''
*
* @param fq a function that, given the result from this parser, returns the second parser to be applied
- * @return a parser that succeeds if this parser succeeds (with result `x') and if then `fq(x)' succeeds
+ * @return a parser that succeeds if this parser succeeds (with result `x`) and if then `fq(x)` succeeds
*/
def into[U](fq: T => Parser[U]): Parser[U] = flatMap(fq)
@@ -399,8 +394,8 @@ trait Parsers {
*/
def * = rep(this)
- /** Returns a parser that repeatedly parses what this parser parses, interleaved with the `sep' parser.
- * The `sep' parser specifies how the results parsed by this parser should be combined.
+ /** Returns a parser that repeatedly parses what this parser parses, interleaved with the `sep` parser.
+ * The `sep` parser specifies how the results parsed by this parser should be combined.
*
* @return chainl1(this, sep)
*/
@@ -421,8 +416,8 @@ trait Parsers {
def ? = opt(this)
}
- /** Wrap a parser so that its failures become errors (the | combinator will give up as soon as
- * it encounters an error, on failure it simply tries the next alternative)
+ /** Wrap a parser so that its failures become errors (the `|` combinator will give up as soon as
+ * it encounters an error, on failure it simply tries the next alternative).
*/
def commit[T](p: => Parser[T]) = Parser{ in =>
p(in) match{
@@ -437,9 +432,9 @@ trait Parsers {
case class EFNil(res: Boolean) extends ElemFun*/
- /** A parser matching input elements that satisfy a given predicate
+ /** A parser matching input elements that satisfy a given predicate.
*
- * <p>elem(kind, p) succeeds if the input starts with an element `e' for which p(e) is true.</p>
+ * `elem(kind, p)` succeeds if the input starts with an element `e` for which `p(e)` is true.
*
* @param kind The element kind, used for error messages
* @param p A predicate that determines which elements match.
@@ -447,49 +442,48 @@ trait Parsers {
*/
def elem(kind: String, p: Elem => Boolean) = acceptIf(p)(inEl => kind+" expected")
- /** A parser that matches only the given element `e'
+ /** A parser that matches only the given element `e`.
*
- * <p>elem(e) succeeds if the input starts with an element `e'</p>
+ * `elem(e)` succeeds if the input starts with an element `e`.
*
- * @param e the `Elem' that must be the next piece of input for the returned parser to succeed
- * @return a `Parser' that succeeds if `e' is the next available input (and returns it).
+ * @param e the `Elem` that must be the next piece of input for the returned parser to succeed
+ * @return a `Parser` that succeeds if `e` is the next available input (and returns it).
*/
def elem(e: Elem): Parser[Elem] = accept(e)
-
- /** A parser that matches only the given element `e'
- *<p>
+ /** A parser that matches only the given element `e`.
+ *
* The method is implicit so that elements can automatically be lifted to their parsers.
- * For example, when parsing `Token's, Identifier("new") (which is a `Token') can be used directly,
- * instead of first creating a `Parser' using accept(Identifier("new")).</p>
+ * For example, when parsing `Token`s, `Identifier("new")` (which is a `Token`) can be used directly,
+ * instead of first creating a `Parser` using `accept(Identifier("new"))`.
*
- * @param e the `Elem' that must be the next piece of input for the returned parser to succeed
- * @return a `tParser' that succeeds if `e' is the next available input.
+ * @param e the `Elem` that must be the next piece of input for the returned parser to succeed
+ * @return a `tParser` that succeeds if `e` is the next available input.
*/
implicit def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("`"+e+"' expected but " + _ + " found")
- /** A parser that matches only the given list of element `es'
+ /** A parser that matches only the given list of element `es`.
*
- * <p>accept(es) succeeds if the input subsequently provides the elements in the list `es'.</p>
+ * `accept(es)` succeeds if the input subsequently provides the elements in the list `es`.
*
* @param es the list of expected elements
* @return a Parser that recognizes a specified list of elements
*/
def accept[ES <% List[Elem]](es: ES): Parser[List[Elem]] = acceptSeq(es)
- /** The parser that matches an element in the domain of the partial function `f'
- *<p>
- * If `f' is defined on the first element in the input, `f' is applied to it to produce
- * this parser's result.</p>
- *<p>
- * Example: The parser <code>accept("name", {case Identifier(n) => Name(n)})</code>
- * accepts an <code>Identifier(n)</code> and returns a <code>Name(n)</code>.</p>
+ /** The parser that matches an element in the domain of the partial function `f`.
+ *
+ * If `f` is defined on the first element in the input, `f` is applied to it to produce
+ * this parser's result.
+ *
+ * Example: The parser `accept("name", {case Identifier(n) => Name(n)})`
+ * accepts an `Identifier(n)` and returns a `Name(n)`.
*
* @param expected a description of the kind of element this parser expects (for error messages)
* @param f a partial function that determines when this parser is successful and what its output is
- * @return A parser that succeeds if `f' is applicable to the first element of the input,
- * applying `f' to it to produce the result.
+ * @return A parser that succeeds if `f` is applicable to the first element of the input,
+ * applying `f` to it to produce the result.
*/
def accept[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = acceptMatch(expected, f)
@@ -506,14 +500,14 @@ trait Parsers {
def acceptSeq[ES <% Iterable[Elem]](es: ES): Parser[List[Elem]] = es.foldRight[Parser[List[Elem]]](success(Nil)){(x, pxs) => accept(x) ~ pxs ^^ mkList}
- /** A parser that always fails
+ /** A parser that always fails.
*
* @param msg The error message describing the failure.
* @return A parser that always fails with the specified error message.
*/
def failure(msg: String) = Parser{ in => Failure(msg, in) }
- /** A parser that results in an error
+ /** A parser that results in an error.
*
* @param msg The error message describing the failure.
* @return A parser that always fails with the specified error message.
@@ -521,10 +515,10 @@ trait Parsers {
def err(msg: String) = Parser{ in => Error(msg, in) }
- /** A parser that always succeeds
+ /** A parser that always succeeds.
*
* @param v The result for the parser
- * @return A parser that always succeeds, with the given result `v'
+ * @return A parser that always succeeds, with the given result `v`.
*/
def success[T](v: T) = Parser{ in => Success(v, in) }
@@ -537,52 +531,50 @@ trait Parsers {
/** A parser generator for repetitions.
*
- * <p> rep(p) repeatedly uses `p' to parse the input until `p' fails (the result is a List
- * of the consecutive results of `p') </p>
+ * `rep(p)` repeatedly uses `p` to parse the input until `p` fails (the result is a List
+ * of the consecutive results of `p`).
*
- * @param p a `Parser' that is to be applied successively to the input
- * @return A parser that returns a list of results produced by repeatedly applying `p' to the input.
+ * @param p a `Parser` that is to be applied successively to the input
+ * @return A parser that returns a list of results produced by repeatedly applying `p` to the input.
*/
def rep[T](p: => Parser[T]): Parser[List[T]] = rep1(p) | success(List())
/** A parser generator for interleaved repetitions.
*
- * <p> repsep(p, q) repeatedly uses `p' interleaved with `q' to parse the input, until `p' fails.
- * (The result is a `List' of the results of `p'.) </p>
+ * `repsep(p, q)` repeatedly uses `p` interleaved with `q` to parse the input, until `p` fails.
+ * (The result is a `List` of the results of `p`.)
*
- * <p>Example: <code>repsep(term, ",")</code> parses a comma-separated list of term's,
- * yielding a list of these terms</p>
+ * Example: `repsep(term, ",")` parses a comma-separated list of terms, yielding a list of these terms.
*
- * @param p a `Parser' that is to be applied successively to the input
- * @param q a `Parser' that parses the elements that separate the elements parsed by `p'
- * @return A parser that returns a list of results produced by repeatedly applying `p' (interleaved
- * with `q') to the input.
- * The results of `p' are collected in a list. The results of `q' are discarded.
+ * @param p a `Parser` that is to be applied successively to the input
+ * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
+ * @return A parser that returns a list of results produced by repeatedly applying `p` (interleaved with `q`) to the input.
+ * The results of `p` are collected in a list. The results of `q` are discarded.
*/
def repsep[T](p: => Parser[T], q: => Parser[Any]): Parser[List[T]] =
rep1sep(p, q) | success(List())
/** A parser generator for non-empty repetitions.
*
- * <p> rep1(p) repeatedly uses `p' to parse the input until `p' fails -- `p' must succeed at least
- * once (the result is a `List' of the consecutive results of `p')</p>
+ * `rep1(p)` repeatedly uses `p` to parse the input until `p` fails -- `p` must succeed at least
+ * once (the result is a `List` of the consecutive results of `p`).
*
- * @param p a `Parser' that is to be applied successively to the input
- * @return A parser that returns a list of results produced by repeatedly applying `p' to the input
- * (and that only succeeds if `p' matches at least once).
+ * @param p a `Parser` that is to be applied successively to the input
+ * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
+ * (and that only succeeds if `p` matches at least once).
*/
def rep1[T](p: => Parser[T]): Parser[List[T]] = rep1(p, p)
/** A parser generator for non-empty repetitions.
*
- * <p> rep1(f, p) first uses `f' (which must succeed) and then repeatedly uses `p' to
- * parse the input until `p' fails
- * (the result is a `List' of the consecutive results of `f' and `p')</p>
+ * `rep1(f, p)` first uses `f` (which must succeed) and then repeatedly uses `p` to
+ * parse the input until `p` fails
+ * (the result is a `List` of the consecutive results of `f` and `p`).
*
- * @param first a `Parser' that parses the first piece of input
- * @param p0 a `Parser' that is to be applied successively to the rest of the input (if any) -- evaluated at most once, and only when necessary
- * @return A parser that returns a list of results produced by first applying `f' and then
- * repeatedly `p' to the input (it only succeeds if `f' matches).
+ * @param first a `Parser` that parses the first piece of input
+ * @param p0 a `Parser` that is to be applied successively to the rest of the input (if any) -- evaluated at most once, and only when necessary
+ * @return A parser that returns a list of results produced by first applying `f` and then
+ * repeatedly `p` to the input (it only succeeds if `f` matches).
*/
@migration(2, 9, "As of 2.9, the p0 call-by-name arguments is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
def rep1[T](first: => Parser[T], p0: => Parser[T]): Parser[List[T]] = Parser { in =>
@@ -607,13 +599,13 @@ trait Parsers {
/** A parser generator for a specified number of repetitions.
*
- * <p> repN(n, p) uses `p' exactly `n' time to parse the input
- * (the result is a `List' of the `n' consecutive results of `p')</p>
+ * `repN(n, p)` uses `p` exactly `n` times to parse the input
+ * (the result is a `List` of the `n` consecutive results of `p`).
*
- * @param p a `Parser' that is to be applied successively to the input
- * @param n the exact number of times `p' must succeed
- * @return A parser that returns a list of results produced by repeatedly applying `p' to the input
- * (and that only succeeds if `p' matches exactly `n' times).
+ * @param p a `Parser` that is to be applied successively to the input
+ * @param n the exact number of times `p` must succeed
+ * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
+ * (and that only succeeds if `p` matches exactly `n` times).
*/
def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
if (num == 0) success(Nil) else Parser { in =>
@@ -632,24 +624,24 @@ trait Parsers {
/** A parser generator for non-empty repetitions.
*
- * <p>rep1sep(p, q) repeatedly applies `p' interleaved with `q' to parse the input, until `p' fails.
- * The parser `p' must succeed at least once.</p>
+ * `rep1sep(p, q)` repeatedly applies `p` interleaved with `q` to parse the input, until `p` fails.
+ * The parser `p` must succeed at least once.
*
- * @param p a `Parser' that is to be applied successively to the input
- * @param q a `Parser' that parses the elements that separate the elements parsed by `p'
- * (interleaved with `q')
- * @return A parser that returns a list of results produced by repeatedly applying `p' to the input
- * (and that only succeeds if `p' matches at least once).
- * The results of `p' are collected in a list. The results of `q' are discarded.
+ * @param p a `Parser` that is to be applied successively to the input
+ * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
+ * (interleaved with `q`)
+ * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
+ * (and that only succeeds if `p` matches at least once).
+ * The results of `p` are collected in a list. The results of `q` are discarded.
*/
def rep1sep[T](p : => Parser[T], q : => Parser[Any]): Parser[List[T]] =
p ~ rep(q ~> p) ^^ {case x~y => x::y}
-
- /** A parser generator that, roughly, generalises the rep1sep generator so that `q', which parses the separator,
- * produces a left-associative function that combines the elements it separates.
+ /** A parser generator that, roughly, generalises the rep1sep generator so that `q`, which parses the separator,
+ * produces a left-associative function that combines the elements it separates.
*
- * <p> From: J. Fokker. Functional parsers. In J. Jeuring and E. Meijer, editors, Advanced Functional Programming, volume 925 of Lecture Notes in Computer Science, pages 1--23. Springer, 1995.</p>
+ * ''From: J. Fokker. Functional parsers. In J. Jeuring and E. Meijer, editors, Advanced Functional Programming,
+ * volume 925 of Lecture Notes in Computer Science, pages 1--23. Springer, 1995.''
*
* @param p a parser that parses the elements
* @param q a parser that parses the token(s) separating the elements, yielding a left-associative function that
@@ -658,7 +650,7 @@ trait Parsers {
def chainl1[T](p: => Parser[T], q: => Parser[(T, T) => T]): Parser[T]
= chainl1(p, p, q)
- /** A parser generator that, roughly, generalises the rep1sep generator so that `q', which parses the separator,
+ /** A parser generator that, roughly, generalises the `rep1sep` generator so that `q`, which parses the separator,
* produces a left-associative function that combines the elements it separates.
*
* @param first a parser that parses the first element
@@ -671,7 +663,7 @@ trait Parsers {
case x ~ xs => xs.foldLeft(x){(_, _) match {case (a, f ~ b) => f(a, b)}}
}
- /** A parser generator that generalises the rep1sep generator so that `q', which parses the separator,
+ /** A parser generator that generalises the `rep1sep` generator so that `q`, which parses the separator,
* produces a right-associative function that combines the elements it separates. Additionally,
* The right-most (last) element and the left-most combining function have to be supplied.
*
@@ -691,16 +683,16 @@ trait Parsers {
/** A parser generator for optional sub-phrases.
*
- * <p>opt(p) is a parser that returns `Some(x)' if `p' returns `x' and `None' if `p' fails</p>
+ * `opt(p)` is a parser that returns `Some(x)` if `p` returns `x` and `None` if `p` fails.
*
- * @param p A `Parser' that is tried on the input
- * @return a `Parser' that always succeeds: either with the result provided by `p' or
+ * @param p A `Parser` that is tried on the input
+ * @return a `Parser` that always succeeds: either with the result provided by `p` or
* with the empty result
*/
def opt[T](p: => Parser[T]): Parser[Option[T]] =
p ^^ (x => Some(x)) | success(None)
- /** Wrap a parser so that its failures&errors become success and vice versa -- it never consumes any input
+ /** Wrap a parser so that its failures and errors become success and vice versa -- it never consumes any input.
*/
def not[T](p: => Parser[T]): Parser[Unit] = Parser { in =>
p(in) match {
@@ -712,8 +704,8 @@ trait Parsers {
/** A parser generator for guard expressions. The resulting parser will fail or succeed
* just like the one given as parameter but it will not consume any input.
*
- * @param p a `Parser' that is to be applied to the input
- * @return A parser that returns success if and only if 'p' succeeds but never consumes any input
+ * @param p a `Parser` that is to be applied to the input
+ * @return A parser that returns success if and only if `p` succeeds but never consumes any input
*/
def guard[T](p: => Parser[T]): Parser[T] = Parser { in =>
p(in) match{
@@ -723,10 +715,10 @@ trait Parsers {
}
- /** `positioned' decorates a parser's result with the start position of the input it consumed.
+ /** `positioned` decorates a parser's result with the start position of the input it consumed.
*
- * @param p a `Parser' whose result conforms to `Positional'.
- * @return A parser that has the same behaviour as `p', but which marks its result with the
+ * @param p a `Parser` whose result conforms to `Positional`.
+ * @return A parser that has the same behaviour as `p`, but which marks its result with the
* start position of the input it consumed, if it didn't already have a position.
*/
def positioned[T <: Positional](p: => Parser[T]): Parser[T] = Parser { in =>
@@ -736,18 +728,14 @@ trait Parsers {
}
}
- /** <p>
- * A parser generator delimiting whole phrases (i.e. programs).
- * </p>
- * <p>
- * <code>phrase(p)</code> succeeds if <code>p</code> succeeds and
- * no input is left over after <code>p</code>.
- * </p>
+ /** A parser generator delimiting whole phrases (i.e. programs).
+ *
+ * `phrase(p)` succeeds if `p` succeeds and no input is left over after `p`.
*
* @param p the parser that must consume all input for the resulting parser
* to succeed.
- * @return a parser that has the same result as `p', but that only succeeds
- * if <code>p</code> consumed all the input.
+ * @return a parser that has the same result as `p`, but that only succeeds
+ * if `p` consumed all the input.
*/
def phrase[T](p: Parser[T]) = new Parser[T] {
lastNoSuccess = null
@@ -768,7 +756,7 @@ trait Parsers {
override def toString = "("+ _1 +"~"+ _2 +")"
}
- /** A parser whose ~ combinator disallows back-tracking.
+ /** A parser whose `~` combinator disallows back-tracking.
*/
trait OnceParser[+T] extends Parser[T] {
override def ~ [U](p: => Parser[U]): Parser[~[T, U]]
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index 134e7e6323..83155470e8 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -69,10 +69,10 @@ trait RegexParsers extends Parsers {
}
}
- /** `positioned' decorates a parser's result with the start position of the input it consumed.
+ /** `positioned` decorates a parser's result with the start position of the input it consumed.
* If whitespace is being skipped, then it is skipped before the start position is recorded.
*
- * @param p a `Parser' whose result conforms to `Positional'.
+ * @param p a `Parser` whose result conforms to `Positional`.
* @return A parser that has the same behaviour as `p', but which marks its result with the
* start position of the input it consumed after whitespace has been skipped, if it
* didn't already have a position.
@@ -91,27 +91,27 @@ trait RegexParsers extends Parsers {
override def phrase[T](p: Parser[T]): Parser[T] =
super.phrase(p <~ opt("""\z""".r))
- /** Parse some prefix of reader `in' with parser `p' */
+ /** Parse some prefix of reader `in` with parser `p`. */
def parse[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
p(in)
- /** Parse some prefix of character sequence `in' with parser `p' */
+ /** Parse some prefix of character sequence `in` with parser `p`. */
def parse[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
p(new CharSequenceReader(in))
- /** Parse some prefix of reader `in' with parser `p' */
+ /** Parse some prefix of reader `in` with parser `p`. */
def parse[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
p(new PagedSeqReader(PagedSeq.fromReader(in)))
- /** Parse all of reader `in' with parser `p' */
+ /** Parse all of reader `in` with parser `p`. */
def parseAll[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
parse(phrase(p), in)
- /** Parse all of reader `in' with parser `p' */
+ /** Parse all of reader `in` with parser `p`. */
def parseAll[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
parse(phrase(p), in)
- /** Parse all of character sequence `in' with parser `p' */
+ /** Parse all of character sequence `in` with parser `p`. */
def parseAll[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
parse(phrase(p), in)
}
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index 8c6be7f8eb..917a4d3a44 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -34,7 +34,7 @@ abstract class Lexical extends Scanners with Tokens {
/** A character-parser that matches a digit (and returns it)*/
def digit = elem("digit", _.isDigit)
- /** A character-parser that matches any character except the ones given in `cs' (and returns it)*/
+ /** A character-parser that matches any character except the ones given in `cs` (and returns it)*/
def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch !=)))
/** A character-parser that matches a white-space character (and returns it)*/
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 1b1a77771c..80331e4e7c 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -39,12 +39,12 @@ trait Scanners extends Parsers {
def whitespace: Parser[Any]
/** <p>
- * <code>Scanner</code> is essentially(*) a parser that produces `Token's
+ * `Scanner` is essentially(*) a parser that produces `Token`s
* from a stream of characters. The tokens it produces are typically
* passed to parsers in <code>TokenParsers</code>.
* </p>
* <p>
- * Note: (*) <code>Scanner</code> is really a `Reader' of `Token's
+ * Note: (*) `Scanner` is really a `Reader` of `Token`s
* </p>
*/
class Scanner(in: Reader[Char]) extends Reader[Token] {
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 8e93ccb173..8ff0ff4321 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -22,9 +22,9 @@ import collection.mutable.HashSet
* </p>
* <p>
* To distinguish between identifiers and keywords, it uses a set of reserved identifiers:
- * every string contained in `reserved' is returned as a keyword token.
+ * every string contained in `reserved` is returned as a keyword token.
* (Note that "=>" is hard-coded as a keyword.)
- * Additionally, the kinds of delimiters can be specified by the `delimiters' set.
+ * Additionally, the kinds of delimiters can be specified by the `delimiters` set.
* </p>
* <p>
* Usually this component is used to break character-based input into bigger tokens,
@@ -34,7 +34,7 @@ import collection.mutable.HashSet
* @author Martin Odersky, Iulian Dragos, Adriaan Moors
*/
class StdLexical extends Lexical with StdTokens {
- // see `token' in `Scanners'
+ // see `token` in `Scanners`
def token: Parser[Token] =
( identChar ~ rep( identChar | digit ) ^^ { case first ~ rest => processIdent(first :: rest mkString "") }
| digit ~ rep( digit ) ^^ { case first ~ rest => NumericLit(first :: rest mkString "") }
@@ -50,7 +50,7 @@ class StdLexical extends Lexical with StdTokens {
// legal identifier chars other than digits
def identChar = letter | elem('_')
- // see `whitespace in `Scanners'
+ // see `whitespace` in `Scanners`
def whitespace: Parser[Any] = rep(
whitespaceChar
| '/' ~ '*' ~ comment
@@ -63,7 +63,7 @@ class StdLexical extends Lexical with StdTokens {
| chrExcept(EofCh) ~ comment
)
- /** The set of reserved identifiers: these will be returned as `Keyword's */
+ /** The set of reserved identifiers: these will be returned as `Keyword`s */
val reserved = new HashSet[String]
/** The set of delimiters (ordering does not matter) */
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 5db62682f8..e494a69cf0 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -14,7 +14,7 @@ package syntactical
import token._
import lexical.StdLexical
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
+/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
* @author Martin Odersky, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 0938d587bf..c57fc14f5e 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -15,7 +15,7 @@ package syntactical
import token._
import collection.mutable.HashMap
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
+/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
* @author Martin Odersky, Adriaan Moors
*/
@@ -28,7 +28,7 @@ trait StdTokenParsers extends TokenParsers {
/** A parser which matches a single keyword token.
*
* @param chars The character string making up the matched keyword.
- * @return a `Parser' that matches the given string
+ * @return a `Parser` that matches the given string
*/
// implicit def keyword(chars: String): Parser[String] = accept(Keyword(chars)) ^^ (_.chars)
implicit def keyword(chars: String): Parser[String] =
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 430b01fda1..550589ea66 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -16,7 +16,7 @@ package syntactical
* @author Martin Odersky, Adriaan Moors
*/
trait TokenParsers extends Parsers {
- /** Tokens is the abstract type of the `Token's consumed by the parsers in this component*/
+ /** Tokens is the abstract type of the `Token`s consumed by the parsers in this component. */
type Tokens <: token.Tokens
/** lexical is the component responsible for consuming some basic kind of
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
index 8b96a2c241..490442482a 100644
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala
@@ -25,7 +25,7 @@ import scala.util.parsing.combinator.syntactical.TokenParsers
* </p><pre>
* <b>val</b> parser = syntactic.term</pre>
* <p>
- * (if MyParsers extends TokenParsers with a parser called `term')
+ * (if MyParsers extends TokenParsers with a parser called `term`)
* </p>
*
* @author Martin Odersky, Adriaan Moors
@@ -36,7 +36,7 @@ abstract class Tester {
val parser: syntactic.Parser[Any]
- /** Scans a String (using a `syntactic.lexical.Scanner'), parses it
+ /** Scans a String (using a `syntactic.lexical.Scanner`), parses it
* using <code>phrase(parser)</code>, and prints the input and the
* parsed result to the console.
*/
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index a6bea0a91a..4ce62659f6 100644
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -10,7 +10,7 @@ package scala.util.parsing
package combinator
package token
-/** This component provides the standard `Token's for a simple, Scala-like language.
+/** This component provides the standard `Token`s for a simple, Scala-like language.
*
* @author Martin Odersky, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index f5bbb953d2..ad05427fa1 100644
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -10,8 +10,8 @@ package scala.util.parsing
package combinator
package token
-/** This component provides the notion of `Token', the unit of information that is passed from lexical
- * parsers in the `Lexical' component to the parsers in the `TokenParsers' component.
+/** This component provides the notion of `Token`, the unit of information that is passed from lexical
+ * parsers in the `Lexical` component to the parsers in the `TokenParsers` component.
*
* @author Martin Odersky, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index 6e11435c51..67629eacd4 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -21,9 +21,9 @@ object CharArrayReader {
* from an array.
*
* @param source an array of characters
- * @param index starting offset into the array; the first element returned will be `source(index)'
- * @param line the line number of the first element (counting from index `0' of `source')
- * @param column the column number of the first element (counting from index `0' of `source')
+ * @param index starting offset into the array; the first element returned will be `source(index)`
+ * @param line the line number of the first element (counting from index `0` of `source`)
+ * @param column the column number of the first element (counting from index `0` of `source`)
*
* @author Martin Odersky, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 864d8972d1..1df1fa050e 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -45,23 +45,23 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** The column number referred to by the position; column numbers start at 1 */
def column: Int = offset - index(line - 1) + 1
- /** The contents of the line numbered `lnum' (must not contain a new-line character).
+ /** The contents of the line numbered `lnum` (must not contain a new-line character).
*
- * @param lnum a 1-based integer index into the `document'
- * @return the line at `lnum' (not including a newline)
+ * @param lnum a 1-based integer index into the `document`
+ * @return the line at `lnum` (not including a newline)
*/
def lineContents: String =
source.subSequence(index(line - 1), index(line)).toString
- /** Returns a string representation of the `Position', of the form `line.column' */
+ /** Returns a string representation of the `Position`, of the form `line.column` */
override def toString = line+"."+column
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
*
- * @param `that' a `Position' to compare to this `Position'
- * @return true if this position's line or (in case of a tie wrt. line numbers)
- * its column is smaller than the corresponding components of `that'
+ * @param that a `Position` to compare to this `Position`
+ * @return true if this position's line number or (in case of equal line numbers)
+ * column is smaller than the corresponding components of `that`
*/
override def <(that: Position) = that match {
case OffsetPosition(_, that_offset) =>
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index bb5180c2c4..b25f03667d 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -15,8 +15,8 @@ package scala.util.parsing.input
* <p>
* It provides functionality for:
* </p><ul>
- * <li> generating a visual representation of this position (`longString');
- * <li> comparing two positions (`<').
+ * <li> generating a visual representation of this position (`longString`);
+ * <li> comparing two positions (`<`).
* </ul>
* <p>
* To use this class for a concrete kind of ``document'', implement the
@@ -33,14 +33,14 @@ trait Position {
/** The column number referred to by the position; column numbers start at 1 */
def column: Int
- /** The contents of the line numbered `lnum' (must not contain a new-line character).
+ /** The contents of the line numbered `lnum` (must not contain a new-line character).
*
- * @param lnum a 1-based integer index into the `document'
- * @return the line at `lnum' (not including a newline)
+ * @param lnum a 1-based integer index into the `document`
+ * @return the line at `lnum` (not including a newline)
*/
protected def lineContents: String
- /** Returns a string representation of the `Position', of the form `line.column' */
+ /** Returns a string representation of the `Position`, of the form `line.column` */
override def toString = ""+line+"."+column
/** Returns a more ``visual'' representation of this position.
@@ -58,9 +58,9 @@ trait Position {
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
*
- * @param `that' a `Position' to compare to this `Position'
- * @return true if this position's line or (in case of a tie wrt. line numbers)
- * its column is smaller than the corresponding components of `that'
+ * @param that a `Position` to compare to this `Position`
+ * @return true if this position's line number or (in case of equal line numbers)
+ * column is smaller than the corresponding components of `that`
*/
def <(that: Position) = {
this.line < that.line ||
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
index 5824b5a55e..ff9d81d4e5 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/library/scala/util/parsing/input/Positional.scala
@@ -17,7 +17,7 @@ trait Positional {
/** The source position of this object, initially set to undefined. */
var pos: Position = NoPosition
- /** If current source position is undefined, update it with given position `newpos'
+ /** If current source position is undefined, update it with given position `newpos`
* @return the object itself
*/
def setPos(newpos: Position): this.type = {
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index 176e3afb74..1905b3dcdf 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -32,7 +32,7 @@ object StreamReader {
*
* NOTE:
* StreamReaders do not really fulfill the new contract for readers, which
- * requires a `source' CharSequence representing the full input.
+ * requires a `source` CharSequence representing the full input.
* Instead source is treated line by line.
* As a consequence, regex matching cannot extend beyond a single line
* when a StreamReader are used for input.
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
index d3a57194da..0ed5e3f3bb 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
@@ -726,7 +726,7 @@ abstract class ILPrinterVisitor extends Visitor {
val ta = ct.typeArgs(i)
val sigOpt = primitive.get(ta)
if (sigOpt.isDefined) print(sigOpt.get)
- else printTypeName(ta); /* should be printSignature, but don't want `class' or `valuetype'
+ else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype`
appearing before a type param usage. */
i = i + 1;
if (i < ct.typeArgs.length) {
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index 995c629b5d..f13192f267 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -350,5 +350,5 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* AnnotArg = Tree | Constant
* ConstAnnotArg = Constant | AnnotInfo | AnnotArgArray
*
- * len is remaining length after `len'.
+ * len is remaining length after `len`.
*/