author     Paul Phillips <paulp@improving.org>  2012-02-29 11:13:44 -0800
committer  Paul Phillips <paulp@improving.org>  2012-02-29 11:13:44 -0800
commit     a183c6ad31011b4fb1785655dd3d671b8f5bb519 (patch)
tree       bbedd1f23b437d36e4796f1ca7ec40dcc96c18e1
parent     fc2866efee1bcf17aee18d427ed41e172f440f62 (diff)
Whitespace commit.
Removed all the trailing whitespace to make Eugene happier. Will try to keep it that way by protecting at the merge level. Left the tabs in place because they can't be uniformly changed to spaces: some are 2, some are 4, some are 8, whee.
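The message says the whitespace was stripped wholesale and that the author intends to keep it out by guarding at the merge level; the guard itself is not part of this commit. Purely as an illustrative sketch (not the tooling actually used, and with a made-up object name), a small Scala program along these lines could remove trailing spaces and tabs from every .scala file under a source root:

import java.io.{File, PrintWriter}
import scala.io.Source

// Hypothetical helper, not part of the Scala build: strips trailing
// spaces/tabs from .scala sources under a directory. Leading tabs are
// left alone, matching the commit's policy of not retabbing.
object StripTrailingWhitespace {

  // Recursively collect all .scala files under `root`.
  def scalaFiles(root: File): Seq[File] = {
    val children = Option(root.listFiles).map(_.toSeq).getOrElse(Seq.empty[File])
    children.flatMap { f =>
      if (f.isDirectory) scalaFiles(f)
      else if (f.getName.endsWith(".scala")) Seq(f)
      else Seq.empty[File]
    }
  }

  // Rewrite a file without trailing whitespace; returns true if it changed.
  // Note: PrintWriter.println uses the platform line separator, which is
  // fine for a sketch but not byte-for-byte faithful to the originals.
  def strip(file: File): Boolean = {
    val source = Source.fromFile(file, "UTF-8")
    val lines = try source.getLines().toList finally source.close()
    val cleaned = lines.map(_.replaceAll("[ \t]+$", ""))
    val changed = cleaned != lines
    if (changed) {
      val out = new PrintWriter(file, "UTF-8")
      try cleaned.foreach(out.println) finally out.close()
    }
    changed
  }

  def main(args: Array[String]): Unit = {
    val root = new File(args.headOption.getOrElse("src"))
    val touched = scalaFiles(root).filter(strip)
    println("stripped trailing whitespace in " + touched.size + " file(s)")
  }
}

A report-only variant of the same check, wired into a pre-commit or merge hook, would give the protection the message alludes to.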
-rw-r--r--  src/compiler/scala/reflect/internal/BaseTypeSeqs.scala | 12
-rw-r--r--  src/compiler/scala/reflect/internal/ClassfileConstants.scala | 6
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 40
-rw-r--r--  src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/HasFlags.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Kinds.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/NameManglers.scala | 12
-rw-r--r--  src/compiler/scala/reflect/internal/Names.scala | 20
-rw-r--r--  src/compiler/scala/reflect/internal/Scopes.scala | 8
-rw-r--r--  src/compiler/scala/reflect/internal/StdNames.scala | 10
-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala | 4
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala | 76
-rw-r--r--  src/compiler/scala/reflect/internal/TreeInfo.scala | 4
-rw-r--r--  src/compiler/scala/reflect/internal/Trees.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 160
-rw-r--r--  src/compiler/scala/reflect/internal/util/Collections.scala | 10
-rw-r--r--  src/compiler/scala/reflect/runtime/ConversionUtil.scala | 4
-rw-r--r--  src/compiler/scala/reflect/runtime/Mirror.scala | 10
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedOps.scala | 20
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala | 38
-rw-r--r--  src/compiler/scala/reflect/runtime/SynchronizedTypes.scala | 66
-rw-r--r--  src/compiler/scala/tools/ant/Scaladoc.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/MacroContext.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/SubComponent.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Tokens.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Changes.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ILoop.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Imports.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Power.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ReplVals.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/Reporter.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolTable.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LazyVals.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 28
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 2
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 2
-rw-r--r--  src/compiler/scala/tools/util/EditDistance.scala | 2
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 16
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 2
-rw-r--r--  src/library/scala/Enumeration.scala | 6
-rw-r--r--  src/library/scala/Function0.scala | 8
-rw-r--r--  src/library/scala/Function1.scala | 8
-rw-r--r--  src/library/scala/Function2.scala | 6
-rw-r--r--  src/library/scala/PartialFunction.scala | 10
-rw-r--r--  src/library/scala/Product1.scala | 4
-rw-r--r--  src/library/scala/Product10.scala | 4
-rw-r--r--  src/library/scala/Product11.scala | 4
-rw-r--r--  src/library/scala/Product12.scala | 4
-rw-r--r--  src/library/scala/Product13.scala | 4
-rw-r--r--  src/library/scala/Product14.scala | 4
-rw-r--r--  src/library/scala/Product15.scala | 4
-rw-r--r--  src/library/scala/Product16.scala | 4
-rw-r--r--  src/library/scala/Product17.scala | 4
-rw-r--r--  src/library/scala/Product18.scala | 4
-rw-r--r--  src/library/scala/Product19.scala | 4
-rw-r--r--  src/library/scala/Product2.scala | 4
-rw-r--r--  src/library/scala/Product20.scala | 4
-rw-r--r--  src/library/scala/Product21.scala | 4
-rw-r--r--  src/library/scala/Product22.scala | 4
-rw-r--r--  src/library/scala/Product3.scala | 4
-rw-r--r--  src/library/scala/Product4.scala | 4
-rw-r--r--  src/library/scala/Product5.scala | 4
-rw-r--r--  src/library/scala/Product6.scala | 4
-rw-r--r--  src/library/scala/Product7.scala | 4
-rw-r--r--  src/library/scala/Product8.scala | 4
-rw-r--r--  src/library/scala/Product9.scala | 4
-rw-r--r--  src/library/scala/Specializable.scala | 2
-rw-r--r--  src/library/scala/StringContext.scala | 16
-rw-r--r--  src/library/scala/Tuple1.scala | 2
-rw-r--r--  src/library/scala/Tuple10.scala | 2
-rw-r--r--  src/library/scala/Tuple11.scala | 2
-rw-r--r--  src/library/scala/Tuple12.scala | 2
-rw-r--r--  src/library/scala/Tuple13.scala | 2
-rw-r--r--  src/library/scala/Tuple14.scala | 2
-rw-r--r--  src/library/scala/Tuple15.scala | 2
-rw-r--r--  src/library/scala/Tuple16.scala | 2
-rw-r--r--  src/library/scala/Tuple17.scala | 2
-rw-r--r--  src/library/scala/Tuple18.scala | 2
-rw-r--r--  src/library/scala/Tuple19.scala | 2
-rw-r--r--  src/library/scala/Tuple2.scala | 2
-rw-r--r--  src/library/scala/Tuple20.scala | 2
-rw-r--r--  src/library/scala/Tuple21.scala | 2
-rw-r--r--  src/library/scala/Tuple22.scala | 2
-rw-r--r--  src/library/scala/Tuple3.scala | 2
-rw-r--r--  src/library/scala/Tuple4.scala | 2
-rw-r--r--  src/library/scala/Tuple5.scala | 2
-rw-r--r--  src/library/scala/Tuple6.scala | 2
-rw-r--r--  src/library/scala/Tuple7.scala | 2
-rw-r--r--  src/library/scala/Tuple8.scala | 2
-rw-r--r--  src/library/scala/Tuple9.scala | 2
-rw-r--r--  src/library/scala/annotation/elidable.scala | 4
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/generic/MutableSortedSetFactory.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/BitSet.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/AVLTree.scala | 26
-rw-r--r--  src/library/scala/collection/mutable/Ctrie.scala | 248
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 40
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/SortedSet.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/TreeSet.scala | 14
-rw-r--r--  src/library/scala/collection/parallel/Combiner.scala | 6
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 50
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/RemainsIterator.scala | 16
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 10
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParCtrie.scala | 78
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 14
-rw-r--r--  src/library/scala/concurrent/Channel.scala | 6
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 30
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 8
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 56
-rw-r--r--  src/library/scala/concurrent/Future.scala | 194
-rw-r--r--  src/library/scala/concurrent/JavaConversions.scala | 6
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 64
-rw-r--r--  src/library/scala/concurrent/Task.scala | 6
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 20
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 74
-rw-r--r--  src/library/scala/concurrent/package.scala | 14
-rw-r--r--  src/library/scala/reflect/ReflectionUtils.scala | 4
-rw-r--r--  src/library/scala/reflect/api/Mirror.scala | 4
-rw-r--r--  src/library/scala/reflect/api/Modifier.scala | 2
-rwxr-xr-x  src/library/scala/reflect/api/Names.scala | 4
-rwxr-xr-x  src/library/scala/reflect/api/Symbols.scala | 10
-rw-r--r--  src/library/scala/reflect/api/TreePrinters.scala | 8
-rwxr-xr-x  src/library/scala/reflect/api/Types.scala | 2
-rw-r--r--  src/library/scala/reflect/macro/Context.scala | 4
-rw-r--r--  src/library/scala/specialized.scala | 2
-rw-r--r--  src/library/scala/sys/process/BasicIO.scala | 2
-rw-r--r--  src/library/scala/util/Properties.scala | 2
-rw-r--r--  src/library/scala/util/Try.scala | 32
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 2
-rw-r--r--  src/manual/scala/tools/docutil/EmitManPage.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/PathSettings.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala | 2
177 files changed, 1167 insertions, 1168 deletions
diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
index 9e5c93753f..3753a45133 100644
--- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
@@ -29,12 +29,12 @@ trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
- protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems)
/** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
- * This is necessary because when run from reflection every base type sequence needs to have a
- * SynchronizedBaseTypeSeq as mixin.
+ * This is necessary because when run from reflection every base type sequence needs to have a
+ * SynchronizedBaseTypeSeq as mixin.
*/
class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
@@ -242,7 +242,7 @@ trait BaseTypeSeqs {
// Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG
newBaseTypeSeq(parents, elems)
}
-
+
class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) {
override def apply(i: Int) = f(orig.apply(i))
override def rawElem(i: Int) = f(orig.rawElem(i))
@@ -254,7 +254,7 @@ trait BaseTypeSeqs {
override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max
override def toString = elems.mkString("MBTS(", ",", ")")
- }
-
+ }
+
val CyclicInheritance = new Throwable
}
diff --git a/src/compiler/scala/reflect/internal/ClassfileConstants.scala b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
index 1c4c007de0..eec72d082d 100644
--- a/src/compiler/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
@@ -360,7 +360,7 @@ object ClassfileConstants {
res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
res
}
-
+
def classFlags(jflags: Int): Long = {
initFields(jflags)
isClass = true
@@ -376,11 +376,11 @@ object ClassfileConstants {
}
}
object FlagTranslation extends FlagTranslation { }
-
+
def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
-
+
@deprecated("Use another method in this object", "2.10.0")
def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
if (isClass) toScalaClassFlags(flags)
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 974eb1442c..ec171c5f2c 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -21,7 +21,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
* methods.
*/
private type PolyMethodCreator = List[Symbol] => (Option[List[Type]], Type)
-
+
private def newClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): Symbol = {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
@@ -164,7 +164,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val RuntimePackage = getRequiredModule("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass
-
+
lazy val JavaLangEnumClass = getRequiredClass("java.lang.Enum")
// convenient one-argument parameter lists
@@ -176,10 +176,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def booltype = BooleanClass.typeConstructor
private def inttype = IntClass.typeConstructor
private def stringtype = StringClass.typeConstructor
-
+
// Java types
def javaTypeName(jclazz: Class[_]): TypeName = newTypeName(jclazz.getName)
-
+
def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
case java.lang.Void.TYPE => UnitClass
case java.lang.Byte.TYPE => ByteClass
@@ -213,7 +213,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
lazy val AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef)
- @deprecated("Use AnyRefModule", "2.10.0")
+ @deprecated("Use AnyRefModule", "2.10.0")
def Predef_AnyRef = AnyRefModule
// bottom types
@@ -269,13 +269,13 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val PredefModule: Symbol = getRequiredModule("scala.Predef")
lazy val PredefModuleClass = PredefModule.moduleClass
-
+
def Predef_classOf = getMember(PredefModule, nme.classOf)
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray)
def Predef_??? = getMember(PredefModule, nme.???)
-
+
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
* which does a member lookup (it can't be a lazy val because we might reload Predef
@@ -284,7 +284,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isPredefMemberNamed(sym: Symbol, name: Name) = (
(sym.name == name) && (sym.owner == PredefModule.moduleClass)
)
-
+
/** Specialization.
*/
lazy val SpecializableModule = getRequiredModule("scala.Specializable")
@@ -429,7 +429,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case m: ClassManifest[_] =>
val sym = manifestToSymbol(m)
val args = m.typeArguments
-
+
if ((sym eq NoSymbol) || args.isEmpty) sym.tpe
else appliedType(sym.typeConstructor, args map manifestToType)
case _ =>
@@ -439,7 +439,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def manifestToSymbol(m: ClassManifest[_]): Symbol = m match {
case x: scala.reflect.AnyValManifest[_] =>
getMember(ScalaPackageClass, newTypeName("" + x))
- case _ =>
+ case _ =>
val name = m.erasure.getName
if (name endsWith nme.MODULE_SUFFIX_STRING)
getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING)
@@ -499,7 +499,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case DoubleClass => nme.wrapDoubleArray
case BooleanClass => nme.wrapBooleanArray
case UnitClass => nme.wrapUnitArray
- case _ =>
+ case _ =>
if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
else nme.genericWrapArray
}
@@ -605,7 +605,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def ClassType(arg: Type) =
if (phase.erasedTypes || forMSIL) ClassClass.tpe
else appliedType(ClassClass.typeConstructor, List(arg))
-
+
def vmClassType(arg: Type): Type = ClassType(arg)
def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
@@ -658,7 +658,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => false
})
}
-
+
// members of class scala.Any
lazy val Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
lazy val Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
@@ -796,10 +796,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
while (result.isAliasType) result = result.info.typeSymbol
result
}
-
+
def getRequiredModule(fullname: String): Symbol =
getModule(newTermNameCached(fullname))
- def getRequiredClass(fullname: String): Symbol =
+ def getRequiredClass(fullname: String): Symbol =
getClass(newTypeNameCached(fullname))
def getClassIfDefined(fullname: String): Symbol =
@@ -851,15 +851,15 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol =
owner.newAliasType(name) setInfoAndEnter alias
-
+
private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): Symbol = {
val clazz = newClass(ScalaPackageClass, name, Nil)
val tparam = clazz.newSyntheticTypeParam("T0", flags)
val parents = List(AnyRefClass.tpe, parentFn(tparam))
-
+
clazz setInfo polyType(List(tparam), ClassInfoType(parents, newScope, clazz))
}
-
+
def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): Symbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val tparams = msym.newSyntheticTypeParams(typeParamCount)
@@ -870,7 +870,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
msym setInfoAndEnter polyType(tparams, mtpe)
}
-
+
/** T1 means one type parameter.
*/
def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = {
@@ -977,7 +977,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
-
+
val forced = List( // force initialization of every symbol that is entered as a side effect
AnnotationDefaultAttr, // #2264
RepeatedParamClass,
diff --git a/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
index 47f794681c..f1fe4fc118 100644
--- a/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/compiler/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -14,7 +14,7 @@ import util._
*/
trait ExistentialsAndSkolems {
self: SymbolTable =>
-
+
/** Map a list of type parameter symbols to skolemized symbols, which
* can be deskolemized to the original type parameter. (A skolem is a
* representation of a bound variable when viewed inside its scope.)
diff --git a/src/compiler/scala/reflect/internal/HasFlags.scala b/src/compiler/scala/reflect/internal/HasFlags.scala
index ec4e919bdc..8affd66cd5 100644
--- a/src/compiler/scala/reflect/internal/HasFlags.scala
+++ b/src/compiler/scala/reflect/internal/HasFlags.scala
@@ -136,7 +136,7 @@ trait HasFlags {
/** Whether this entity has NONE of the flags in the given mask.
*/
def hasNoFlags(mask: Long): Boolean = !hasFlag(mask)
-
+
protected def isSetting(f: Long, mask: Long) = !hasFlag(f) && ((mask & f) != 0L)
protected def isClearing(f: Long, mask: Long) = hasFlag(f) && ((mask & f) != 0L)
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
index e675be43dc..23bff950b8 100644
--- a/src/compiler/scala/reflect/internal/Kinds.scala
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -128,7 +128,7 @@ trait Kinds {
// @M sometimes hkargs != arg.typeParams, the symbol and the type may
// have very different type parameters
val hkparams = param.typeParams
-
+
def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
if (!cond)
kindErrors = f(kindErrors)
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index e43a0906a8..12f56976c9 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -22,10 +22,10 @@ trait NameManglers {
val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
-
+
val MODULE_SUFFIX_NAME: TermName = newTermName(MODULE_SUFFIX_STRING)
val NAME_JOIN_NAME: TermName = newTermName(NAME_JOIN_STRING)
-
+
def flattenedName(segments: Name*): NameType = compactedString(segments mkString NAME_JOIN_STRING)
/**
@@ -79,7 +79,7 @@ trait NameManglers {
val SUPER_PREFIX_STRING = "super$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
val SETTER_SUFFIX: TermName = encode("_=")
-
+
@deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
@deprecated("2.10.0", "Use SPECIALIZED_SUFFIX")
@@ -121,13 +121,13 @@ trait NameManglers {
name.subName(i, name.length)
} else name
}
-
+
def unspecializedName(name: Name): Name = (
if (name endsWith SPECIALIZED_SUFFIX)
name.subName(0, name.lastIndexOf('m') - 1)
else name
)
-
+
def macroMethodName(name: Name) = {
val base = if (name.isTypeName) nme.TYPEkw else nme.DEFkw
base append nme.MACRO append name
@@ -158,7 +158,7 @@ trait NameManglers {
def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
-
+
def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
def setterToGetter(name: TermName): TermName = {
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index e6ca4c49ba..5f38374f20 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -73,7 +73,7 @@ trait Names extends api.Names {
/** Create a term name from the characters in cs[offset..offset+len-1]. */
def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
newTermName(cs, offset, len, cachedString = null)
-
+
def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
@@ -87,7 +87,7 @@ trait Names extends api.Names {
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
n = n.next
-
+
if (n ne null) n
else {
// The logic order here is future-proofing against the possibility
@@ -135,7 +135,7 @@ trait Names extends api.Names {
/** The name class.
* TODO - resolve schizophrenia regarding whether to treat Names as Strings
- * or Strings as Names. Give names the key functions the absence of which
+ * or Strings as Names. Give names the key functions the absence of which
* make people want Strings all the time.
*/
sealed abstract class Name(protected val index: Int, protected val len: Int) extends AbsName with Function1[Int, Char] {
@@ -166,7 +166,7 @@ trait Names extends api.Names {
/** Return a new name of the same variety. */
def newName(str: String): ThisNameType
-
+
/** Return a new name based on string transformation. */
def mapName(f: String => String): ThisNameType = newName(f(toString))
@@ -357,7 +357,7 @@ trait Names extends api.Names {
def dropRight(n: Int) = subName(0, len - n)
def drop(n: Int) = subName(n, len)
-
+
def indexOf(ch: Char) = {
val idx = pos(ch)
if (idx == length) -1 else idx
@@ -382,7 +382,7 @@ trait Names extends api.Names {
}
newTermName(cs, 0, len)
}
-
+
/** TODO - reconcile/fix that encode returns a Name but
* decode returns a String.
*/
@@ -393,7 +393,7 @@ trait Names extends api.Names {
def encoded: String = "" + encode
// def decodedName: ThisNameType = newName(decoded)
def encodedName: ThisNameType = encode
-
+
/** Replace operator symbols by corresponding $op_name. */
def encode: ThisNameType = {
val str = toString
@@ -425,7 +425,7 @@ trait Names extends api.Names {
def longString: String = nameKind + " " + decode
def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
}
-
+
/** A name that contains no operator chars nor dollar signs.
* TODO - see if it's any faster to do something along these lines.
*/
@@ -461,7 +461,7 @@ trait Names extends api.Names {
sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
-
+
var next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
@@ -488,7 +488,7 @@ trait Names extends api.Names {
sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
-
+
var next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala
index 37464ebf29..ef48d6102f 100644
--- a/src/compiler/scala/reflect/internal/Scopes.scala
+++ b/src/compiler/scala/reflect/internal/Scopes.scala
@@ -38,11 +38,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
- * This is necessary because when run from reflection every scope needs to have a
- * SynchronizedScope as mixin.
+ * This is necessary because when run from reflection every scope needs to have a
+ * SynchronizedScope as mixin.
*/
class Scope protected[Scopes] (initElems: ScopeEntry = null) extends Iterable[Symbol] {
-
+
protected[Scopes] def this(base: Scope) = {
this(base.elems)
nestinglevel = base.nestinglevel + 1
@@ -319,7 +319,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** Create a new scope */
def newScope: Scope = new Scope()
-
+
/** Create a new scope nested in another one with which it shares its elements */
def newNestedScope(outer: Scope): Scope = new Scope(outer)
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index bcd3fc8b14..ef2114b608 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -246,7 +246,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val x_7 : NameType = "x$7"
val x_8 : NameType = "x$8"
val x_9 : NameType = "x$9"
-
+
@switch def syntheticParamName(i: Int): TermName = i match {
case 0 => nme.x_0
case 1 => nme.x_1
@@ -260,9 +260,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case 9 => nme.x_9
case _ => newTermName("x$" + i)
}
-
+
val ??? = encode("???")
-
+
val wrapRefArray: NameType = "wrapRefArray"
val wrapByteArray: NameType = "wrapByteArray"
val wrapShortArray: NameType = "wrapShortArray"
@@ -618,7 +618,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val testLessOrEqualThan: NameType = "testLessOrEqualThan"
val testLessThan: NameType = "testLessThan"
val testNotEqual: NameType = "testNotEqual"
-
+
val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean"
val isBoxedNumber: NameType = "isBoxedNumber"
@@ -670,7 +670,7 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case `toDouble` => toDouble
case _ => NO_NAME
}
-
+
val reflPolyCacheName: NameType = "reflPoly$Cache"
val reflClassCacheName: NameType = "reflClass$Cache"
val reflParamsCacheName: NameType = "reflParams$Cache"
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 4bcf522a8f..7745b21aad 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -42,7 +42,7 @@ abstract class SymbolTable extends api.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
-
+
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
@@ -285,7 +285,7 @@ abstract class SymbolTable extends api.Universe
/** The phase which has given index as identifier. */
val phaseWithId: Array[Phase]
-
+
/** Is this symbol table part of reflexive mirror? In this case
* operations need to be made thread safe.
*/
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index ce85d65050..334436bfbe 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -17,7 +17,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
protected var ids = 0
-
+
val emptySymbolArray = new Array[Symbol](0)
def symbolCount = ids // statistics
@@ -38,14 +38,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
nextexid += 1
newTypeName("_" + nextexid + suffix)
}
-
+
// Set the fields which point companions at one another. Returns the module.
def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
moduleClass.sourceModule = m
m setModuleClass moduleClass
m
}
-
+
/** Create a new free variable. Its owner is NoSymbol.
*/
def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
@@ -67,7 +67,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def selfType: Type = typeOfThis
def typeSignature: Type = info
def typeSignatureIn(site: Type): Type = site memberInfo this
-
+
def asType: Type = tpe
def asTypeIn(site: Type): Type = site.memberType(this)
def asTypeConstructor: Type = typeConstructor
@@ -89,19 +89,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
private[this] var _rawname = initName
private[this] var _rawflags = 0L
-
+
def rawowner = _rawowner
def rawname = _rawname
def rawflags = _rawflags
-
+
protected def rawflags_=(x: FlagsType) { _rawflags = x }
-
+
private var rawpos = initPos
-
+
val id = nextId() // identity displayed when -uniqid
private[this] var _validTo: Period = NoPeriod
-
+
def validTo = _validTo
def validTo_=(x: Period) { _validTo = x}
@@ -179,10 +179,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
new TermSymbol(this, pos, name) initFlags newFlags
-
+
def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
new AbstractTypeSymbol(this, pos, name) initFlags newFlags
-
+
def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
new AliasTypeSymbol(this, pos, name) initFlags newFlags
@@ -194,10 +194,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
new ClassSymbol(this, pos, name) initFlags newFlags
-
+
def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
new ModuleClassSymbol(this, pos, name) initFlags newFlags
-
+
/** Derive whether it is an abstract type from the flags; after creation
* the DEFERRED flag will be ignored.
*/
@@ -206,7 +206,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
newAliasTypeSymbol(name, pos, newFlags)
else
newAbstractTypeSymbol(name, pos, newFlags)
-
+
def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
if ((newFlags & DEFERRED) == 0L)
new TypeSkolem(this, pos, name, origin) initFlags newFlags
@@ -243,7 +243,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
newAliasTypeSymbol(name, pos, newFlags)
-
+
/** Symbol of an abstract type type T >: ... <: ...
*/
final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
@@ -261,7 +261,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def freshName() = { cnt += 1; nme.syntheticParamName(cnt) }
mmap(argtypess)(tp => newValueParameter(freshName(), focusPos(owner.pos), SYNTHETIC) setInfo tp)
}
-
+
def newSyntheticTypeParam(): Symbol = newSyntheticTypeParam("T0", 0L)
def newSyntheticTypeParam(name: String, newFlags: Long): Symbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
def newSyntheticTypeParams(num: Int): List[Symbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
@@ -302,7 +302,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
newClassSymbol(name, pos, newFlags)
-
+
/** A new class with its info set to a ClassInfoType with given scope and parents. */
def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L) = {
val clazz = newClass(name, pos, newFlags)
@@ -354,9 +354,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newAliasType(pos: Position, name: TypeName): Symbol = newAliasType(name, pos)
@deprecated("Use the other signature", "2.10.0")
def newAbstractType(pos: Position, name: TypeName): Symbol = newAbstractType(name, pos)
- @deprecated("Use the other signature", "2.10.0")
+ @deprecated("Use the other signature", "2.10.0")
def newExistential(pos: Position, name: TypeName): Symbol = newExistential(name, pos)
- @deprecated("Use the other signature", "2.10.0")
+ @deprecated("Use the other signature", "2.10.0")
def newMethod(pos: Position, name: TermName): MethodSymbol = newMethod(name, pos)
// ----- locking and unlocking ------------------------------------------------------
@@ -838,7 +838,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def addModuleSuffix(n: Name): Name =
if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
-
+
def moduleSuffix: String = (
if (needsModuleSuffix) nme.MODULE_SUFFIX_STRING
else ""
@@ -846,7 +846,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Whether this symbol needs nme.MODULE_SUFFIX_STRING (aka $) appended on the java platform.
*/
def needsModuleSuffix = (
- hasModuleFlag
+ hasModuleFlag
&& !isMethod
&& !isImplClass
&& !isJavaDefined
@@ -873,7 +873,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (owner.isEffectiveRoot) name
else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
)
-
+
def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
/** The encoded full path name of this symbol, where outer names and inner names
@@ -1028,7 +1028,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else modifyInfo(_.substSym(syms0, syms1))
def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
-
+
/** Set the info and enter this symbol into the owner's scope. */
def setInfoAndEnter(info: Type): this.type = {
setInfo(info)
@@ -1337,7 +1337,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def isNestedIn(that: Symbol): Boolean =
owner == that || owner != NoSymbol && (owner isNestedIn that)
-
+
/** Is this class symbol a subclass of that symbol,
* and is this class symbol also different from Null or Nothing? */
def isNonBottomSubClass(that: Symbol): Boolean = false
@@ -1398,10 +1398,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
if (newName != nme.NO_NAME)
clone.name = newName
-
+
clone
}
-
+
/** Internal method to clone a symbol's implementation with the given flags and no info. */
def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol
def cloneSymbolImpl(owner: Symbol): Symbol = cloneSymbolImpl(owner, 0L)
@@ -1594,7 +1594,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
@deprecated("Use enclosingTopLevelClass")
def toplevelClass: Symbol = enclosingTopLevelClass
-
+
/** The top-level class containing this symbol. */
def enclosingTopLevelClass: Symbol =
if (owner.isPackageClass) {
@@ -1870,7 +1870,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Remove any access boundary and clear flags PROTECTED | PRIVATE.
*/
def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags
-
+
/** The first parameter to the first argument list of this method,
* or NoSymbol if inapplicable.
*/
@@ -2154,7 +2154,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def referenced: Symbol = _referenced
def referenced_=(x: Symbol) { _referenced = x }
-
+
def existentialBound = singletonBounds(this.tpe)
def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
@@ -2248,7 +2248,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isMethod && needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname)
-
+
flatname
}
else rawname.toTermName
@@ -2284,7 +2284,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
res
}
}
-
+
class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
// Temporary programmatic help tracking down who might do such a thing
@@ -2299,13 +2299,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AliasTypeSymbol =
owner.newAliasTypeSymbol(name, pos, newFlags)
}
-
+
class AbstractTypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) with AbstractTypeMixin {
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AbstractTypeSymbol =
owner.newAbstractTypeSymbol(name, pos, newFlags)
}
-
+
/** Might be mixed into TypeSymbol or TypeSkolem.
*/
trait AbstractTypeMixin extends TypeSymbol {
@@ -2503,7 +2503,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final override def isNonClassType = false
final override def isAbstractType = false
final override def isAliasType = false
-
+
override def existentialBound = polyType(this.typeParams, TypeBounds.upper(this.classBound))
override def sourceFile =
@@ -2531,19 +2531,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
thisTypeCache
}
-
+
override def owner: Symbol =
if (needsFlatClasses) rawowner.owner else rawowner
override def name: TypeName = (
if (needsFlatClasses) {
if (flatname eq null)
flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
-
+
flatname
}
else rawname.toTypeName
)
-
+
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2728,7 +2728,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val syms1 = cloneSymbolsAtOwner(syms, owner)
creator(syms1, tpe.substSym(syms, syms1))
}
-
+
/** A deep map on a symbol's paramss.
*/
def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index e3ee39d2a0..3252b970d1 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -146,7 +146,7 @@ abstract class TreeInfo {
true
}
-
+
/**
* Selects the correct parameter list when there are nested applications.
* Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
@@ -175,7 +175,7 @@ abstract class TreeInfo {
}
def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
- case _ =>
+ case _ =>
}
/** Is symbol potentially a getter of a variable?
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 3782b24c02..54cc53aaac 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -121,7 +121,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ChangeOwnerTraverser(oldOwner, newOwner) apply t
}
}
-
+
def substTreeSyms(pairs: (Symbol, Symbol)*): Tree = {
val list = pairs.toList
val subst = new TreeSymSubstituter(list map (_._1), list map (_._2))
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 9f5f7ffe61..ed1631fae5 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -110,13 +110,13 @@ trait Types extends api.Types { self: SymbolTable =>
* to undo constraints in the case of isSubType/isSameType failure.
*/
lazy val undoLog = newUndoLog
-
+
protected def newUndoLog = new UndoLog
-
+
class UndoLog {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
private var log: UndoPairs = List()
-
+
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
@@ -136,7 +136,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] def record(tv: TypeVar) = {
log ::= ((tv, tv.constr.cloneInternal))
}
-
+
private[scala] def clear() {
if (settings.debug.value)
self.log("Clearing " + log.size + " entries from the undoLog.")
@@ -428,7 +428,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** For a typeref, its arguments. The empty list for all other types */
def typeArgs: List[Type] = List()
-
+
/** A list of placeholder types derived from the type parameters.
* Used by RefinedType and TypeRef.
*/
@@ -525,7 +525,7 @@ trait Types extends api.Types { self: SymbolTable =>
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
-
+
/** A list of all non-private members defined or declared in this type. */
def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList
@@ -566,7 +566,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
-
+
/** All members with the given flags, excluding bridges.
*/
def membersWithFlags(requiredFlags: Long): List[Symbol] =
@@ -591,7 +591,7 @@ trait Types extends api.Types { self: SymbolTable =>
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def nonLocalMember(name: Name): Symbol =
memberBasedOnName(name, BridgeFlags | LOCAL)
-
+
/** Members excluding and requiring the given flags.
* Note: unfortunately it doesn't work to exclude DEFERRED this way.
*/
@@ -1237,7 +1237,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] var underlyingPeriod = NoPeriod
override def underlying: Type = {
val cache = underlyingCache
- if (underlyingPeriod == currentPeriod && cache != null) cache
+ if (underlyingPeriod == currentPeriod && cache != null) cache
else {
defineUnderlyingOfSingleType(this)
underlyingCache
@@ -1280,7 +1280,7 @@ trait Types extends api.Types { self: SymbolTable =>
unique(new UniqueSingleType(pre, sym))
}
}
-
+
protected def defineUnderlyingOfSingleType(tpe: SingleType) = {
val period = tpe.underlyingPeriod
if (period != currentPeriod) {
@@ -1350,13 +1350,13 @@ trait Types extends api.Types { self: SymbolTable =>
override def baseTypeSeq: BaseTypeSeq = {
val cached = baseTypeSeqCache
- if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
+ if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
cached
else {
defineBaseTypeSeqOfCompoundType(this)
if (baseTypeSeqCache eq undetBaseTypeSeq)
throw new RecoverableCyclicReference(typeSymbol)
-
+
baseTypeSeqCache
}
}
@@ -1370,7 +1370,7 @@ trait Types extends api.Types { self: SymbolTable =>
defineBaseClassesOfCompoundType(this)
if (baseClassesCache eq null)
throw new RecoverableCyclicReference(typeSymbol)
-
+
baseClassesCache
}
}
@@ -1410,13 +1410,13 @@ trait Types extends api.Types { self: SymbolTable =>
// override def isNullable: Boolean =
// parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
-
+
override def safeToString: String =
parents.mkString(" with ") +
(if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
decls.mkString("{", "; ", "}") else "")
}
-
+
protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
val period = tpe.baseTypeSeqPeriod;
if (period != currentPeriod) {
@@ -1469,7 +1469,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (tpe.baseTypeSeqCache eq undetBaseTypeSeq)
throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
}
-
+
protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
def computeBaseClasses: List[Symbol] =
if (tpe.parents.isEmpty) List(tpe.typeSymbol)
@@ -1751,7 +1751,7 @@ trait Types extends api.Types { self: SymbolTable =>
// override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
override def kind = "ClassInfoType"
-
+
override def safeToString =
if (settings.debug.value || decls.size > 1)
formattedToString
@@ -1801,13 +1801,13 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
+ /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
* with synchronized, because they are accessed only from isVolatile, which is called only from
* Typer.
*/
private var volatileRecursions: Int = 0
private val pendingVolatiles = new mutable.HashSet[Symbol]
-
+
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) with UniqueType {
require(args0.nonEmpty, this)
@@ -1825,7 +1825,7 @@ trait Types extends api.Types { self: SymbolTable =>
asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
}
-
+
// note: does not go through typeRef. There's no need to because
// neither `pre` nor `sym` changes. And there's a performance
// advantage to call TypeRef directly.
@@ -1840,7 +1840,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isHigherKinded = typeParams.nonEmpty
override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
-
+
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
if (isHigherKinded) {
if (sameLength(formals intersect typeParams, typeParams))
@@ -1860,9 +1860,9 @@ trait Types extends api.Types { self: SymbolTable =>
res
}
- override def transformInfo(tp: Type): Type =
+ override def transformInfo(tp: Type): Type =
appliedType(asSeenFromOwner(tp), dummyArgs)
-
+
override def narrow =
if (sym.isModuleClass) singleType(pre, sym.sourceModule)
else super.narrow
@@ -1870,14 +1870,14 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeConstructor = this
// eta-expand, subtyping relies on eta-expansion of higher-kinded types
- override protected def normalizeImpl: Type =
+ override protected def normalizeImpl: Type =
if (isHigherKinded) etaExpand else super.normalizeImpl
}
-
+
trait ClassTypeRef extends TypeRef {
// !!! There are scaladoc-created symbols arriving which violate this require.
// require(sym.isClass, sym)
-
+
override protected def normalizeImpl: Type =
if (sym.isRefinementClass) sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
else super.normalizeImpl
@@ -1886,7 +1886,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym == clazz) this
else transform(sym.info.baseType(clazz))
}
-
+
trait NonClassTypeRef extends TypeRef {
require(sym.isNonClassType, sym)
@@ -1905,11 +1905,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
relativeInfoCache
}
-
+
override def baseType(clazz: Symbol): Type =
if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz)
}
-
+
protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try {
basetypeRecursions += 1
if (basetypeRecursions < LogPendingBaseTypesThreshold)
@@ -1926,7 +1926,7 @@ trait Types extends api.Types { self: SymbolTable =>
} finally {
basetypeRecursions -= 1
}
-
+
trait AliasTypeRef extends NonClassTypeRef {
require(sym.isAliasType, sym)
@@ -1944,7 +1944,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (typeParamsMatchArgs) betaReduce.normalize
else if (isHigherKinded) super.normalizeImpl
else ErrorType
-
+
// isHKSubType0 introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
@@ -1954,7 +1954,7 @@ trait Types extends api.Types { self: SymbolTable =>
// this crashes pos/depmet_implicit_tpbetareduce.scala
// appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
def betaReduce = transform(sym.info.resultType)
-
+
// #3731: return sym1 for which holds: pre bound sym.name to sym and
// pre1 now binds sym.name to sym1, conceptually exactly the same
// symbol as sym. The selection of sym on pre must be updated to the
@@ -1968,12 +1968,12 @@ trait Types extends api.Types { self: SymbolTable =>
// TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
case _ => sym
}
-
+
}
trait AbstractTypeRef extends NonClassTypeRef {
require(sym.isAbstractType, sym)
-
+
/** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment
*/
private var symInfoCache: Type = _
@@ -2002,7 +2002,7 @@ trait Types extends api.Types { self: SymbolTable =>
volatileRecursions -= 1
}
}
-
+
override def thisInfo = {
val symInfo = sym.info
if (thisInfoCache == null || (symInfo ne symInfoCache)) {
@@ -2035,7 +2035,7 @@ trait Types extends api.Types { self: SymbolTable =>
private[reflect] var parentsPeriod = NoPeriod
private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
private[reflect] var baseTypeSeqPeriod = NoPeriod
- private var normalized: Type = _
+ private var normalized: Type = _
// @M: propagate actual type params (args) to `tp`, by replacing
// formal type parameters with actual ones. If tp is higher kinded,
@@ -2057,7 +2057,7 @@ trait Types extends api.Types { self: SymbolTable =>
normalized
}
}
-
+
def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
@@ -2111,12 +2111,12 @@ trait Types extends api.Types { self: SymbolTable =>
}
thisInfo.decls
}
-
+
protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
val cache = baseTypeSeqCache
- if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
+ if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
cache
else {
defineBaseTypeSeqOfTypeRef(this)
@@ -2210,7 +2210,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
})
}
-
+
protected def defineParentsOfTypeRef(tpe: TypeRef) = {
val period = tpe.parentsPeriod
if (period != currentPeriod) {
@@ -2222,7 +2222,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
}
-
+
protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
@@ -2382,7 +2382,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
object PolyType extends PolyTypeExtractor
-
+
/** A creator for existential types which flattens nested existentials.
*/
def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
@@ -2436,7 +2436,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** An existential can only be printed with wildcards if:
* - the underlying type is a typeref
* - where there is a 1-to-1 correspondence between underlying's typeargs and quantified
- * - and none of the existential parameters is referenced from anywhere else in the type
+ * - and none of the existential parameters is referenced from anywhere else in the type
* - and none of the existential parameters are singleton types
*/
private def isRepresentableWithWildcards = !settings.debug.value && {
@@ -2597,7 +2597,7 @@ trait Types extends api.Types { self: SymbolTable =>
else if (args.isEmpty) new HKTypeVar(origin, constr, params)
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
-
+
trace("create", "In " + tv.originLocation)(tv)
}
}
@@ -2638,7 +2638,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def isHigherKinded = true
override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
}
-
+
/** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
*/
class AppliedTypeVar(
@@ -2646,17 +2646,17 @@ trait Types extends api.Types { self: SymbolTable =>
_constr: TypeConstraint,
zippedArgs: List[(Symbol, Type)]
) extends TypeVar(_origin, _constr) {
-
+
require(zippedArgs.nonEmpty, this)
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
-
+
override protected def typeVarString = (
zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
)
}
-
+
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
@@ -2674,7 +2674,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeArgs: List[Type] = Nil
override def isHigherKinded = false
- /** The constraint associated with the variable
+ /** The constraint associated with the variable
* Syncnote: Type variables are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of `constr`
@@ -2685,7 +2685,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** The variable's skolemization level */
val level = skolemizationLevel
-
+
/** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
* ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
*
@@ -2716,7 +2716,7 @@ trait Types extends api.Types { self: SymbolTable =>
// inference may generate several TypeVar's for a single type parameter that must be inferred,
// only one of them is in the set of tvars that need to be solved, but
// they share the same TypeConstraint instance
-
+
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
@@ -2940,7 +2940,7 @@ trait Types extends api.Types { self: SymbolTable =>
def originLocation = {
val sym = origin.typeSymbolDirect
val encl = sym.owner.logicallyEnclosingMember
-
+
// This should display somewhere between one and three
// things which enclose the origin: at most, a class,
// a method, and a term. At least, a class.
@@ -3272,7 +3272,7 @@ trait Types extends api.Types { self: SymbolTable =>
case WildcardType => tycon // needed for neg/t0226
case _ => abort(debugString(tycon))
}
-
+
/** A creator for existential types where the type arguments,
* rather than being applied directly, are interpreted as the
* upper bounds of unknown types. For instance if the type argument
@@ -3283,7 +3283,7 @@ trait Types extends api.Types { self: SymbolTable =>
tycon match {
case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
val eparams = typeParamsToExistentials(sym)
- val bounds = args map (TypeBounds upper _)
+ val bounds = args map (TypeBounds upper _)
(eparams, bounds).zipped foreach (_ setInfo _)
newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
@@ -3387,7 +3387,7 @@ trait Types extends api.Types { self: SymbolTable =>
mapOver(tp)
}
}
-
+
/** Type with all top-level occurrences of abstract types replaced by their bounds */
def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
case TypeRef(_, sym, _) if sym.isAbstractType =>
@@ -3497,7 +3497,7 @@ trait Types extends api.Types { self: SymbolTable =>
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
def this() = this(List(), List())
-
+
/* Syncnote: Type constraints are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of any
@@ -3571,7 +3571,7 @@ trait Types extends api.Types { self: SymbolTable =>
val hi = hiBounds filterNot (_.typeSymbolDirect eq AnyClass)
val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
+
lostr ++ histr mkString ("[", " | ", "]")
}
if (inst eq NoType) boundsStr
@@ -3597,7 +3597,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def variance = _variance
def variance_=(x: Int) = _variance = x
-
+
override protected def noChangeToSymbols(origSyms: List[Symbol]) = {
origSyms forall { sym =>
val v = variance
@@ -3763,7 +3763,7 @@ trait Types extends api.Types { self: SymbolTable =>
protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
args mapConserve this
-
+
/** Called by mapOver to determine whether the original symbols can
* be returned, or whether they must be cloned. Overridden in VariantTypeMap.
*/
@@ -3777,7 +3777,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (elems1 eq elems) scope
else newScopeWith(elems1: _*)
}
-
+
/** Map this function over given list of symbols */
def mapOver(origSyms: List[Symbol]): List[Symbol] = {
// fast path in case nothing changes due to map
@@ -3840,7 +3840,7 @@ trait Types extends api.Types { self: SymbolTable =>
def traverse(tp: Type): Unit
def apply(tp: Type): Type = { traverse(tp); tp }
}
-
+
abstract class TypeTraverserWithResult[T] extends TypeTraverser {
def result: T
def clear(): Unit
@@ -3860,13 +3860,13 @@ trait Types extends api.Types { self: SymbolTable =>
*/
// class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
// variance = v
- //
+ //
// def traverse(tp: Type) = tp match {
// case ExistentialType(_, _) if (variance == v) => result = true
// case _ => mapOver(tp)
// }
// }
- //
+ //
// val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
// val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
@@ -3917,7 +3917,7 @@ trait Types extends api.Types { self: SymbolTable =>
mapOver(tp)
}
}
-
+
/** Used by existentialAbstraction.
*/
class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
@@ -3935,10 +3935,10 @@ trait Types extends api.Types { self: SymbolTable =>
countOccs(tpe)
for (tparam <- tparams)
countOccs(tparam.info)
-
+
apply(tpe)
}
-
+
def apply(tp: Type): Type = {
val tp1 = mapOver(tp)
if (variance == 0) tp1
@@ -4331,7 +4331,7 @@ trait Types extends api.Types { self: SymbolTable =>
def apply(tp: Type): Type = mapOver(tp) match {
// unsound to replace args by unstable actual #3873
case SingleType(NoPrefix, StableArg(arg)) => arg
- // (soundly) expand type alias selections on implicit arguments,
+ // (soundly) expand type alias selections on implicit arguments,
// see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
val arg = actuals(pid)
@@ -4360,7 +4360,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
// TODO: this should be simplified; in the stable case, one can
// probably just use an Ident to the tree.symbol.
- //
+ //
// @PP: That leads to failure here, where stuff no longer has type
// 'String @Annot("stuff")' but 'String @Annot(x)'.
//
@@ -4517,12 +4517,12 @@ trait Types extends api.Types { self: SymbolTable =>
result
}
}
-
+
protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
+
protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
var result: Symbol = _
-
+
def clear() { result = null }
private def register(sym: Symbol) {
@@ -4540,7 +4540,7 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => mapOver(tp)
}
}
-
+
private lazy val commonOwnerMapObj = new CommonOwnerMap
class MissingAliasControl extends ControlThrowable
@@ -4548,7 +4548,7 @@ trait Types extends api.Types { self: SymbolTable =>
class MissingTypeControl extends ControlThrowable
object adaptToNewRunMap extends TypeMap {
-
+
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
if (phase.flatClasses) {
sym
@@ -4715,7 +4715,7 @@ trait Types extends api.Types { self: SymbolTable =>
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+ forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
else if (arg1.isInstanceOf[TypeVar])
@@ -5738,8 +5738,8 @@ trait Types extends api.Types { self: SymbolTable =>
val formatted = tableDef.table(transposed)
println("** Depth is " + depth + "\n" + formatted)
}
-
- /** From a list of types, find any which take type parameters
+
+ /** From a list of types, find any which take type parameters
 * where the type parameter bounds contain references to any
 * other types in the list (including itself).
*
@@ -6258,13 +6258,13 @@ trait Types extends api.Types { self: SymbolTable =>
if (ts exists (_.isNotNull)) res.notNull else res
}
-
+
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
var tvs: List[TypeVar] = Nil
tp foreach {
case t: TypeVar => tvs ::= t
- case _ =>
+ case _ =>
}
tvs.reverse
}
@@ -6276,7 +6276,7 @@ trait Types extends api.Types { self: SymbolTable =>
// !!! Is it somehow guaranteed that this will not break under nesting?
// In general one has to save and restore the contents of the field...
tvs foreach (_.suspended = true)
- tvs
+ tvs
}
/** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
@@ -6504,5 +6504,5 @@ trait Types extends api.Types { self: SymbolTable =>
} finally {
tostringRecursions -= 1
}
-
+
}
diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala
index e3fb1a9cad..cc48be1684 100644
--- a/src/compiler/scala/reflect/internal/util/Collections.scala
+++ b/src/compiler/scala/reflect/internal/util/Collections.scala
@@ -64,7 +64,7 @@ trait Collections {
}
lb.toList
}
-
+
final def foreachWithIndex[A, B](xs: List[A])(f: (A, Int) => Unit) {
var index = 0
var ys = xs
@@ -98,7 +98,7 @@ trait Collections {
val x2 = ys2.head
if (p(x1, x2))
buf += ((x1, x2))
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -130,7 +130,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty) {
if (f(ys1.head, ys2.head))
return true
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -142,7 +142,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty) {
if (!f(ys1.head, ys2.head))
return false
-
+
ys1 = ys1.tail
ys2 = ys2.tail
}
@@ -155,7 +155,7 @@ trait Collections {
while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
if (!f(ys1.head, ys2.head, ys3.head))
return false
-
+
ys1 = ys1.tail
ys2 = ys2.tail
ys3 = ys3.tail
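The Collections hunks above all sit inside the same hand-rolled lockstep loops over two or three lists. A minimal standalone sketch of that traversal pattern, with hypothetical names (the real helpers live in Collections.scala):

object Lockstep {
  // Walk two lists in step and stop at the end of the shorter one,
  // mirroring the while-loop shape of forall2 above.
  def forall2[A, B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
    var xs1 = xs
    var ys1 = ys
    while (!xs1.isEmpty && !ys1.isEmpty) {
      if (!f(xs1.head, ys1.head))
        return false
      xs1 = xs1.tail
      ys1 = ys1.tail
    }
    true
  }
}
// e.g. Lockstep.forall2(List(1, 2), List(1, 2, 3))(_ == _) yields true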
diff --git a/src/compiler/scala/reflect/runtime/ConversionUtil.scala b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
index e75fd78590..8c32026e37 100644
--- a/src/compiler/scala/reflect/runtime/ConversionUtil.scala
+++ b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
@@ -23,7 +23,7 @@ trait ConversionUtil { self: SymbolTable =>
toJavaMap(s) = j
}
- def toScala(key: J)(body: => S): S = synchronized {
+ def toScala(key: J)(body: => S): S = synchronized {
toScalaMap get key match {
case Some(v) =>
v
@@ -34,7 +34,7 @@ trait ConversionUtil { self: SymbolTable =>
}
}
- def toJava(key: S)(body: => J): J = synchronized {
+ def toJava(key: S)(body: => J): J = synchronized {
toJavaMap get key match {
case Some(v) =>
v
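toScala/toJava above follow a synchronized memo-cache pattern: look the key up under a lock and, on a miss, evaluate the by-name body and record the result in both directions. A minimal sketch with hypothetical names:

import scala.collection.mutable

class TwoWayCache[J, S] {
  private val toScalaMap = mutable.HashMap[J, S]()
  private val toJavaMap  = mutable.HashMap[S, J]()

  // Compute-on-miss lookups, bracketed in synchronized like the methods above.
  def toScala(key: J)(body: => S): S = synchronized {
    toScalaMap.getOrElseUpdate(key, { val s = body; toJavaMap(s) = key; s })
  }
  def toJava(key: S)(body: => J): J = synchronized {
    toJavaMap.getOrElseUpdate(key, { val j = body; toScalaMap(j) = key; j })
  }
}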
diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala
index 028a660a35..d3e4dd7619 100644
--- a/src/compiler/scala/reflect/runtime/Mirror.scala
+++ b/src/compiler/scala/reflect/runtime/Mirror.scala
@@ -16,12 +16,12 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
val clazz = javaClass(name, defaultReflectiveClassLoader())
classToScala(clazz)
}
-
+
def companionInstance(clazz: Symbol): AnyRef = {
val singleton = ReflectionUtils.singletonInstance(clazz.fullName, defaultReflectiveClassLoader())
singleton
}
-
+
def symbolOfInstance(obj: Any): Symbol = classToScala(obj.getClass)
def typeOfInstance(obj: Any): Type = typeToScala(obj.getClass)
// to do add getClass/getType for instances of primitive types, probably like this:
@@ -41,8 +41,8 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
case nme.update => return Array.set(receiver, args(0).asInstanceOf[Int], args(1))
}
}
-
- val jmeth = methodToJava(meth)
+
+ val jmeth = methodToJava(meth)
jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
}
@@ -51,7 +51,7 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
override def typeToClass(tpe: Type): java.lang.Class[_] = typeToJavaClass(tpe)
override def symbolToClass(sym: Symbol): java.lang.Class[_] = classToJava(sym)
-
+
override def inReflexiveMirror = true
}
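The invoke path above resolves a java.lang.reflect.Method (via methodToJava) and calls it with the boxed arguments, special-casing array apply/update. A hedged standalone sketch of the plain reflective call, using only standard JDK reflection (names here are hypothetical):

object ReflectCall {
  // Find a public method by name and invoke it reflectively.
  def call(receiver: AnyRef, name: String, args: AnyRef*): AnyRef = {
    val jmeth = receiver.getClass.getMethods.find(_.getName == name).getOrElse(
      sys.error("no public method named " + name))
    jmeth.invoke(receiver, args: _*)
  }
}
// e.g. ReflectCall.call("abc", "length") returns the boxed Int 3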
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
index 72adbd4004..dd806beb2a 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,22 +1,22 @@
package scala.reflect
package runtime
-trait SynchronizedOps extends internal.SymbolTable
+trait SynchronizedOps extends internal.SymbolTable
with SynchronizedSymbols
with SynchronizedTypes { self: SymbolTable =>
-
+
// Names
-
+
private lazy val nameLock = new Object
-
+
override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
-
+
// BaseTypeSeqs
-
- override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+
+ override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
-
+
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
override def apply(i: Int): Type = synchronized { super.apply(i) }
override def rawElem(i: Int) = synchronized { super.rawElem(i) }
@@ -30,9 +30,9 @@ trait SynchronizedOps extends internal.SymbolTable
override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
}
-
+
// Scopes
-
+
override def newScope = new Scope() with SynchronizedScope
override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
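The overrides above all share one shape: a private lock object plus an override that brackets the super call. A minimal self-contained sketch of that shape, with hypothetical names:

trait Counter {
  private var n = 0
  def next(): Int = { n += 1; n }
}

trait SynchronizedCounter extends Counter {
  private lazy val lock = new Object
  // Same shape as newTermName/newTypeName above: delegate under the lock.
  override def next(): Int = lock.synchronized { super.next() }
}

object ids extends SynchronizedCounter
// ids.next() can now be called from several threads without losing updates.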
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
index 9baf94f71d..3f2fa30be2 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -6,61 +6,61 @@ import internal.Flags.DEFERRED
trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override protected def nextId() = synchronized { super.nextId() }
-
- override protected def freshExistentialName(suffix: String) =
+
+ override protected def freshExistentialName(suffix: String) =
synchronized { super.freshExistentialName(suffix) }
// Set the fields which point companions at one another. Returns the module.
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
synchronized { super.connectModuleToClass(m, moduleClass) }
-
+
override def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
new FreeVar(name, value) with SynchronizedTermSymbol initFlags newFlags setInfo tpe
override protected def makeNoSymbol = new NoSymbol with SynchronizedSymbol
-
+
trait SynchronizedSymbol extends Symbol {
-
+
override def rawowner = synchronized { super.rawowner }
override def rawname = synchronized { super.rawname }
override def rawflags = synchronized { super.rawflags }
-
+
override def rawflags_=(x: FlagsType) = synchronized { super.rawflags_=(x) }
override def name_=(x: Name) = synchronized { super.name_=(x) }
override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
-
+
override def validTo = synchronized { super.validTo }
override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
-
+
override def pos = synchronized { super.pos }
override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
override def privateWithin = synchronized { super.privateWithin }
- override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
+ override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
- override def info = synchronized { super.info }
+ override def info = synchronized { super.info }
override def info_=(info: Type) = synchronized { super.info_=(info) }
- override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
+ override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
override def rawInfo: Type = synchronized { super.rawInfo }
override def typeParams: List[Symbol] = synchronized { super.typeParams }
- override def reset(completer: Type) = synchronized { super.reset(completer) }
+ override def reset(completer: Type) = synchronized { super.reset(completer) }
- override def infosString: String = synchronized { super.infosString }
+ override def infosString: String = synchronized { super.infosString }
override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
- override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
+ override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
// ------ creators -------------------------------------------------------------------
override def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
-
+
override def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
new AbstractTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
-
+
override def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
new AliasTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
@@ -72,10 +72,10 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
new ClassSymbol(this, pos, name) with SynchronizedClassSymbol initFlags newFlags
-
+
override def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
new ModuleClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags
-
+
override def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
if ((newFlags & DEFERRED) == 0L)
new TypeSkolem(this, pos, name, origin) with SynchronizedTypeSymbol initFlags newFlags
@@ -116,4 +116,4 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
override def implicitMembers: List[Symbol] = synchronized { super.implicitMembers }
}
}
-
+
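The factory overrides above never change what gets built; they mix a synchronizing trait into the instance at creation time (new TermSymbol(...) with SynchronizedTermSymbol). A small hypothetical sketch of that instantiation-time mixin:

class Cell(private var v: Int) {
  def get: Int = v
  def set(x: Int): Unit = v = x
}

// The trait extends the class and wraps its members; the constructor
// arguments are supplied at the instantiation site below.
trait SynchronizedCell extends Cell {
  override def get: Int = synchronized { super.get }
  override def set(x: Int): Unit = synchronized { super.set(x) }
}

object Cells {
  def newCell(x: Int): Cell = new Cell(x) with SynchronizedCell
}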
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
index c842d3dd01..e5a508f802 100644
--- a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
@@ -2,86 +2,86 @@ package scala.reflect
package runtime
/** This trait overrides methods in reflect.internal, bracketing
- * them in synchronized { ... } to make them thread-safe
+ * them in synchronized { ... } to make them thread-safe
*/
trait SynchronizedTypes extends internal.Types { self: SymbolTable =>
-
+
// No sharing of map objects:
override protected def commonOwnerMap = new CommonOwnerMap
-
+
private val uniqueLock = new Object
override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { super.unique(tp) }
-
+
class SynchronizedUndoLog extends UndoLog {
-
- override def clear() =
+
+ override def clear() =
synchronized { super.clear() }
-
+
override def undo[T](block: => T): T =
synchronized { super.undo(block) }
-
+
override def undoUnless(block: => Boolean): Boolean =
synchronized { super.undoUnless(block) }
}
-
+
override protected def newUndoLog = new SynchronizedUndoLog
-
- override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
+
+ override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
-
- private val subsametypeLock = new Object
-
+
+ private val subsametypeLock = new Object
+
override def isSameType(tp1: Type, tp2: Type): Boolean =
subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
-
+
override def isDifferentType(tp1: Type, tp2: Type): Boolean =
subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
-
+
override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
-
+
private val lubglbLock = new Object
-
+
override def glb(ts: List[Type]): Type =
lubglbLock.synchronized { super.glb(ts) }
-
+
override def lub(ts: List[Type]): Type =
lubglbLock.synchronized { super.lub(ts) }
-
+
private val indentLock = new Object
-
+
override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
indentLock.synchronized { super.explain(op, p, tp1, arg2) }
}
-
+
private val toStringLock = new Object
override protected def typeToString(tpe: Type): String =
toStringLock.synchronized(super.typeToString(tpe))
-
- /* The idea of caches is as follows.
+
+ /* The idea of caches is as follows.
 * When in reflexive mode, a cache is either null, or one sentinel
 * value representing undefined or the final defined
 * value. Hence, we can ask in non-synchronized mode whether the cache field
- * is non null and different from the sentinel (if a sentinel exists).
+ * is non null and different from the sentinel (if a sentinel exists).
* If that's true, the cache value is current.
* Otherwise we arrive in one of the defined... methods listed below
* which go through all steps in synchronized mode.
*/
-
+
override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
-
- override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
+
+ override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
- override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
+ override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
-
- override protected def defineParentsOfTypeRef(tpe: TypeRef) =
+
+ override protected def defineParentsOfTypeRef(tpe: TypeRef) =
tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
-
- override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
+
+ override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
} \ No newline at end of file
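The cache comment above describes a read-mostly scheme: check the field without locking, and only enter the synchronized define... step when it is still null or the sentinel. A simplified, hypothetical sketch of that check (@volatile is added here for the sketch; the real code keys validity on compiler periods and per-type locks):

object SentinelCache {
  private val Undefined = new Object        // sentinel: "not computed yet"
  @volatile private var cache: AnyRef = null

  def get(compute: => AnyRef): AnyRef = {
    val c = cache                            // unsynchronized fast path
    if (c != null && (c ne Undefined)) c
    else synchronized {                      // slow path: compute under the lock
      if (cache == null || (cache eq Undefined)) cache = compute
      cache
    }
  }
}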
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 92d6e6320c..c92474b33e 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -123,10 +123,10 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the compiler to generate unchecked information. */
private var unchecked: Boolean = false
-
+
/** Instruct the ant task not to fail in the event of errors */
private var nofail: Boolean = false
-
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -356,7 +356,7 @@ class Scaladoc extends ScalaMatchingTask {
def setDocUncompilable(input: String) {
docUncompilable = Some(input)
}
-
+
/** Set the `nofail` info attribute.
*
* @param input One of the flags `yes/no` or `on/off`. Default is no/off.
@@ -569,7 +569,7 @@ class Scaladoc extends ScalaMatchingTask {
}
def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
-
+
/** Performs the compilation. */
override def execute() = {
val Pair(docSettings, sourceFiles) = initialize
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 940d115b2f..d6f57801e7 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -74,7 +74,7 @@ trait CompilationUnits { self: Global =>
* It is empty up to phase 'icode'.
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
-
+
def echo(pos: Position, msg: String) =
reporter.echo(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index ff8d86873c..1470a43491 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -154,7 +154,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Register top level class (called on entering the class)
*/
def registerTopLevelSym(sym: Symbol) {}
-
+
// ------------------ Reporting -------------------------------------
// not deprecated yet, but a method called "error" imported into
@@ -710,18 +710,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
private lazy val unitTimings = mutable.HashMap[CompilationUnit, Long]() withDefaultValue 0L // tracking time spent per unit
private def unitTimingsFormatted(): String = {
def toMillis(nanos: Long) = "%.3f" format nanos / 1000000d
-
+
val formatter = new util.TableDef[(String, String)] {
>> ("ms" -> (_._1)) >+ " "
<< ("path" -> (_._2))
}
"" + (
- new formatter.Table(unitTimings.toList sortBy (-_._2) map {
+ new formatter.Table(unitTimings.toList sortBy (-_._2) map {
case (unit, nanos) => (toMillis(nanos), unit.source.path)
})
)
}
-
+
protected def addToPhasesSet(sub: SubComponent, descr: String) {
phasesSet += sub
phasesDescMap(sub) = descr
@@ -866,7 +866,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def currentRun: Run = curRun
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
-
+
// TODO - trim these to the absolute minimum.
@inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
@inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
@@ -935,7 +935,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Counts for certain classes of warnings during this run. */
var deprecationWarnings: List[(Position, String)] = Nil
var uncheckedWarnings: List[(Position, String)] = Nil
-
+
/** A flag whether macro expansions failed */
var macroExpansionFailed = false
@@ -1082,7 +1082,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
}
def cancel() { reporter.cancelled = true }
-
+
private def currentProgress = (phasec * size) + unitc
private def totalProgress = (phaseDescriptors.size - 1) * size // -1: drops terminal phase
private def refreshProgress() = if (size > 0) progress(currentProgress, totalProgress)
@@ -1250,12 +1250,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
*/
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
- catch { case ex =>
+ catch { case ex =>
globalError(supplementErrorMessage("uncaught exception during compilation: " + ex.getClass.getName))
throw ex
}
}
-
+
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
units foreach addUnit
if (opt.profileAll) {
diff --git a/src/compiler/scala/tools/nsc/MacroContext.scala b/src/compiler/scala/tools/nsc/MacroContext.scala
index 72662291f8..9ea1f87125 100644
--- a/src/compiler/scala/tools/nsc/MacroContext.scala
+++ b/src/compiler/scala/tools/nsc/MacroContext.scala
@@ -3,8 +3,8 @@ package scala.tools.nsc
import symtab.Flags._
trait MacroContext extends reflect.macro.Context { self: Global =>
-
+
def captureVariable(vble: Symbol): Unit = vble setFlag CAPTURED
-
+
def referenceCapturedVariable(id: Ident): Tree = ReferenceToBoxed(id)
}
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index 7e832a56b0..df63035007 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -46,7 +46,7 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
-
+
@inline final def atOwnPhase[T](op: => T) = global.atPhase(ownPhase)(op)
@inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index a94154e0ff..d7159c5fa8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -30,7 +30,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
else
tree
}
-
+
/** Builds a fully attributed wildcard import node.
*/
def mkWildcardImport(pkg: Symbol): Import = {
@@ -160,7 +160,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
def mkModuleVarDef(accessor: Symbol) = {
val inClass = accessor.owner.isClass
val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
+
val mval = (
accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
setInfo accessor.tpe.finalResultType
@@ -225,7 +225,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), 1 to numParams map (_ => Bind(tpnme.WILDCARD, EmptyTree)) toList)
}
def mkBindForCase(patVar: Symbol, clazz: Symbol, targs: List[Type]): Tree = {
- Bind(patVar, Typed(Ident(nme.WILDCARD),
+ Bind(patVar, Typed(Ident(nme.WILDCARD),
if (targs.isEmpty) mkAppliedTypeForCase(clazz)
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 8e445a62db..2b95300bad 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -38,7 +38,7 @@ trait ParsersCommon extends ScannersCommon {
def freshTypeName(prefix: String): TypeName
def deprecationWarning(off: Int, msg: String): Unit
def accept(token: Int): Int
-
+
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN), will be returned
* instead of the contents of the groupers. However in all cases accept(LPAREN)
@@ -1141,7 +1141,7 @@ self =>
private def interpolatedString(): Tree = atPos(in.offset) {
val start = in.offset
val interpolator = in.name
-
+
val partsBuf = new ListBuffer[Tree]
val exprBuf = new ListBuffer[Tree]
in.nextToken()
@@ -1153,7 +1153,7 @@ self =>
}
}
if (in.token == STRINGLIT) partsBuf += literal()
-
+
val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
t2 setPos t2.pos.makeTransparent
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index f712c7411f..2626ca26a6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -84,7 +84,7 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
-
+
def isAtEnd = charOffset >= buf.length
def flush = { charOffset = offset; nextChar(); this }
@@ -164,7 +164,7 @@ trait Scanners extends ScannersCommon {
* RBRACE if region starts with '{'
* ARROW if region starts with `case'
* STRINGLIT if region is a string interpolation expression starting with '${'
- * (the STRINGLIT appears twice in succession on the stack iff the
+ * (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
var sepRegions: List[Int] = List()
@@ -173,15 +173,15 @@ trait Scanners extends ScannersCommon {
/** Are we directly in a string interpolation expression?
*/
- @inline private def inStringInterpolation =
+ @inline private def inStringInterpolation =
sepRegions.nonEmpty && sepRegions.head == STRINGLIT
-
+
/** Are we directly in a multiline string interpolation expression?
* @pre: inStringInterpolation
*/
- @inline private def inMultiLineInterpolation =
+ @inline private def inMultiLineInterpolation =
inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
-
+
/** read next token and return last offset
*/
def skipToken(): Offset = {
@@ -205,7 +205,7 @@ trait Scanners extends ScannersCommon {
case CASE =>
sepRegions = ARROW :: sepRegions
case RBRACE =>
- while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
+ while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
docBuffer = null
@@ -223,7 +223,7 @@ trait Scanners extends ScannersCommon {
sepRegions = sepRegions.tail
case _ =>
}
-
+
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
@@ -327,8 +327,8 @@ trait Scanners extends ScannersCommon {
'z' =>
putChar(ch)
nextChar()
- getIdentRest()
- if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
+ getIdentRest()
+ if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
token = INTERPOLATIONID
case '<' => // is XMLSTART?
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
@@ -409,7 +409,7 @@ trait Scanners extends ScannersCommon {
token = STRINGLIT
strVal = ""
}
- } else {
+ } else {
getStringLit()
}
}
@@ -632,8 +632,8 @@ trait Scanners extends ScannersCommon {
else finishNamed()
}
}
-
-
+
+
// Literals -----------------------------------------------------------------
private def getStringLit() = {
@@ -661,20 +661,20 @@ trait Scanners extends ScannersCommon {
getRawStringLit()
}
}
-
+
@annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
next.lastOffset = charOffset - 1
next.offset = charOffset - 1
- }
+ }
if (ch == '"') {
nextRawChar()
if (!multiLine || isTripleQuote()) {
setStrVal()
token = STRINGLIT
- } else
+ } else
getStringPart(multiLine)
} else if (ch == '$') {
nextRawChar()
@@ -706,12 +706,12 @@ trait Scanners extends ScannersCommon {
getStringPart(multiLine)
}
}
-
+
private def fetchStringPart() = {
offset = charOffset - 1
getStringPart(multiLine = inMultiLineInterpolation)
}
-
+
private def isTripleQuote(): Boolean =
if (ch == '"') {
nextRawChar()
@@ -732,7 +732,7 @@ trait Scanners extends ScannersCommon {
false
}
- /** copy current character into cbuf, interpreting any escape sequences,
+ /** copy current character into cbuf, interpreting any escape sequences,
* and advance to next character.
*/
protected def getLitChar(): Unit =
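sepRegions, described in the comment above, is a stack of the tokens expected to close the currently open regions: openers push their closer, and the closer pops back to its own entry, as the RBRACE case in the hunk shows. A tiny hypothetical sketch of that bookkeeping:

object Regions {
  final val RPAREN = 1; final val RBRACKET = 2; final val RBRACE = 3

  var sepRegions: List[Int] = List()

  def openRegion(closer: Int): Unit =
    sepRegions = closer :: sepRegions

  // Mirrors the RBRACE handling above: discard anything left open inside the
  // region, then pop the region itself.
  def closeRegion(closer: Int): Unit = {
    while (!sepRegions.isEmpty && sepRegions.head != closer)
      sepRegions = sepRegions.tail
    if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
  }
}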
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 091f333c27..fb4daefd57 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -58,7 +58,7 @@ object Tokens extends Tokens {
final val BACKQUOTED_IDENT = 11
def isIdentifier(code: Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
+
@switch def canBeginExpression(code: Int) = code match {
case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
case LBRACE|LPAREN|LBRACKET|COMMENT => true
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 4ab0eb0129..68c4ac03f6 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -18,7 +18,7 @@ trait BasicBlocks {
import opcodes._
import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
-
+
object NoBasicBlock extends BasicBlock(-1, null)
/** This class represents a basic block. Each
@@ -182,7 +182,7 @@ trait BasicBlocks {
final def foreach[U](f: Instruction => U) = {
if (!closed) dumpMethodAndAbort(method, this)
else instrs foreach f
-
+
// !!! If I replace "instrs foreach f" with the following:
// var i = 0
// val len = instrs.length
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 44a58e75b4..97247dd89b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -21,7 +21,7 @@ trait Members {
self: ICodes =>
import global._
-
+
object NoCode extends Code(null, "NoCode") {
override def blocksList: List[BasicBlock] = Nil
}
@@ -138,7 +138,7 @@ trait Members {
/** Represent a field in ICode */
class IField(val symbol: Symbol) extends IMember { }
-
+
object NoIMethod extends IMethod(NoSymbol) { }
/**
@@ -212,7 +212,7 @@ trait Members {
def isStatic: Boolean = symbol.isStaticMember
override def toString() = symbol.fullName
-
+
def matchesSignature(other: IMethod) = {
(symbol.name == other.symbol.name) &&
(params corresponds other.params)(_.kind == _.kind) &&
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index ba4b250303..8a2ec9a191 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -21,7 +21,7 @@ trait TypeStacks {
* stack of the ICode.
*/
type Rep = List[TypeKind]
-
+
object NoTypeStack extends TypeStack(Nil) { }
class TypeStack(var types: Rep) {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 32177c309a..c217869a48 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -83,7 +83,7 @@ trait BytecodeWriters {
emitJavap(bytes, javapFile)
}
}
-
+
trait ClassBytecodeWriter extends BytecodeWriter {
def writeClass(label: String, jclass: JClass, sym: Symbol) {
val outfile = getFile(sym, jclass, ".class")
@@ -94,18 +94,18 @@ trait BytecodeWriters {
informProgress("wrote '" + label + "' to " + outfile)
}
}
-
+
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
-
+
abstract override def writeClass(label: String, jclass: JClass, sym: Symbol) {
super.writeClass(label, jclass, sym)
-
+
val pathName = jclass.getName()
var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
-
+
try jclass writeTo outstream
finally outstream.close()
}
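DumpBytecodeWriter above derives a dump path from the JVM class name and streams the class file out. A hedged standalone sketch of that step using plain java.io (names hypothetical):

import java.io.{ DataOutputStream, File, FileOutputStream }

object DumpBytes {
  def write(baseDir: File, className: String, bytes: Array[Byte]): Unit = {
    // e.g. "scala.Predef$" -> <baseDir>/scala/Predef$.class
    val dumpFile = new File(baseDir, className.replace('.', File.separatorChar) + ".class")
    dumpFile.getParentFile.mkdirs()
    val out = new DataOutputStream(new FileOutputStream(dumpFile))
    try out.write(bytes)
    finally out.close()
  }
}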
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 23ee0bb33d..387b7fb3d7 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -152,7 +152,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (settings.Ygenjavap.isDefault) {
if(settings.Ydumpclasses.isDefault)
new ClassBytecodeWriter { }
- else
+ else
new ClassBytecodeWriter with DumpBytecodeWriter { }
}
else new ClassBytecodeWriter with JavapBytecodeWriter { }
@@ -207,7 +207,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
-
+
final val ExcludedForwarderFlags = {
import Flags._
// Should include DEFERRED but this breaks findMember.
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 089ef9cf35..176c00c025 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,7 +18,7 @@ abstract class Changes {
abstract class Change
- private lazy val annotationsChecked =
+ private lazy val annotationsChecked =
List(definitions.SpecializedClass) // Any others that should be checked?
private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index 395757237b..02be916f59 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -146,7 +146,7 @@ trait DependencyAnalysis extends SubComponent with Files {
d.symbol match {
case s : ModuleClassSymbol =>
val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
-
+
if (isTopLevelModule && (s.companionModule != NoSymbol)) {
dependencies.emits(source, nameToFile(unit.source.file, name))
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 7eb8c393f3..127faf8ed9 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -104,7 +104,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
mb.useCaseOf.get.inDefinitionTemplates
case _ =>
- if (inTpl == null)
+ if (inTpl == null)
makeRootPackage.toList
else
makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
@@ -123,14 +123,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else Public()
}
}
- def flags = {
+ def flags = {
val fgs = mutable.ListBuffer.empty[Paragraph]
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
- fgs.toList
+ fgs.toList
}
def deprecation =
if (sym.isDeprecated)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index 39a1a406ba..68c8f2fdb8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -92,7 +92,7 @@ trait ExprTyper {
case _ => NoType
}
}
-
+
def evaluate(): Type = {
typeOfExpressionDepth += 1
try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 16085c07d6..e1ea69842f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -382,7 +382,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def findToolsJar() = {
val jdkPath = Directory(jdkHome)
val jar = jdkPath / "lib" / "tools.jar" toFile;
-
+
if (jar isFile)
Some(jar)
else if (jdkPath.isDirectory)
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index 073501912a..d579e0369e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -61,7 +61,7 @@ trait Imports {
def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedTermNamed(name: String): Symbol =
+ def importedTermNamed(name: String): Symbol =
importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index 0e2c34efbf..f9c1907696 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -61,7 +61,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def packageNames = packages map tos
def aliasNames = aliases map tos
}
-
+
object NoTypeCompletion extends TypeMemberCompletion(NoType) {
override def memberNamed(s: String) = NoSymbol
override def members = Nil
@@ -165,11 +165,11 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
override def follow(id: String): Option[CompletionAware] = {
if (!completions(0).contains(id))
return None
-
+
val tpe = intp typeOfExpression id
if (tpe == NoType)
return None
-
+
def default = Some(TypeMemberCompletion(tpe))
// only rebinding vals in power mode for now.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index ef84876b94..14876425f4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -155,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
( rutil.info[ReplValsImpl].membersDeclared
filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
- sortBy (_.decodedName)
+ sortBy (_.decodedName)
map to_str
mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
)
@@ -165,7 +165,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
}
object InternalInfo extends LowPriorityInternalInfo { }
-
+
/** Now dealing with the problem of accidentally calling a method on Type
* when you're holding a Symbol and seeing the Symbol converted to the
* type of Symbol rather than the type of the thing represented by the
@@ -176,7 +176,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def apply[T: Manifest] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
}
object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
+
}
class InternalInfoWrapper[T: Manifest](value: Option[T] = None) {
def ? : InternalInfo[T] = new InternalInfo[T](value)
@@ -190,7 +190,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
private def newInfo[U: Manifest](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
-
+
/** Standard noise reduction filter. */
def excludeMember(s: Symbol) = (
isSpecialized(s)
@@ -218,7 +218,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
def membersInherited = members filterNot (membersDeclared contains _)
def memberTypes = members filter (_.name.isTypeName)
def memberMethods = members filter (_.isMethod)
-
+
def pkg = symbol.enclosingPackage
def pkgName = pkg.fullName
def pkgClass = symbol.enclosingPackageClass
@@ -343,12 +343,12 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
def sigs = syms map (_.defString)
def infos = syms map (_.info)
}
-
+
trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
-
+
implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
@@ -375,7 +375,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
+
implicit def liftToTermName(s: String): TermName = newTermName(s)
implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index b20017c1d3..a68392f0fb 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -50,7 +50,7 @@ object ReplVals {
def mkManifestToType[T <: Global](global: T) = {
import global._
import definitions._
-
+
/** We can't use definitions.manifestToType directly because we're passing
* it to map and the compiler refuses to perform eta expansion on a method
* with a dependent return type. (Can this be relaxed?) To get around this
@@ -59,7 +59,7 @@ object ReplVals {
*/
def manifestToType(m: OptManifest[_]): Global#Type =
definitions.manifestToType(m)
-
+
class AppliedTypeFromManifests(sym: Symbol) {
def apply[M](implicit m1: Manifest[M]): Type =
if (sym eq NoSymbol) NoType
@@ -69,7 +69,7 @@ object ReplVals {
if (sym eq NoSymbol) NoType
else appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type]))
}
-
+
(sym: Symbol) => new AppliedTypeFromManifests(sym)
}
}
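The comment above explains why manifestToType is re-wrapped: a method whose result type depends on its argument cannot simply be passed where a function value is expected, so a delegating wrapper with a widened result type is used instead. A small hypothetical illustration of the same workaround:

trait Box { type T; def value: T }

object Boxes {
  // Result type depends on the argument (b.T), like definitions.manifestToType.
  def open(b: Box): b.T = b.value

  // Wrapper with the result widened to Any, safe to pass to map and friends.
  val openAny: Box => Any = (b: Box) => open(b)

  def openAll(bs: List[Box]): List[Any] = bs map openAny
}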
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index f19a285d7c..309fc5733f 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -56,10 +56,10 @@ abstract class Reporter {
*/
def echo(msg: String): Unit = info(NoPosition, msg, true)
def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
-
+
/** Informational messages, suppressed unless -verbose or force=true. */
def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
-
+
/** Warnings and errors. */
def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index a47bfda8c1..fb85ebeeb0 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -9,4 +9,4 @@ package symtab
import ast.{Trees, TreePrinters, DocComments}
import util._
-abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file
+abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a7ddfae819..07d132f7dd 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -368,7 +368,7 @@ abstract class ClassfileParser {
case arr: Type => Constant(arr)
}
}
-
+
private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
val decodedLength = ByteCodecs.decode(bytes)
val arr = new Array[Byte](decodedLength)
@@ -424,7 +424,7 @@ abstract class ClassfileParser {
def forceMangledName(name: Name, module: Boolean): Symbol = {
val parts = name.decode.toString.split(Array('.', '$'))
var sym: Symbol = definitions.RootClass
-
+
// was "at flatten.prev"
beforeFlatten {
for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
@@ -432,7 +432,7 @@ abstract class ClassfileParser {
sym.linkedClassOfClass.info
sym.info.decl(part.encode)
}//.suchThat(module == _.isModule)
-
+
sym = (
if (sym1 ne NoSymbol) sym1
else sym.info.decl(part.encode.toTypeName)
@@ -721,7 +721,7 @@ abstract class ClassfileParser {
index += 1
val bounds = variance match {
case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
- case '-' =>
+ case '-' =>
val tp = sig2type(tparams, skiptvs)
// sig2type seems to return AnyClass regardless of the situation:
// we don't want Any as a LOWER bound.
@@ -1211,7 +1211,7 @@ abstract class ClassfileParser {
else
getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol,
+ assert(s ne NoSymbol,
"" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " +
" while parsing " + ((in.file, busy)) +
sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index a6ecb16b43..d04c6115ca 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -33,14 +33,14 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private def savingStatics[T](body: => T): T = {
val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
- val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
+ val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
val result = body
clearStatics()
newStaticMembers ++= savedNewStaticMembers
newStaticInits ++= savedNewStaticInits
symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
+
result
}
private def transformTemplate(tree: Tree) = {
@@ -102,7 +102,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** The boxed type if it's a primitive; identity otherwise.
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
-
+
override def transform(tree: Tree): Tree = tree match {
/* Transforms dynamic calls (i.e. calls to methods that are undefined
@@ -139,7 +139,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case ad@ApplyDynamic(qual0, params) =>
if (settings.logReflectiveCalls.value)
unit.echo(ad.pos, "method invocation uses reflection")
-
+
val typedPos = typedWithPos(ad.pos) _
assert(ad.symbol.isPublic)
@@ -151,7 +151,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val flags = PRIVATE | STATIC | SYNTHETIC | (
if (isFinal) FINAL else 0
)
-
+
val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
@@ -493,7 +493,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val t: Tree = ad.symbol.tpe match {
case MethodType(mparams, resType) =>
assert(params.length == mparams.length, mparams)
-
+
typedPos {
val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
qual = safeREF(sym)
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index c15da6e9a9..45045b1909 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -68,7 +68,7 @@ abstract class ExplicitOuter extends InfoTransform
result
}
-
+
private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
@@ -95,7 +95,7 @@ abstract class ExplicitOuter extends InfoTransform
val accFlags = SYNTHETIC | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethodSymbol(nme.OUTER, clazz.pos, accFlags)
val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
-
+
sym expandName clazz
sym.referenced = clazz
sym setInfo MethodType(Nil, restpe)
@@ -163,7 +163,7 @@ abstract class ExplicitOuter extends InfoTransform
decls1 = decls.cloneScope
val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
outerAcc expandName clazz
-
+
decls1 enter newOuterAccessor(clazz)
if (hasOuterField(clazz)) //2
decls1 enter newOuterField(clazz)
@@ -468,7 +468,7 @@ abstract class ExplicitOuter extends InfoTransform
}
}
super.transform(
- deriveTemplate(tree)(decls =>
+ deriveTemplate(tree)(decls =>
if (newDefs.isEmpty) decls
else decls ::: newDefs.toList
)
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index dafce76d45..570eaba3a9 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -18,7 +18,7 @@ abstract class LambdaLift extends InfoTransform {
/** the following two members override abstract members in Transform */
val phaseName: String = "lambdalift"
-
+
/** Converts types of captured variables to *Ref types.
*/
def boxIfCaptured(sym: Symbol, tpe: Type, erasedTypes: Boolean) =
@@ -75,10 +75,10 @@ abstract class LambdaLift extends InfoTransform {
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
-
+
/** True if we are transforming under a ReferenceToBoxed node */
private var isBoxedRef = false
-
+
private type SymSet = TreeSet[Symbol]
private def newSymSet = new TreeSet[Symbol](_ isLess _)
@@ -221,7 +221,7 @@ abstract class LambdaLift extends InfoTransform {
for (caller <- called.keys ; callee <- called(caller) ; fvs <- free get callee ; fv <- fvs)
markFree(fv, caller)
} while (changedFreeVars)
-
+
def renameSym(sym: Symbol) {
val originalName = sym.name
val base = sym.name + nme.NAME_JOIN_STRING + (
@@ -245,7 +245,7 @@ abstract class LambdaLift extends InfoTransform {
debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name))
}
-
+
for (sym <- renamable) {
// If we renamed a trait from Foo to Foo$1, we must rename the implementation
// class from Foo$class to Foo$1$class. (Without special consideration it would
@@ -324,7 +324,7 @@ abstract class LambdaLift extends InfoTransform {
val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
sym.updateInfo(
lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
-
+
copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
case ClassDef(_, _, _, _) =>
// Disabled attempt to to add getters to freeParams
@@ -419,10 +419,10 @@ abstract class LambdaLift extends InfoTransform {
def refConstr(expr: Tree): Tree = expr match {
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
- case _ =>
+ case _ =>
New(sym.tpe, expr)
}
- def refConstrCase(cdef: CaseDef): CaseDef =
+ def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
refConstr(constructorArg)
@@ -467,7 +467,7 @@ abstract class LambdaLift extends InfoTransform {
tree
}
}
-
+
private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
override def transform(tree: Tree): Tree = tree match {
@@ -476,7 +476,7 @@ abstract class LambdaLift extends InfoTransform {
case _ =>
postTransform(preTransform(tree))
}
-
+
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 75d3e443d4..85ba539993 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -125,7 +125,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
}
case ValDef(_, _, _, _) if !sym.owner.isModule && !sym.owner.isClass =>
- deriveValDef(tree) { rhs0 =>
+ deriveValDef(tree) { rhs0 =>
val rhs = super.transform(rhs0)
if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs
}
@@ -133,7 +133,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
case l@LabelDef(name0, params0, ifp0@If(_, _, _)) if name0.startsWith(nme.WHILE_PREFIX) =>
val ifp1 = super.transform(ifp0)
val If(cond0, thenp0, elsep0) = ifp1
-
+
if (LocalLazyValFinder.find(thenp0))
deriveLabelDef(l)(_ => treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
else
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3a2482e816..050425c558 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -203,7 +203,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
if (needsExpandedSetterName(field))
setter.name = nme.expandedSetterName(setter.name, clazz)
-
+
setter
}
@@ -350,7 +350,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setAnnotations accessed.annotations)
}
}
- }
+ }
else if (member.isSuperAccessor) { // mixin super accessors
val member1 = addMember(clazz, member.cloneSymbol(clazz)) setPos clazz.pos
assert(member1.alias != NoSymbol, member1)
@@ -533,7 +533,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
self = sym.newValueParameter(nme.SELF, sym.pos) setInfo toInterface(currentOwner.typeOfThis)
val selfdef = ValDef(self) setType NoType
copyDefDef(tree)(vparamss = List(selfdef :: vparams))
- }
+ }
else EmptyTree
}
else {
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index e49f8d7c0b..d8c18c2d50 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -50,7 +50,7 @@ abstract class OverridingPairs {
val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
-
+
result
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index e5d1e348d6..e5df144f2e 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -110,7 +110,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tp baseType GroupOfSpecializable match {
case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
arg.typeArgs map (_.typeSymbol)
- case _ =>
+ case _ =>
List(tp.typeSymbol)
}
}
@@ -515,7 +515,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED))
-
+
sClass.sourceFile = clazz.sourceFile
currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
@@ -1226,7 +1226,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else NoSymbol
def illegalSpecializedInheritance(clazz: Symbol): Boolean = (
- hasSpecializedFlag(clazz)
+ hasSpecializedFlag(clazz)
&& originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait)
)
@@ -1291,7 +1291,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMember = specCandidates suchThat { s =>
doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
}
-
+
debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
debuglog("[specSym] found specMember: " + specMember)
if (specMember ne NoSymbol)
@@ -1405,7 +1405,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
(new CollectMethodBodies)(tree)
val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) =>
TypeTree(tpe) setPos parent.pos)
-
+
treeCopy.Template(tree,
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
@@ -1419,7 +1419,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val superRef: Tree = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
}
- if (symbol.isPrimaryConstructor)
+ if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
else // duplicate the original constructor
duplicateBody(ddef, info(symbol).target)
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 23697a4730..848d6be47b 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -228,7 +228,7 @@ abstract class TailCalls extends Transform {
debuglog("Considering " + dd.name + " for tailcalls")
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree)(rhs =>
+ deriveDefDef(tree)(rhs =>
if (newCtx.isTransformed) {
/** We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 2af6192e42..7d66549a52 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -147,7 +147,7 @@ abstract class UnCurry extends InfoTransform
private def nonLocalReturnThrow(expr: Tree, meth: Symbol) = localTyper typed {
Throw(
nonLocalReturnExceptionType(expr.tpe.widen),
- Ident(nonLocalReturnKey(meth)),
+ Ident(nonLocalReturnKey(meth)),
expr
)
}
@@ -247,7 +247,7 @@ abstract class UnCurry extends InfoTransform
else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe)
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
- val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL)
+ val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL)
applyMethod setInfoAndEnter MethodType(applyMethod newSyntheticValueParams formals, restpe)
anonClass addAnnotation serialVersionUIDAnnotation
@@ -451,7 +451,7 @@ abstract class UnCurry extends InfoTransform
gen.mkZero(tree.tpe) setType tree.tpe
}
}
-
+
private def isSelfSynchronized(ddef: DefDef) = ddef.rhs match {
case Apply(fn @ TypeApply(Select(sel, _), _), _) =>
fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait
@@ -516,7 +516,7 @@ abstract class UnCurry extends InfoTransform
else translateSynchronized(tree) match {
case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
-
+
withNeedLift(false) {
if (dd.symbol.isClassConstructor) {
atOwner(sym) {
@@ -781,7 +781,7 @@ abstract class UnCurry extends InfoTransform
// add the method to `newMembers`
newMembers += forwtree
}
-
+
flatdd
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 140df53816..afe0b42167 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -31,14 +31,14 @@ trait ContextErrors {
case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
extends AbsTypeError {
-
+
def errPos:Position = underlyingTree.pos
override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
}
case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
extends AbsTypeError {
-
+
def errPos = underlyingSym.pos
}
@@ -76,7 +76,7 @@ trait ContextErrors {
}
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
-
+
def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
"type mismatch" + foundReqMsg(found, req) + missingArgsMsg
@@ -143,12 +143,12 @@ trait ContextErrors {
found
}
assert(!found.isErroneous && !req.isErroneous, (found, req))
-
+
issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
if (settings.explaintypes.value)
explainTypes(found, req)
}
-
+
def WithFilterError(tree: Tree, ex: AbsTypeError) = {
issueTypeError(ex)
setError(tree)
@@ -177,13 +177,13 @@ trait ContextErrors {
val calcSimilar = (
name.length > 2 && (
startingIdentCx.reportErrors
- || startingIdentCx.enclClassOrMethod.reportErrors
+ || startingIdentCx.enclClassOrMethod.reportErrors
)
)
- // avoid calculating if we're in "silent" mode.
- // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
+ // avoid calculating if we're in "silent" mode.
+ // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
val similar = {
- if (!calcSimilar) ""
+ if (!calcSimilar) ""
else {
val allowed = (
startingIdentCx.enclosingContextChain
@@ -672,7 +672,7 @@ trait ContextErrors {
type ErrorType = Value
val WrongNumber, NoParams, ArgsDoNotConform = Value
}
-
+
private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) =
if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) {
val methodName = nme.defaultGetterToMethod(sym1.name)
@@ -718,11 +718,11 @@ trait ContextErrors {
"constructor cannot be instantiated to expected type" + foundReqMsg(restpe, pt))
setError(tree)
}
-
+
def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = {
issueNormalTypeError(tree,
applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
- // since inferMethodAlternative modifies the state of the tree
+ // since inferMethodAlternative modifies the state of the tree
// we have to set the type of tree to ErrorType only in the very last
// fallback action that is done in the inference (tracking it manually is error prone).
// This avoids entering infinite loop in doTypeApply.
@@ -842,14 +842,14 @@ trait ContextErrors {
object NamerErrorGen {
implicit val context0 = context
-
+
object SymValidateErrors extends Enumeration {
val ImplicitConstr, ImplicitNotTerm, ImplicitTopObject,
OverrideClass, SealedNonClass, AbstractNonClass,
OverrideConstr, AbstractOverride, LazyAndEarlyInit,
ByNameParameter, AbstractVar = Value
}
-
+
object DuplicatesErrorKinds extends Enumeration {
val RenamedTwice, AppearsTwice = Value
}
@@ -857,7 +857,7 @@ trait ContextErrors {
import SymValidateErrors._
import DuplicatesErrorKinds._
import symtab.Flags
-
+
def TypeSigError(tree: Tree, ex: TypeError) = {
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
@@ -866,7 +866,7 @@ trait ContextErrors {
context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
}
-
+
def GetterDefinedTwiceError(getter: Symbol) =
issueSymbolTypeError(getter, getter+" is defined twice")
@@ -909,37 +909,37 @@ trait ContextErrors {
val msg = errKind match {
case ImplicitConstr =>
"`implicit' modifier not allowed for constructors"
-
+
case ImplicitNotTerm =>
"`implicit' modifier can be used only for values, variables and methods"
-
+
case ImplicitTopObject =>
"`implicit' modifier cannot be used for top-level objects"
-
+
case OverrideClass =>
"`override' modifier not allowed for classes"
-
+
case SealedNonClass =>
"`sealed' modifier can be used only for classes"
-
+
case AbstractNonClass =>
"`abstract' modifier can be used only for classes; it should be omitted for abstract members"
-
+
case OverrideConstr =>
"`override' modifier not allowed for constructors"
-
+
case AbstractOverride =>
"`abstract override' modifier only allowed for members of traits"
-
+
case LazyAndEarlyInit =>
"`lazy' definitions may not be initialized early"
-
+
case ByNameParameter =>
"pass-by-name arguments not allowed for case class parameters"
-
+
case AbstractVar =>
"only classes can have declared but undefined members" + abstractVarMessage(sym)
-
+
}
issueSymbolTypeError(sym, msg)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 8586ebf0d4..90e07023bb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -21,7 +21,7 @@ trait Contexts { self: Analyzer =>
outer = this
enclClass = this
enclMethod = this
-
+
override def nextEnclosing(p: Context => Boolean): Context = this
override def enclosingContextChain: List[Context] = Nil
override def implicitss: List[List[ImplicitInfo]] = Nil
@@ -128,7 +128,7 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
-
+
var buffer: Set[AbsTypeError] = _
def enclClassOrMethod: Context =
@@ -179,7 +179,7 @@ trait Contexts { self: Analyzer =>
buffer.clear()
current
}
-
+
def logError(err: AbsTypeError) = buffer += err
def withImplicitsDisabled[T](op: => T): T = {
@@ -240,7 +240,7 @@ trait Contexts { self: Analyzer =>
c.implicitsEnabled = true
c
}
-
+
def makeNewImport(sym: Symbol): Context =
makeNewImport(gen.mkWildcardImport(sym))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3d2f86d54d..0ddacf7d36 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -215,7 +215,7 @@ trait Implicits {
object HasMethodMatching {
val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
-
+
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
@@ -571,7 +571,7 @@ trait Implicits {
else {
val tvars = undetParams map freshVar
def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
-
+
printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
@@ -594,7 +594,7 @@ trait Implicits {
// we must be conservative in leaving type params in undetparams
// prototype == WildcardType: want to remove all inferred Nothings
val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, tvars, targs)
-
+
val subst: TreeTypeSubstituter =
if (okParams.isEmpty) EmptyTreeTypeSubstituter
else {
@@ -621,7 +621,7 @@ trait Implicits {
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
}
-
+
if (context.hasErrors)
fail("typing TypeApply reported errors for the implicit tree")
else {
@@ -780,13 +780,13 @@ trait Implicits {
val newPending = undoLog undo {
is filterNot (alt => alt == i || {
try improves(i, alt)
- catch {
- case e: CyclicReference =>
+ catch {
+ case e: CyclicReference =>
if (printInfers) {
println(i+" discarded because cyclic reference occurred")
e.printStackTrace()
}
- true
+ true
}
})
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index e1aa8b46eb..c09e535117 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -210,9 +210,9 @@ trait Infer {
def getContext = context
def issue(err: AbsTypeError): Unit = context.issue(err)
-
- def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
-
+
+ def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -465,7 +465,7 @@ trait Infer {
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
-
+
foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
val retract = (
targ.typeSymbol == NothingClass // only retract Nothings
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 3ba8cefca8..088a56cd7b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -18,7 +18,7 @@ trait MethodSynthesis {
import global._
import definitions._
import CODE._
-
+
object synthesisUtil {
type M[T] = Manifest[T]
type CM[T] = ClassManifest[T]
@@ -39,7 +39,7 @@ trait MethodSynthesis {
typeRef(container.typeConstructor.prefix, container, args map (_.tpe))
}
-
+
def companionType[T](implicit m: M[T]) =
getRequiredModule(m.erasure.getName).tpe
@@ -71,7 +71,7 @@ trait MethodSynthesis {
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
-
+
def newMethodFlags(name: TermName) = {
val overrideFlag = if (isOverride(name)) OVERRIDE else 0L
overrideFlag | SYNTHETIC
@@ -344,7 +344,7 @@ trait MethodSynthesis {
if (mods.isDeferred)
tpt setOriginal tree.tpt
- // TODO - reconcile this with the DefDef creator in Trees (which
+ // TODO - reconcile this with the DefDef creator in Trees (which
// at this writing presented no way to pass a tree in for tpt.)
atPos(derivedSym.pos) {
DefDef(
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index eb7ea51d2b..82bcb93965 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -113,7 +113,7 @@ trait Namers extends MethodSynthesis {
private def contextFile = context.unit.source.file
private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
case ex: TypeError =>
- // H@ need to ensure that we handle only cyclic references
+ // H@ need to ensure that we handle only cyclic references
TypeSigError(tree, ex)
alt
}
@@ -284,7 +284,7 @@ trait Namers extends MethodSynthesis {
private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
sym.name.toTermName match {
case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
- case _ =>
+ case _ =>
log("[+symbol] " + sym.debugLocationString)
}
tree.symbol = sym
@@ -300,7 +300,7 @@ trait Namers extends MethodSynthesis {
val pos = tree.pos
val isParameter = tree.mods.isParameter
val flags = tree.mods.flags & mask
-
+
tree match {
case TypeDef(_, _, _, _) if isParameter => owner.newTypeParameter(name.toTypeName, pos, flags)
case TypeDef(_, _, _, _) => owner.newTypeSymbol(name.toTypeName, pos, flags)
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 44a3abf1b2..e8d3b7a7de 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -428,11 +428,11 @@ trait NamesDefaults { self: Analyzer =>
}
} else NoSymbol
}
-
+
private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
val savedParams = context.extractUndetparams()
val savedReporting = context.ambiguousErrors
-
+
context.setAmbiguousErrors(false)
try fn(savedParams)
finally {
@@ -451,7 +451,7 @@ trait NamesDefaults { self: Analyzer =>
|| (ctx.owner.rawInfo.member(name) != NoSymbol)
)
)
-
+
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
@@ -519,7 +519,7 @@ trait NamesDefaults { self: Analyzer =>
def matchesName(param: Symbol) = !param.isSynthetic && (
(param.name == name) || (param.deprecatedParamName match {
case Some(`name`) =>
- context0.unit.deprecationWarning(arg.pos,
+ context0.unit.deprecationWarning(arg.pos,
"the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
true
case _ => false
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index 8bf5fc3557..b060fd7121 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -1160,7 +1160,7 @@ class Foo(x: Other) { x._1 } // no error in this order
def _match(n: Name): SelectStart = matchStrategy DOT n
private lazy val oneSig: Type =
- typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
+ typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 4a92458403..507ffd55d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -150,7 +150,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// Override checking ------------------------------------------------------------
-
+
def isJavaVarargsAncestor(clazz: Symbol) = (
clazz.isClass
&& clazz.isJavaDefined
@@ -167,14 +167,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
-
+
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
-
+
val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
-
+
val params = bridge.paramss.head
val elemtp = params.last.tpe.typeArgs.head
val idents = params map Ident
@@ -183,7 +183,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
localTyper typed DefDef(bridge, body)
}
-
+
// For all concrete non-private members that have a (Scala) repeated parameter:
// compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
@@ -203,7 +203,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
}
-
+
bridges.toList
}
else Nil
@@ -277,7 +277,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
*/
def checkOverride(member: Symbol, other: Symbol) {
debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
-
+
def memberTp = self.memberType(member)
def otherTp = self.memberType(other)
def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
@@ -337,7 +337,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def deferredCheck = member.isDeferred || !other.isDeferred
def subOther(s: Symbol) = s isSubClass other.owner
def subMember(s: Symbol) = s isSubClass member.owner
-
+
if (subOther(member.owner) && deferredCheck) {
//Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
return
@@ -430,12 +430,12 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
overrideTypeError();
- }
+ }
else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
val otherTp = self.memberInfo(other)
-
+
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
explainTypes(_.bounds containsType _, otherTp, memberTp)
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 64f1662a22..5318268bf2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -334,7 +334,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
val selection = Select(This(clazz), protAcc)
def mkApply(fn: Tree) = Apply(fn, qual :: Nil)
- val res = atPos(tree.pos) {
+ val res = atPos(tree.pos) {
targs.head match {
case EmptyTree => mkApply(selection)
case _ => mkApply(TypeApply(selection, targs))
@@ -376,18 +376,18 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = hostForAccessorOf(field, currentOwner.enclClass)
assert(clazz != NoSymbol, field)
debuglog("Decided for host class: " + clazz)
-
+
val accName = nme.protSetterName(field.originalName)
val protectedAccessor = clazz.info decl accName orElse {
val protAcc = clazz.newMethod(accName, field.pos)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
val accessorType = MethodType(params, UnitClass.tpe)
-
+
protAcc setInfoAndEnter accessorType
val obj :: value :: Nil = params
storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value))))
-
+
protAcc
}
atPos(tree.pos)(Select(This(clazz), protectedAccessor))
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index c53b92c5be..7559b78db3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -115,7 +115,7 @@ trait SyntheticMethods extends ast.TreeDSL {
* def canEqual(that: Any) = that.isInstanceOf[This]
*/
def canEqualMethod: Tree = (
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
)
@@ -248,7 +248,7 @@ trait SyntheticMethods extends ast.TreeDSL {
}
if (phase.id > currentRun.typerPhase.id) templ
- else deriveTemplate(templ)(body =>
+ else deriveTemplate(templ)(body =>
if (clazz.isCase) caseTemplateBody()
else synthesize() match {
case Nil => body // avoiding unnecessary copy
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4f4087a953..1434002121 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -415,7 +415,7 @@ trait TypeDiagnostics {
"\nIf applicable, you may wish to try moving some members into another object."
)
}
-
+
/** Report a type error.
*
* @param pos0 The position where to report the error
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 0a1a385846..556c680cda 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -156,7 +156,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ErrorType =>
fun
}
-
+
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
inferView(tree, from, to, reportAmbiguous, true)
@@ -276,7 +276,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
tp match {
case TypeRef(pre, sym, args) =>
- checkNotLocked(sym) &&
+ checkNotLocked(sym) &&
((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym))
// @M! info for a type ref to a type parameter now returns a polytype
// @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
@@ -1097,7 +1097,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// Note: implicit arguments are still inferred (this kind of "chaining" is allowed)
)
}
-
+
def adaptToMember(qual: Tree, searchTemplate: Type): Tree =
adaptToMember(qual, searchTemplate, true, true)
def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree =
@@ -1112,12 +1112,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
case EmptyTree => qual
- case coercion =>
+ case coercion =>
if (settings.logImplicitConv.value)
unit.echo(qual.pos,
"applied implicit conversion from %s to %s = %s".format(
qual.tpe, searchTemplate, coercion.symbol.defString))
-
+
typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
}
}
@@ -2180,9 +2180,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
!e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
log("Double definition detected:\n " +
- ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
+ ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain)))
-
+
DefDefinedTwiceError(e.sym, e1.sym)
scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
}
@@ -2867,7 +2867,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
-
+
/** If we map a set of hidden symbols to their existential bounds, we
* have a problem: the bounds may themselves contain references to the
* hidden symbols. So this recursively calls existentialBound until
@@ -2894,7 +2894,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
})
}).toMap
}
-
+
/** Given a set `rawSyms` of term- and type-symbols, and a type
* `tp`, produce a set of fresh type parameters and a type so that
* it can be abstracted to an existential type. Every type symbol
@@ -2938,10 +2938,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def packSymbols(hidden: List[Symbol], tp: Type): Type =
if (hidden.isEmpty) tp
else existentialTransform(hidden, tp)(existentialAbstraction)
-
+
def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
- ctx.owner.isTerm &&
- (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
+ ctx.owner.isTerm &&
+ (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
{
var ctx1 = ctx.outer
while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
@@ -3893,7 +3893,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
reallyExists(sym) &&
((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
}
-
+
if (defSym == NoSymbol) {
var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
@@ -4389,7 +4389,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ReferenceToBoxed(idt @ Ident(_)) =>
val id1 = typed1(idt, mode, pt) match { case id: Ident => id }
- treeCopy.ReferenceToBoxed(tree, id1) setType AnyRefClass.tpe
+ treeCopy.ReferenceToBoxed(tree, id1) setType AnyRefClass.tpe
case Literal(value) =>
tree setType (
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 19b8632ed7..312958feca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -22,7 +22,7 @@ trait Unapplies extends ast.TreeDSL
import treeInfo.{ isRepeatedParamType, isByNameParamType }
private val unapplyParamName = nme.x_0
-
+
/** returns type list for return type of the extraction */
def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
assert(ufn.isMethod, ufn)
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 24c9926ad8..fbe92e5d84 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -71,10 +71,10 @@ object DocStrings {
* Every section starts with a `@` and extends to the next `@`, or
* to the end of the comment string, but excluding the final two
* characters which terminate the comment.
- *
- * Also take usecases into account - they need to expand until the next
- * usecase or the end of the string, as they might include other sections
- * of their own
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
*/
def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
@@ -84,10 +84,10 @@ object DocStrings {
idxs2 zip (idxs2.tail ::: List(str.length - 2))
}
}
-
+
/**
- * Merge sections following an usecase into the usecase comment, so they
- * can override the parent symbol's sections
+ * Merge sections following an usecase into the usecase comment, so they
+ * can override the parent symbol's sections
*/
def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
idxs.find(str.substring(_).startsWith("@usecase")) match {
@@ -99,7 +99,7 @@ object DocStrings {
idxs
}
}
-
+
/** Does interval `iv` start with given `tag`?
*/
def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index f7c27dceb5..d1cdd30dd8 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -20,7 +20,7 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val typedSelectCount = new Counter
val typerNanos = new Timer
val classReadNanos = new Timer
-
+
val failedApplyNanos = new Timer
val failedOpEqNanos = new Timer
val failedSilentNanos = new Timer
diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala
index 5f152ecabb..0af34020a8 100644
--- a/src/compiler/scala/tools/util/EditDistance.scala
+++ b/src/compiler/scala/tools/util/EditDistance.scala
@@ -8,7 +8,7 @@ package util
object EditDistance {
import java.lang.Character.{ toLowerCase => lower }
-
+
def similarString(name: String, allowed: TraversableOnce[String]): String = {
val suggested = suggestions(name, allowed.toSeq, maxDistance = 1, maxSuggestions = 2)
if (suggested.isEmpty) ""
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 9930f28229..0382304bad 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -53,23 +53,23 @@ abstract class CPSAnnotationChecker extends CPSUtils {
if ((annots1 corresponds annots2)(_.atp <:< _.atp))
return true
- // Need to handle uninstantiated type vars specially:
-
+ // Need to handle uninstantiated type vars specially:
+
// g map (x => x) with expected type List[Int] @cps
// results in comparison ?That <:< List[Int] @cps
-
+
// Instantiating ?That to an annotated type would fail during
// transformation.
-
+
// Instead we force-compare tpe1 <:< tpe2.withoutAnnotations
// to trigger instantiation of the TypeVar to the base type
-
+
// This is a bit unorthodox (we're only supposed to look at
// annotations here) but seems to work.
-
+
if (!annots2.isEmpty && !tpe1.isGround)
return tpe1 <:< tpe2.withoutAnnotations
-
+
false
}
@@ -355,7 +355,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
}
case _ => Nil
}
-
+
val types = cpsParamAnnotation(t.tpe)
// TODO: check that it has been adapted and if so correctly
extra ++ (if (types.isEmpty) Nil else List(single(types)))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 8bbda5dd05..075009ce5e 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -12,7 +12,7 @@ trait CPSUtils {
var cpsEnabled = true
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
def vprintln(x: =>Any): Unit = if (verbose) println(x)
-
+
object cpsNames {
val catches = newTermName("$catches")
val ex = newTermName("$ex")
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 3d85f2f52f..80571943e5 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -55,7 +55,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
-
+
@deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
def this(initial: Int, names: String*) = {
this(initial)
@@ -201,7 +201,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
case _ => false
}
override def hashCode: Int = id.##
-
+
/** Create a ValueSet which contains this value and another one */
def + (v: Value) = ValueSet(this, v)
}
@@ -266,7 +266,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
* new array of longs */
def toBitMask: Array[Long] = nnIds.toBitMask
}
-
+
/** A factory object for value sets */
object ValueSet {
import generic.CanBuildFrom
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 508ef25e81..dceed26439 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -12,12 +12,12 @@ package scala
/** A function of 0 parameters.
- *
+ *
* In the following example, the definition of javaVersion is a
* shorthand for the anonymous class definition anonfun0:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val javaVersion = () => sys.props("java.version")
*
* val anonfun0 = new Function0[String] {
@@ -31,13 +31,13 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
trait Function0[@specialized +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(): R
-
+
override def toString() = "<function0>"
}
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 06936e54cb..8995ef912b 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 1 parameter.
- *
+ *
* In the following example, the definition of succ is a
* shorthand for the anonymous class definition anonfun1:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
* def apply(x: Int): Int = x + 1
@@ -29,7 +29,7 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends AnyRef { self =>
@@ -37,7 +37,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, s
* @return the result of function application.
*/
def apply(v1: T1): R
-
+
/** Composes two instances of Function1 in a new Function1, with this function applied last.
*
* @tparam A the type to which function `g` can be applied
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 1812f042e0..cacb96ef5d 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 2 parameters.
- *
+ *
* In the following example, the definition of max is a
* shorthand for the anonymous class definition anonfun2:
*
* {{{
- * object Main extends App {
+ * object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
@@ -30,7 +30,7 @@ package scala
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
-
+
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 70caff0221..3c5d6d0d23 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -26,18 +26,18 @@ package scala
*
* {{{
* val sample = 1 to 10
- * val isEven: PartialFunction[Int, String] = {
- * case x if x % 2 == 0 => x+" is even"
+ * val isEven: PartialFunction[Int, String] = {
+ * case x if x % 2 == 0 => x+" is even"
* }
*
* // the method collect can use isDefinedAt to select which members to collect
* val evenNumbers = sample collect isEven
*
- * val isOdd: PartialFunction[Int, String] = {
- * case x if x % 2 == 1 => x+" is odd"
+ * val isOdd: PartialFunction[Int, String] = {
+ * case x if x % 2 == 1 => x+" is odd"
* }
*
- * // the method orElse allows chaining another partial function to handle
+ * // the method orElse allows chaining another partial function to handle
* // input outside the declared domain
* val numbers = sample map (isEven orElse isOdd)
* }}}
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index 0106ad34ee..ab8b0a4505 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -23,7 +23,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
override def productArity = 1
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case _ => throw new IndexOutOfBoundsException(n.toString())
}
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index ca53b580c0..536fb2fed9 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -23,7 +23,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
override def productArity = 10
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 3d5942f3fa..7d49eccc5e 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -23,7 +23,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
override def productArity = 11
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 803193793c..0e9c4a01a2 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -23,7 +23,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
override def productArity = 12
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index 0c1d889624..a0629201d0 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -23,7 +23,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 13
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 0222309a0a..32dda81c3e 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -23,7 +23,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 14
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 41be7ec504..57851f9870 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -23,7 +23,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 15
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index accee3f965..75076f3b3c 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -23,7 +23,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 16
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index da80ae9a6b..9ee6072ffe 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -23,7 +23,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 17
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index ea25647762..25d0839af1 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -23,7 +23,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 18
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 5d4347c1a8..5464de7264 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -23,7 +23,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 19
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 4e6c70f463..8097245926 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -23,7 +23,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
override def productArity = 2
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index f23a0dee3a..b094e09aca 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -23,7 +23,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 20
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 4a4fe0697f..fa06cfb438 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -23,7 +23,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 21
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 7ee01b85ae..46038bf1a2 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -23,7 +23,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 22
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 23563c9e23..3a4cd8fc5e 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -23,7 +23,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
override def productArity = 3
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 4abaa9051b..a4d47457fa 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -23,7 +23,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
override def productArity = 4
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 9aa4af58b7..9f25e70af0 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -23,7 +23,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
override def productArity = 5
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 2ca1d7c31e..87fd318c68 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -23,7 +23,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
override def productArity = 6
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index b7af2d3e32..d074503315 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -23,7 +23,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
override def productArity = 7
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 17b5e48512..bd6150c235 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -23,7 +23,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
override def productArity = 8
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 784e9a7029..1f042944cc 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -23,7 +23,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
override def productArity = 9
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index 811a735110..67126b3069 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -16,7 +16,7 @@ trait Specializable extends SpecializableCompanion
object Specializable {
// No type parameter in @specialized annotation.
trait SpecializedGroup { }
-
+
// Smuggle a list of types by way of a tuple upon which Group is parameterized.
class Group[T >: Null](value: T) extends SpecializedGroup { }
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 6116547aa2..8ca312afc5 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -13,7 +13,7 @@ import collection.mutable.ArrayBuffer
/** A class to support string interpolation.
* This class supports string interpolation as outlined in Scala SIP-11.
* It needs to be fully documented once the SIP is accepted.
- *
+ *
* @param parts The parts that make up the interpolated string,
* without the expressions that get inserted by interpolation.
*/
@@ -26,13 +26,13 @@ case class StringContext(parts: String*) {
* @param `args` The arguments to be checked.
* @throws An `IllegalArgumentException` if this is not the case.
*/
- def checkLengths(args: Any*): Unit =
+ def checkLengths(args: Any*): Unit =
if (parts.length != args.length + 1)
throw new IllegalArgumentException("wrong number of arguments for interpolated string")
/** The simple string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* @param `args` The arguments to be inserted into the resulting string.
@@ -55,21 +55,21 @@ case class StringContext(parts: String*) {
}
/** The formatted string interpolator.
- *
+ *
* It inserts its arguments between corresponding parts of the string context.
* It also treats standard escape sequences as defined in the Scala specification.
* Finally, if an interpolated expression is followed by a `parts` string
* that starts with a formatting specifier, the expression is formatted according to that
* specifier. All specifiers allowed in Java format strings are handled, and in the same
* way they are treated in Java.
- *
+ *
* @param `args` The arguments to be inserted into the resulting string.
* @throws An `IllegalArgumentException`
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
* @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
- *
+ *
* Note: The `f` method works by assembling a format string from all the `parts` strings and using
* `java.lang.String.format` to format all arguments with that format string. The format string is
* obtained by concatenating all `parts` strings, and performing two transformations:
@@ -125,14 +125,14 @@ object StringContext {
* @param idx The index of the offending backslash character in `str`.
*/
class InvalidEscapeException(str: String, idx: Int)
- extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+ extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
/** Expands standard Scala escape sequences in a string.
* Escape sequences are:
* control: `\b`, `\t`, `\n`, `\f`, `\r`
* escape: `\\`, `\"`, `\'`
* octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
- *
+ *
* @param A string that may contain escape sequences
* @return The string with all escape sequences expanded.
*/
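The interpolators documented above desugar into calls on StringContext; a short sketch of both the implicit and the explicit form, assuming the SIP-11 `s` and `f` interpolators behave as the comments describe (the values `name` and `ratio` are illustrative):

  object InterpolationDemo extends App {
    val name  = "world"
    val ratio = 1.0 / 3.0

    // The compiler rewrites this literal into StringContext("Hello, ", "!").s(name).
    println(s"Hello, $name!")

    // f concatenates the parts into one format string and delegates to
    // java.lang.String.format, as the note above explains.
    println(f"ratio = $ratio%.3f")   // ratio = 0.333

    // The same call, spelled out by hand.
    println(StringContext("Hello, ", "!").s(name))
  }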
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 02fdd0cba5..6d31d35e51 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -19,5 +19,5 @@ case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
-
+
}
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index ba2a02a8b2..10d554d467 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -28,5 +28,5 @@ case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")"
-
+
}
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 7f51d172d4..2065e4f017 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -29,5 +29,5 @@ case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1:
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")"
-
+
}
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index 4bbc6a0eab..a463986752 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -31,5 +31,5 @@ case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")"
-
+
}
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 77bd59bf2e..2bee0d69ad 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -32,5 +32,5 @@ case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")"
-
+
}
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index bf7a4ce016..60f7c51e64 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -33,5 +33,5 @@ case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")"
-
+
}
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 582c359bc6..fc8e30580b 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -34,5 +34,5 @@ case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")"
-
+
}
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index a1e9a790ff..80181f6648 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -35,5 +35,5 @@ case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")"
-
+
}
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f531766c18..6236122be2 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -36,5 +36,5 @@ case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")"
-
+
}
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a96db25e4b..dd6a819ac5 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -37,5 +37,5 @@ case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")"
-
+
}
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 718280d68a..65f0fd22cf 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -38,5 +38,5 @@ case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")"
-
+
}
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index b1befca4fa..684d2266e8 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -23,7 +23,7 @@ case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @s
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
-
+
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
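A small usage sketch of the `swap` method whose documentation appears above (the Map inversion is only an illustration):

  object SwapDemo extends App {
    val pair    = ("key", 1)
    val swapped = pair.swap            // (1, "key"); the original pair is unchanged
    println(swapped)

    // swap is handy when a Map's keys and values need to trade places.
    println(Map("a" -> 1, "b" -> 2).map(_.swap))   // Map(1 -> a, 2 -> b)
  }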
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 4a44c0bb89..cf3626909d 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -39,5 +39,5 @@ case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")"
-
+
}
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 580a169e39..78b9c585c6 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -40,5 +40,5 @@ case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")"
-
+
}
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index fd3392ddea..0993dfbbc3 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -41,5 +41,5 @@ case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 +
"," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")"
-
+
}
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 0d5399308b..dfa0c962a2 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -24,7 +24,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
+
@deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
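The deprecation note above points at `zipped`; a brief sketch of the replacement, assuming three equally sized lists:

  object ZippedDemo extends App {
    val xs = List(1, 2, 3)
    val ys = List(10, 20, 30)
    val zs = List(100, 200, 300)

    // zipped traverses the three collections element-wise without first
    // building an intermediate List[(Int, Int, Int)].
    println((xs, ys, zs).zipped.map(_ + _ + _))   // List(111, 222, 333)
  }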
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a859078bcf..a919072c88 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -22,5 +22,5 @@ case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
-
+
}
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 1edfb673ee..6a94f48ab4 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -23,5 +23,5 @@ case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T
extends Product5[T1, T2, T3, T4, T5]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")"
-
+
}
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5b74937e58..34f8224627 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -24,5 +24,5 @@ case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4,
extends Product6[T1, T2, T3, T4, T5, T6]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")"
-
+
}
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index a7f572e9f0..6fc3477ba2 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -25,5 +25,5 @@ case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4:
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")"
-
+
}
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 9bb427d689..1e21b684fc 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -26,5 +26,5 @@ case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")"
-
+
}
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 4d50539e0c..453cea31a1 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -27,5 +27,5 @@ case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")"
-
+
}
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 053cdba220..880b645daa 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -52,8 +52,8 @@ import java.util.logging.Level
// INFO lies between WARNING and FINE
% scalac -Xelide-below INFO example.scala && scala Test
Warning! Danger! Warning!
- I computed a value: 0
- }}}
+ I computed a value: 0
+ }}}
*
* @author Paul Phillips
* @since 2.8
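A sketch mirroring the scalac example above (method names are illustrative; FINE sits below INFO and WARNING above it, so only `warn` survives -Xelide-below INFO):

  import scala.annotation.elidable
  import scala.annotation.elidable._

  object ElideDemo extends App {
    // Removed entirely when compiled with -Xelide-below INFO.
    @elidable(FINE) def debug(msg: String): Unit = println("I computed a value: " + msg)

    // Kept, because WARNING is not below INFO.
    @elidable(WARNING) def warn(msg: String): Unit = println("Warning! " + msg)

    warn("Danger!")
    debug("0")
  }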
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 02298ef096..b51a37cf9e 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -151,7 +151,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
def next(): Repr = {
if (!hasNext)
Iterator.empty.next
-
+
val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
val result = (self.newBuilder ++= forcedElms).result
var i = idxs.length - 2
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b235379575..cbbedc0231 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -11,12 +11,12 @@ package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
-/**
+/**
* @define Coll mutable.SortedSet
* @define coll mutable sorted
*
* @author Lucien Pereira
- *
+ *
*/
abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] {
@@ -26,7 +26,7 @@ abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with Sorted
* is evaluated elems is cloned (which is O(n)).
*
* Fortunately GrowingBuilder comes to rescue.
- *
+ *
*/
override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty)
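The comment above motivates the switch to GrowingBuilder: it mutates one growing set instead of cloning `elems` on every addition. A small sketch of going through the factory-provided builder (assuming the standard mutable.SortedSet companion):

  import scala.collection.mutable

  object BuilderDemo extends App {
    val b = mutable.SortedSet.newBuilder[Int]   // the GrowingBuilder described above
    b += 5
    b += 1
    b += 3
    println(b.result())   // TreeSet(1, 3, 5)
  }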
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index abccd91f9c..870d5534dc 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -131,7 +131,7 @@ object BitSet extends BitSetFactory[BitSet] {
* the capacity of two long values). The constructor wraps an existing
* bit mask without copying, thus exposing a mutable part of the internal
* implementation. Care needs to be taken not to modify the exposed
- * array.
+ * array.
*/
class BitSetN(val elems: Array[Long]) extends BitSet {
protected def nwords = elems.length
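For context, user code normally reaches BitSetN only through the factory, which picks a representation by the largest element; a small illustrative sketch:

  import scala.collection.immutable.BitSet

  object BitSetDemo extends App {
    // Elements below 64 fit in one word (BitSet1), below 128 in two (BitSet2);
    // anything larger ends up backed by the Array[Long] of a BitSetN.
    val small = BitSet(1, 5, 63)
    val large = BitSet(1, 5, 200)

    println(large contains 200)   // true
    println(small | large)        // union, still an immutable BitSet
  }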
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index e2a4a09938..381fcf3117 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -326,13 +326,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def head : B = hd
override def tail : List[B] = tl
override def isEmpty: Boolean = false
-
+
private def writeObject(out: ObjectOutputStream) {
out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization
out.writeObject(this.hd)
out.writeObject(this.tl)
}
-
+
private def readObject(in: ObjectInputStream) {
val obj = in.readObject()
if (obj == ListSerializeStart) {
@@ -340,7 +340,7 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
this.tl = in.readObject().asInstanceOf[List[B]]
} else oldReadObject(in, obj)
}
-
+
/* The oldReadObject method exists here for compatibility reasons.
* :: objects used to be serialized by serializing all the elements to
* the output stream directly, but this was broken (see SI-5374).
@@ -359,13 +359,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
current = list
}
}
-
+
private def oldWriteObject(out: ObjectOutputStream) {
var xs: List[B] = this
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
}
-
+
}
/** $factoryInfo
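The writeObject/readObject pair above serializes a cons cell as a ListSerializeStart marker followed by head and tail, keeping legacy streams readable through oldReadObject (SI-5374). A round-trip sketch using plain Java serialization (the helper name is illustrative):

  import java.io._

  object ListSerializationDemo extends App {
    def roundTrip[A <: Serializable](a: A): A = {
      val bytes = new ByteArrayOutputStream()
      val out   = new ObjectOutputStream(bytes)
      out.writeObject(a)
      out.close()
      new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray)).readObject().asInstanceOf[A]
    }

    val xs   = List(1, 2, 3)
    val copy = roundTrip(xs)
    println(copy == xs)   // true: structural equality survives the round trip
    println(copy eq xs)   // false: a fresh cons chain is rebuilt on read
  }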
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 7537558f0b..b72d83f896 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -77,9 +77,9 @@ extends collection.AbstractSeq[Int]
}
final val lastElement = start + (numRangeElements - 1) * step
final val terminalElement = start + numRangeElements * step
-
+
override def last = if (isEmpty) Nil.last else lastElement
-
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
@@ -93,14 +93,14 @@ extends collection.AbstractSeq[Int]
override def size = length
override def length = if (numRangeElements < 0) fail() else numRangeElements
-
+
private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
private def validateMaxLength() {
if (numRangeElements < 0)
fail()
}
-
+
def validateRangeBoundaries(f: Int => Any): Boolean = {
validateMaxLength()
@@ -121,7 +121,7 @@ extends collection.AbstractSeq[Int]
if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
else start + (step * idx)
}
-
+
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
if (validateRangeBoundaries(f)) {
var i = start
@@ -309,7 +309,7 @@ object Range {
// number of full-sized jumps.
val hasStub = isInclusive || (gap % step != 0)
val result: Long = jumps + ( if (hasStub) 1 else 0 )
-
+
if (result > scala.Int.MaxValue) -1
else result.toInt
}
@@ -405,4 +405,3 @@ object Range {
// super.foreach(f)
}
}
- \ No newline at end of file
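The jump/stub arithmetic above is what determines how many elements a Range has; a standalone sketch of the same calculation (the empty-range check here is simplified relative to the real Range.count):

  object RangeCountDemo extends App {
    // The number of full step-sized jumps, plus one stub element when the
    // range is inclusive or the gap is not an exact multiple of the step.
    def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
      require(step != 0, "step cannot be 0")
      val gap: Long = end.toLong - start.toLong
      val empty =
        if (start == end) !isInclusive
        else if (step > 0) start > end
        else start < end
      if (empty) 0
      else {
        val jumps = gap / step
        val hasStub = isInclusive || (gap % step != 0)
        val result: Long = jumps + (if (hasStub) 1 else 0)
        if (result > Int.MaxValue) -1 else result.toInt
      }
    }

    println(count(0, 10, 3, isInclusive = false))   // 4 -> 0, 3, 6, 9
    println(count(0, 10, 5, isInclusive = true))    // 3 -> 0, 5, 10
    println((0 until 10 by 3).length)               // 4, agrees with the above
  }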
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index ba2af8f120..9aea25f330 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -12,9 +12,9 @@ package mutable
/**
* An immutable AVL Tree implementation used by mutable.TreeSet
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -28,28 +28,28 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
/**
* Returns a new tree containing the given element.
* Throws an IllegalArgumentException if the element is already present.
- *
+ *
*/
def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
/**
* Return a new tree which does not contain the given element.
- *
+ *
*/
def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
throw new NoSuchElementException(String.valueOf(value))
-
+
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
-
+
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
@@ -90,7 +90,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Returns a new tree containing the given element.
* Throws an IllegalArgumentException if the element is already present.
- *
+ *
*/
override def insert[B >: A](value: B, ordering: Ordering[B]) = {
val ord = ordering.compare(value, data)
@@ -104,7 +104,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a new tree which does not contain the given element.
- *
+ *
*/
override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
val ord = ordering.compare(value, data)
@@ -130,7 +130,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the smallest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMin[B >: A]: (B, AVLTree[B]) = {
if (Leaf == left)
@@ -144,7 +144,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
/**
* Return a tuple containing the biggest element of the provided tree
* and a new tree from which this element has been extracted.
- *
+ *
*/
override def removeMax[B >: A]: (B, AVLTree[B]) = {
if (Leaf == right)
@@ -154,7 +154,7 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
(max, Node(data, left, newRight).rebalance)
}
}
-
+
override def rebalance[B >: A] = {
if (-2 == balance) {
if (1 == left.balance)
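A self-contained model of the rebalancing that `rebalance` above dispatches on, using the same sign convention (balance = right depth minus left depth, so -2 means left-heavy); the rotation helpers and the insert are illustrative, not the library's own:

  object AvlSketch extends App {
    sealed trait Tree { def depth: Int; def balance: Int }
    case object Empty extends Tree { val depth = 0; val balance = 0 }
    case class Node(value: Int, left: Tree, right: Tree) extends Tree {
      val depth   = 1 + math.max(left.depth, right.depth)
      val balance = right.depth - left.depth
    }

    def rotateRight(n: Node): Node = n.left match {
      case l: Node => Node(l.value, l.left, Node(n.value, l.right, n.right))
      case Empty   => n
    }
    def rotateLeft(n: Node): Node = n.right match {
      case r: Node => Node(r.value, Node(n.value, n.left, r.left), r.right)
      case Empty   => n
    }

    // The inner child's balance picks single vs. double rotation, exactly as above.
    def rebalance(n: Node): Node = n.balance match {
      case -2 if n.left.balance == 1  => rotateRight(n.copy(left = rotateLeft(n.left.asInstanceOf[Node])))   // left-right
      case -2                         => rotateRight(n)                                                      // left-left
      case 2 if n.right.balance == -1 => rotateLeft(n.copy(right = rotateRight(n.right.asInstanceOf[Node]))) // right-left
      case 2                          => rotateLeft(n)                                                       // right-right
      case _                          => n
    }

    def insert(t: Tree, v: Int): Tree = t match {
      case Empty => Node(v, Empty, Empty)
      case n @ Node(x, l, r) =>
        if (v < x) rebalance(Node(x, insert(l, v), r))
        else if (v > x) rebalance(Node(x, l, insert(r, v)))
        else n
    }

    val t = (1 to 7).foldLeft(Empty: Tree)(insert)
    println(t.depth)   // 3: inserting sorted input still yields a balanced tree
  }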
diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala
index 699b96b87c..cbec118aa9 100644
--- a/src/library/scala/collection/mutable/Ctrie.scala
+++ b/src/library/scala/collection/mutable/Ctrie.scala
@@ -22,29 +22,29 @@ import annotation.switch
private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
import INodeBase._
-
+
WRITE(bn)
-
+
def this(g: Gen) = this(null, g)
-
+
@inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
-
+
@inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
-
+
final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct)
-
+
@inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = {
val m = /*READ*/mainnode
val prevval = /*READ*/m.prev
if (prevval eq null) m
else GCAS_Complete(m, ct)
}
-
+
@tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else {
// complete the GCAS
val prev = /*READ*/m.prev
val ctr = ct.readRoot(true)
-
+
prev match {
case null =>
m
@@ -71,7 +71,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
@inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = {
n.WRITE_PREV(old)
if (CAS(old, n)) {
@@ -79,27 +79,27 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
/*READ*/n.prev eq null
} else false
}
-
+
@inline private def inode(cn: MainNode[K, V]) = {
val nin = new INode[K, V](gen)
nin.WRITE(cn)
nin
}
-
+
final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = {
val nin = new INode[K, V](ngen)
val main = GCAS_READ(ct)
nin.WRITE(main)
nin
}
-
+
/** Inserts a key value pair, overwriting the old pair if the keys match.
- *
+ *
* @return true if successful, false otherwise
*/
@tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multiway node
val idx = (hc >>> lev) & 0x1f
@@ -137,7 +137,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
GCAS(ln, nn, ct)
}
}
-
+
/** Inserts a new key value pair, given that a specific condition is met.
*
* @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
@@ -145,7 +145,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*/
@tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multiway node
val idx = (hc >>> lev) & 0x1f
@@ -228,14 +228,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
/** Looks up the value associated with the key.
- *
+ *
* @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
*/
@tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] => // 1) a multinode
val idx = (hc >>> lev) & 0x1f
@@ -270,15 +270,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
ln.get(k).asInstanceOf[Option[AnyRef]].orNull
}
}
-
+
/** Removes the key associated with the given value.
- *
+ *
* @param v if null, will remove the key regardless of the value; otherwise removes only if the binding contains that exact key and value
* @return null if not successful, an Option[V] indicating the previous value otherwise
*/
final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
-
+
m match {
case cn: CNode[K, V] =>
val idx = (hc >>> lev) & 0x1f
@@ -289,7 +289,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
val pos = Integer.bitCount(bmp & (flag - 1))
val sub = cn.array(pos)
val res = sub match {
- case in: INode[K, V] =>
+ case in: INode[K, V] =>
if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct)
else {
if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct)
@@ -301,7 +301,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
if (GCAS(cn, ncn, ct)) Some(sn.v) else null
} else None
}
-
+
if (res == None || (res eq null)) res
else {
@tailrec def cleanParent(nonlive: AnyRef) {
@@ -325,13 +325,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case _ => // parent is no longer a cnode, we're done
}
}
-
+
if (parent ne null) { // never tomb at root
val n = GCAS_READ(ct)
if (n.isInstanceOf[TNode[_, _]])
cleanParent(n)
}
-
+
res
}
}
@@ -351,7 +351,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
}
-
+
private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) {
val m = nd.GCAS_READ(ct)
m match {
@@ -359,14 +359,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case _ =>
}
}
-
+
final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null
-
+
final def cachedSize(ct: Ctrie[K, V]): Int = {
val m = GCAS_READ(ct)
m.cachedSize(ct)
}
-
+
/* this is a quiescent method! */
def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
case null => "<null>"
@@ -375,14 +375,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
case ln: LNode[_, _] => ln.string(lev)
case x => "<elem: %s>".format(x)
})
-
+
}
private[mutable] object INode {
val KEY_PRESENT = new AnyRef
val KEY_ABSENT = new AnyRef
-
+
def newRootNode[K, V] = {
val gen = new Gen
val cn = new CNode[K, V](0, new Array(0), gen)
@@ -393,11 +393,11 @@ private[mutable] object INode {
private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
WRITE_PREV(p)
-
+
def string(lev: Int) = throw new UnsupportedOperationException
-
+
def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
-
+
override def toString = "FailedNode(%s)".format(p)
}
@@ -449,7 +449,7 @@ extends MainNode[K, V] {
private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
extends CNodeBase[K, V] {
-
+
// this should only be called from within read-only snapshots
final def cachedSize(ct: AnyRef) = {
val currsz = READ_SIZE()
@@ -460,7 +460,7 @@ extends CNodeBase[K, V] {
READ_SIZE()
}
}
-
+
// lends itself towards being parallelizable by choosing
// a random starting offset in the array
// => if there are concurrent size computations, they start
@@ -480,7 +480,7 @@ extends CNodeBase[K, V] {
}
sz
}
-
+
final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val narr = new Array[BasicNode](len)
@@ -488,7 +488,7 @@ extends CNodeBase[K, V] {
narr(pos) = nn
new CNode[K, V](bitmap, narr, gen)
}
-
+
final def removedAt(pos: Int, flag: Int, gen: Gen) = {
val arr = array
val len = arr.length
@@ -497,7 +497,7 @@ extends CNodeBase[K, V] {
Array.copy(arr, pos + 1, narr, pos, len - pos - 1)
new CNode[K, V](bitmap ^ flag, narr, gen)
}
-
+
final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
val len = array.length
val bmp = bitmap
@@ -507,7 +507,7 @@ extends CNodeBase[K, V] {
Array.copy(array, pos, narr, pos + 1, len - pos)
new CNode[K, V](bmp | flag, narr, gen)
}
-
+
/** Returns a copy of this cnode such that all the i-nodes below it are copied
* to the specified generation `ngen`.
*/
@@ -525,17 +525,17 @@ extends CNodeBase[K, V] {
}
new CNode[K, V](bitmap, narr, ngen)
}
-
+
private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match {
case tn: TNode[_, _] => tn.copyUntombed
case _ => inode
}
-
+
final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
case sn: SNode[K, V] => sn.copyTombed
case _ => this
} else this
-
+
// - if the branching factor is 1 for this CNode, and the child
// is a tombed SNode, returns its tombed version
// - otherwise, if there is at least one non-null node below,
@@ -559,12 +559,12 @@ extends CNodeBase[K, V] {
}
i += 1
}
-
+
new CNode[K, V](bmp, tmparray, gen).toContracted(lev)
}
-
+
private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
-
+
/* quiescently consistent - don't call concurrently to anything involving a GCAS!! */
protected def collectElems: Seq[(K, V)] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair)
@@ -574,12 +574,12 @@ extends CNodeBase[K, V] {
case cn: CNode[K, V] => cn.collectElems
}
}
-
+
protected def collectLocalElems: Seq[String] = array flatMap {
case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
}
-
+
override def toString = {
val elems = collectLocalElems
"CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", "))
@@ -588,7 +588,7 @@ extends CNodeBase[K, V] {
private[mutable] object CNode {
-
+
def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) {
val xidx = (xhc >>> lev) & 0x1f
val yidx = (yhc >>> lev) & 0x1f
@@ -604,7 +604,7 @@ private[mutable] object CNode {
} else {
new LNode(x.k, x.v, y.k, y.v)
}
-
+
}
@@ -620,9 +620,9 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai
* lock-free snapshots which are used to implement linearizable lock-free size,
* iterator and clear operations. The cost of evaluating the (lazy) snapshot is
* distributed across subsequent updates, thus making snapshot evaluation horizontally scalable.
- *
+ *
* For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf
- *
+ *
* @author Aleksandar Prokopec
* @since 2.10
*/
@@ -634,17 +634,17 @@ extends ConcurrentMap[K, V]
with Serializable
{
import Ctrie.computeHash
-
+
private var rootupdater = rtupd
@volatile var root = r
-
+
def this() = this(
INode.newRootNode,
AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
)
-
+
/* internal methods */
-
+
private def writeObject(out: java.io.ObjectOutputStream) {
val it = iterator
while (it.hasNext) {
@@ -654,11 +654,11 @@ extends ConcurrentMap[K, V]
}
out.writeObject(CtrieSerializationEnd)
}
-
+
private def readObject(in: java.io.ObjectInputStream) {
root = INode.newRootNode
rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
-
+
var obj: AnyRef = null
do {
obj = in.readObject()
@@ -669,11 +669,11 @@ extends ConcurrentMap[K, V]
}
} while (obj != CtrieSerializationEnd)
}
-
+
@inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
-
+
final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
-
+
@inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
val r = /*READ*/root
r match {
@@ -681,7 +681,7 @@ extends ConcurrentMap[K, V]
case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort)
}
}
-
+
@tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
val v = /*READ*/root
v match {
@@ -705,7 +705,7 @@ extends ConcurrentMap[K, V]
}
}
}
-
+
private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
val desc = RDCSS_Descriptor(ov, expectedmain, nv)
if (CAS_ROOT(ov, desc)) {
@@ -713,27 +713,27 @@ extends ConcurrentMap[K, V]
/*READ*/desc.committed
} else false
}
-
+
@tailrec private def inserthc(k: K, hc: Int, v: V) {
val r = RDCSS_READ_ROOT()
if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
}
-
+
@tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = {
val r = RDCSS_READ_ROOT()
-
+
val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this)
if (ret eq null) insertifhc(k, hc, v, cond)
else ret
}
-
+
@tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
val r = RDCSS_READ_ROOT()
val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
if (res eq INodeBase.RESTART) lookuphc(k, hc)
else res
}
-
+
/* slower:
//@tailrec
private def lookuphc(k: K, hc: Int): AnyRef = {
@@ -746,31 +746,31 @@ extends ConcurrentMap[K, V]
}
}
*/
-
+
@tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = {
val r = RDCSS_READ_ROOT()
val res = r.rec_remove(k, v, hc, 0, null, r.gen, this)
if (res ne null) res
else removehc(k, v, hc)
}
-
+
def string = RDCSS_READ_ROOT().string(0)
-
+
/* public methods */
-
+
override def seq = this
-
+
override def par = new ParCtrie(this)
-
+
override def empty: Ctrie[K, V] = new Ctrie[K, V]
-
+
final def isReadOnly = rootupdater eq null
-
+
final def nonReadOnly = rootupdater ne null
-
+
/** Returns a snapshot of this Ctrie.
* This operation is lock-free and linearizable.
- *
+ *
* The snapshot is lazily updated - the first time some branch
* in the snapshot or this Ctrie are accessed, they are rewritten.
* This means that the work of rebuilding both the snapshot and this
@@ -783,17 +783,17 @@ extends ConcurrentMap[K, V]
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater)
else snapshot()
}
-
+
/** Returns a read-only snapshot of this Ctrie.
* This operation is lock-free and linearizable.
- *
+ *
* The snapshot is lazily updated - the first time some branch
* of this Ctrie are accessed, it is rewritten. The work of creating
* the snapshot is thus distributed across subsequent updates
* and accesses on this Ctrie by all threads.
* Note that the snapshot itself is never rewritten unlike when calling
* the `snapshot` method, but the obtained snapshot cannot be modified.
- *
+ *
* This method is used by other methods such as `size` and `iterator`.
*/
@tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
@@ -802,106 +802,106 @@ extends ConcurrentMap[K, V]
if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null)
else readOnlySnapshot()
}
-
+
@tailrec final override def clear() {
val r = RDCSS_READ_ROOT()
if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
}
-
+
final def lookup(k: K): V = {
val hc = computeHash(k)
lookuphc(k, hc).asInstanceOf[V]
}
-
+
final override def apply(k: K): V = {
val hc = computeHash(k)
val res = lookuphc(k, hc)
if (res eq null) throw new NoSuchElementException
else res.asInstanceOf[V]
}
-
+
final def get(k: K): Option[V] = {
val hc = computeHash(k)
Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
}
-
+
override def put(key: K, value: V): Option[V] = {
val hc = computeHash(key)
insertifhc(key, hc, value, null)
}
-
+
final override def update(k: K, v: V) {
val hc = computeHash(k)
inserthc(k, hc, v)
}
-
+
final def +=(kv: (K, V)) = {
update(kv._1, kv._2)
this
}
-
+
final override def remove(k: K): Option[V] = {
val hc = computeHash(k)
removehc(k, null.asInstanceOf[V], hc)
}
-
+
final def -=(k: K) = {
remove(k)
this
}
-
+
def putIfAbsent(k: K, v: V): Option[V] = {
val hc = computeHash(k)
insertifhc(k, hc, v, INode.KEY_ABSENT)
}
-
+
def remove(k: K, v: V): Boolean = {
val hc = computeHash(k)
removehc(k, v, hc).nonEmpty
}
-
+
def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
val hc = computeHash(k)
insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty
}
-
+
def replace(k: K, v: V): Option[V] = {
val hc = computeHash(k)
insertifhc(k, hc, v, INode.KEY_PRESENT)
}
-
+
def iterator: Iterator[(K, V)] =
if (nonReadOnly) readOnlySnapshot().iterator
else new CtrieIterator(0, this)
-
+
private def cachedSize() = {
val r = RDCSS_READ_ROOT()
r.cachedSize(this)
}
-
+
override def size: Int =
if (nonReadOnly) readOnlySnapshot().size
else cachedSize()
-
+
override def stringPrefix = "Ctrie"
-
+
}
object Ctrie extends MutableMapFactory[Ctrie] {
val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
-
+
implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V]
-
+
def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V]
-
+
@inline final def computeHash[K](k: K): Int = {
var hcode = k.hashCode
hcode = hcode * 0x9e3775cd
hcode = java.lang.Integer.reverseBytes(hcode)
hcode * 0x9e3775cd
}
-
+
}
@@ -911,11 +911,11 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
var depth = -1
var subiter: Iterator[(K, V)] = null
var current: KVNode[K, V] = null
-
+
if (mustInit) initialize()
-
+
def hasNext = (current ne null) || (subiter ne null)
-
+
def next() = if (hasNext) {
var r: (K, V) = null
if (subiter ne null) {
@@ -927,7 +927,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
}
r
} else Iterator.empty.next()
-
+
private def readin(in: INode[K, V]) = in.gcasRead(ct) match {
case cn: CNode[K, V] =>
depth += 1
@@ -942,19 +942,19 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
case null =>
current = null
}
-
+
@inline private def checkSubiter() = if (!subiter.hasNext) {
subiter = null
advance()
}
-
+
@inline private def initialize() {
assert(ct.isReadOnly)
-
+
val r = ct.RDCSS_READ_ROOT()
readin(r)
}
-
+
def advance(): Unit = if (depth >= 0) {
val npos = stackpos(depth) + 1
if (npos < stack(depth).length) {
@@ -970,19 +970,19 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
advance()
}
} else current = null
-
+
protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit)
-
+
protected def dupTo(it: CtrieIterator[K, V]) = {
it.level = this.level
it.ct = this.ct
it.depth = this.depth
it.current = this.current
-
+
// these need a deep copy
Array.copy(this.stack, 0, it.stack, 0, 7)
Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
-
+
// this one needs to be evaluated
if (this.subiter == null) it.subiter = null
else {
@@ -991,7 +991,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
it.subiter = lst.iterator
}
}
-
+
/** Returns a sequence of iterators over subsets of this iterator.
* It's used to ease the implementation of splitters for a parallel version of the Ctrie.
*/
@@ -1026,7 +1026,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
this.level += 1
Seq(this)
}
-
+
def printDebug {
println("ctrie iterator")
println(stackpos.mkString(","))
@@ -1034,7 +1034,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
println("curr.: " + current)
println(stack.mkString("\n"))
}
-
+
}
@@ -1048,20 +1048,20 @@ private[mutable] case object CtrieSerializationEnd
private[mutable] object Debug {
import collection._
-
+
lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
-
+
def log(s: AnyRef) = logbuffer.add(s)
-
+
def flush() {
for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
logbuffer.clear()
}
-
+
def clear() {
logbuffer.clear()
}
-
+
}
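A usage sketch of the Ctrie shown above, assuming the API exactly as it appears in this commit; it exercises the ConcurrentMap operations and the O(1) read-only snapshot described in the comments:

  import scala.collection.mutable.Ctrie

  object CtrieDemo extends App {
    val m = Ctrie.empty[String, Int]

    // Plain ConcurrentMap-style operations, all lock-free.
    m.put("a", 1)
    m += ("b" -> 2)
    println(m.putIfAbsent("a", 99))   // Some(1): "a" keeps its existing value

    // readOnlySnapshot is O(1); the copying cost is paid lazily by later
    // updates, as the comments above explain.
    val snap = m.readOnlySnapshot()
    m.put("c", 3)

    println(snap.get("c"))   // None - the snapshot is unaffected
    println(m.get("c"))      // Some(3)
    println(m.size)          // 3
  }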
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f3fb6738eb..ee6d4d1d22 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -43,19 +43,19 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** The array keeping track of number of elements in 32 element blocks.
*/
@transient protected var sizemap: Array[Int] = null
-
+
@transient var seedvalue: Int = tableSizeSeed
-
+
import HashTable.powerOfTwo
-
+
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
-
+
private def initialCapacity = capacity(initialSize)
-
+
protected def randomSeed = seedGenerator.get.nextInt()
-
+
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
-
+
/**
* Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
@@ -65,22 +65,22 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
in.defaultReadObject
-
+
_loadFactor = in.readInt()
assert(_loadFactor > 0)
-
+
val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
threshold = newThreshold(_loadFactor, table.size)
-
+
seedvalue = in.readInt()
-
+
val smDefined = in.readBoolean()
if (smDefined) sizeMapInit(table.length) else sizemap = null
-
+
var index = 0
while (index < size) {
val elem = in.readObject().asInstanceOf[A]
@@ -295,12 +295,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
protected final def index(hcode: Int) = {
// version 1 (no longer used - did not work with parallel hash tables)
// improve(hcode) & (table.length - 1)
-
+
// version 2 (allows for parallel hash table construction)
val improved = improve(hcode, seedvalue)
val ones = table.length - 1
(improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
-
+
// version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables)
// val hc = improve(hcode)
// val bbp = blockbitpos
@@ -345,17 +345,17 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private[collection] object FlatHashTable {
-
+
/** Creates a specific seed to improve hashcode of a hash table instance
* and ensure that iteration order vulnerabilities are not 'felt' in other
* hash tables.
- *
+ *
* See SI-5293.
*/
final def seedGenerator = new ThreadLocal[util.Random] {
override def initialValue = new util.Random
}
-
+
/** The load factor for the hash table; must be < 500 (0.5)
*/
def defaultLoadFactor: Int = 450
@@ -396,11 +396,11 @@ private[collection] object FlatHashTable {
//h = h ^ (h >>> 14)
//h = h + (h << 4)
//h ^ (h >>> 10)
-
+
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
val improved = i * 0x9e3775cd
-
+
// for the remainder, see SI-5293
// to ensure that different bits are used for different hash tables, we have to rotate based on the seed
val rotation = seed % 32
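A standalone sketch of the improve/index scheme above (version 2): multiply, byte-reverse, multiply again, then rotate by a per-table seed so that equal contents need not share an iteration order (SI-5293). The rotation step is assumed to finish the way the HashTable counterpart below does:

  object HashIndexSketch extends App {
    def improve(hcode: Int, seed: Int): Int = {
      var i = hcode * 0x9e3775cd
      i = java.lang.Integer.reverseBytes(i)
      val improved = i * 0x9e3775cd
      val rotation = seed % 32
      (improved >>> rotation) | (improved << (32 - rotation))
    }

    // table.length is always a power of two, so the high bits of the improved
    // hash are folded down onto the index range with a shift-and-mask.
    def index(hcode: Int, seed: Int, tableLength: Int): Int = {
      val improved = improve(hcode, seed)
      val ones = tableLength - 1
      (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
    }

    val seedA = 11
    val seedB = 23
    println((1 to 5).map(index(_, seedA, 64)))   // indices under seed A
    println((1 to 5).map(index(_, seedB, 64)))   // typically a different permutation
  }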
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 5b3e07b826..cc0aed6963 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -53,9 +53,9 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
@transient protected var sizemap: Array[Int] = null
@transient var seedvalue: Int = tableSizeSeed
-
+
protected def tableSizeSeed = Integer.bitCount(table.length - 1)
-
+
protected def initialSize: Int = HashTable.initialSize
private def lastPopulatedIndex = {
@@ -80,9 +80,9 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
seedvalue = in.readInt()
-
+
val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
@@ -429,7 +429,7 @@ private[collection] object HashTable {
// h = h ^ (h >>> 14)
// h = h + (h << 4)
// h ^ (h >>> 10)
-
+
// the rest of the computation is due to SI-5293
val rotation = seed % 32
val rotated = (i >>> rotation) | (i << (32 - rotation))
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 53c876ec08..037f3b2939 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -62,22 +62,22 @@ final class ListBuffer[A]
private var len = 0
protected def underlying: immutable.Seq[A] = start
-
+
private def writeObject(out: ObjectOutputStream) {
// write start
var xs: List[A] = start
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
-
+
// no need to write last0
-
+
// write if exported
out.writeBoolean(exported)
-
+
// write the length
out.writeInt(len)
}
-
+
private def readObject(in: ObjectInputStream) {
// read start, set last0 appropriately
var elem: A = in.readObject.asInstanceOf[A]
@@ -97,14 +97,14 @@ final class ListBuffer[A]
last0 = current
start
}
-
+
// read if exported
exported = in.readBoolean()
-
+
// read the length
len = in.readInt()
}
-
+
/** The current length of the buffer.
*
* This operation takes constant time.
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index d87fc0b4a2..f41a51d3ef 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -13,12 +13,12 @@ import generic._
/**
* Base trait for mutable sorted set.
- *
+ *
* @define Coll mutable.SortedSet
* @define coll mutable sorted set
*
* @author Lucien Pereira
- *
+ *
*/
trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
@@ -39,11 +39,11 @@ trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike
* Standard `CanBuildFrom` instance for sorted sets.
*
* @author Lucien Pereira
- *
+ *
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
-
+
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
-
+
}
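A small sketch of the factory wiring above: `empty` hands back a TreeSet, and the implicit CanBuildFrom keeps transformations inside SortedSet (names are illustrative):

  import scala.collection.mutable

  object SortedSetDemo extends App {
    val s = mutable.SortedSet(3, 1, 2)   // backed by mutable.TreeSet
    s += 0
    s -= 2
    println(s)                           // TreeSet(0, 1, 3), always in order

    // The canBuildFrom above makes map yield another SortedSet.
    val doubled: mutable.SortedSet[Int] = s.map(_ * 2)
    println(doubled)                     // TreeSet(0, 2, 6)
  }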
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index e0f1c3adfe..02ee811193 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -11,14 +11,14 @@ package mutable
import generic._
-/**
+/**
* @define Coll mutable.TreeSet
* @define coll mutable tree set
* @factoryInfo
* Companion object of TreeSet providing factory related utilities.
- *
+ *
* @author Lucien Pereira
- *
+ *
*/
object TreeSet extends MutableSortedSetFactory[TreeSet] {
/**
@@ -32,7 +32,7 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
* A mutable SortedSet using an immutable AVL Tree as underlying data structure.
*
* @author Lucien Pereira
- *
+ *
*/
class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
@@ -67,7 +67,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Cardinality stores the set size; unfortunately a
* set view (given by rangeImpl)
* cannot take advantage of this optimisation
- *
+ *
*/
override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
@@ -101,7 +101,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
* Thanks to the immutable nature of the
* underlying AVL Tree, we can share it with
* the clone. So clone complexity in time is O(1).
- *
+ *
*/
override def clone: TreeSet[A] = {
val clone = new TreeSet[A](base, from, until)
@@ -119,5 +119,5 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S
override def iterator: Iterator[A] = resolve.avl.iterator
.dropWhile(e => !isLeftAcceptable(from, ordering)(e))
.takeWhile(e => isRightAcceptable(until, ordering)(e))
-
+
}
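A usage sketch of the TreeSet above: clone shares the immutable AVL tree (so it is O(1)), and rangeImpl gives a bounded view over the same tree:

  import scala.collection.mutable.TreeSet

  object TreeSetDemo extends App {
    val ts = TreeSet(5, 1, 4, 2, 3)
    println(ts)                          // TreeSet(1, 2, 3, 4, 5)

    // clone shares the underlying immutable AVL tree, as described above;
    // later updates to either set do not affect the other.
    val copy = ts.clone
    ts += 6
    println(copy)                        // TreeSet(1, 2, 3, 4, 5)

    // A ranged view over the same underlying tree (until bound is exclusive).
    println(ts.rangeImpl(Some(2), Some(5)))   // TreeSet(2, 3, 4)
  }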
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index e304be92ae..54cdf25804 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -61,14 +61,14 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* @return the parallel builder containing both the elements of this and the `other` builder
*/
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
-
+
/** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared
* across several threads constructing the collection.
- *
+ *
* By default, this method returns `false`.
*/
def canBeShared: Boolean = false
-
+
}
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 7c5a835e56..c0fc906ad9 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -154,9 +154,9 @@ extends GenIterableLike[T, Repr]
with HasNewCombiner[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
-
+
import tasksupport._
-
+
def seq: Sequential
def repr: Repr = this.asInstanceOf[Repr]
@@ -164,7 +164,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def hasDefiniteSize = true
def nonEmpty = size != 0
-
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
  * This iterator is more specific than the iterator returned by `iterator`, and augmented
* with additional accessor and transformer methods.
@@ -234,7 +234,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
trait SignallingOps[PI <: DelegatedSignalling] {
def assign(cntx: Signalling): PI
}
-
+
/* convenience task operations wrapper */
protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] {
def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) {
@@ -262,7 +262,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
it
}
}
-
+
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
@@ -272,12 +272,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
def isCombiner = cb.isInstanceOf[Combiner[_, _]]
def asCombiner = cb.asInstanceOf[Combiner[Elem, To]]
}
-
+
protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass`
def apply() = bf.apply()
}
-
+
protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr]
def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
@@ -287,7 +287,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def mkString: String = seq.mkString("")
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
-
+
def canEqual(other: Any) = true
/** Reduces the elements of this sequence using the specified associative binary operator.
@@ -324,7 +324,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements if the collection is nonempty, and `None` otherwise.
*/
def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
-
+
/** Folds the elements of this sequence using the specified associative binary operator.
* The order in which the elements are reduced is unspecified and may be nondeterministic.
*
@@ -375,11 +375,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
-
+
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
-
+
def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
-
+
def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op)
def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op)
@@ -428,7 +428,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
-
+
def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result })
} else seq.map(f)(bf2seq(bf))
@@ -486,11 +486,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def find(pred: T => Boolean): Option[T] = {
executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
-
+
/** Creates a combiner factory. Each combiner factory instance is used
* once per invocation of a parallel transformer method for a single
* collection.
- *
+ *
* The default combiner factory creates a new combiner every time it
* is requested, unless the combiner is thread-safe as indicated by its
* `canBeShared` method. In this case, the method returns a factory which
@@ -509,7 +509,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def doesShareCombiners = false
}
}
-
+
protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = {
val combiner = cbf()
if (combiner.canBeShared) new CombinerFactory[S, That] {
@@ -521,7 +521,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def doesShareCombiners = false
}
}
-
+
def filter(pred: T => Boolean): Repr = {
executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.result })
}
@@ -875,9 +875,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
}
-
+
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
-
+
protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
@@ -894,7 +894,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Count) = result = result + that.result
// override def toString = "CountTask(" + pittxt + ")"
}
-
+
protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
@@ -1303,7 +1303,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = that.result
override def requiresStrictSplitters = true
}
-
+
protected[this] class FromScanTree[U >: T, That]
(tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
@@ -1379,13 +1379,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
def rightmost = this
def print(depth: Int) = println((" " * depth) + this)
}
-
+
/* alias methods */
-
+
def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
-
+
def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
-
+
/* debug information */
private[parallel] def debugInformation = "Parallel collection: " + this.getClass
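The `aggregate`, `foldLeft` and `reduceOption` signatures shown in the `ParIterableLike` hunks above translate into ordinary calls on any parallel collection; `aggregate` is the one that actually runs in parallel, while the left/right folds delegate to the sequential `seq` view. A brief sketch:

{{{
val xs = (1 to 1000).par

// seqop folds within one partition, combop merges the per-partition results
val sum = xs.aggregate(0)(_ + _, _ + _)                              // 500500

// foldLeft/foldRight simply forward to seq.foldLeft / seq.foldRight
val evens = xs.foldLeft(0) { (acc, x) => if (x % 2 == 0) acc + 1 else acc }   // 500

val max = xs.reduceOption(_ max _)                                   // Some(1000); None if empty
}}}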
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 6a5ee5c69b..70529229ec 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -182,9 +182,9 @@ self =>
} otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence.
- *
+ *
* $abortsignalling
- *
+ *
* @tparam S the type of the elements of `that` sequence
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 8ed4583419..c5910ff2c8 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -28,7 +28,7 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
* This method doesn't change the state of the iterator.
*/
def remaining: Int
-
+
/** For most collections, this is a cheap operation.
* Exceptions can override this method.
*/
@@ -386,22 +386,22 @@ extends AugmentedIterableIterator[T]
with DelegatedSignalling
{
self =>
-
+
var signalDelegate: Signalling = IdleSignalling
-
+
/** Creates a copy of this iterator. */
def dup: IterableSplitter[T]
def split: Seq[IterableSplitter[T]]
-
+
def splitWithSignalling: Seq[IterableSplitter[T]] = {
val pits = split
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel)
-
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
*
@@ -554,13 +554,13 @@ self =>
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = {
val pits = psplit(sizes: _*)
pits foreach { _.signalDelegate = signalDelegate }
pits
}
-
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
* method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number
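The splitter hunks above revolve around one contract: `remaining` reports how much work is left, `split` partitions it, and a task keeps splitting while `remaining` exceeds a size-derived threshold (`shouldSplitFurther`). A toy, self-contained illustration of that contract, not the library's `IterableSplitter`:

{{{
// Toy splitter: not the real trait, just the remaining/split/threshold idea.
final case class ToySplitter(elems: Vector[Int]) {
  def remaining: Int = elems.length
  def split: Seq[ToySplitter] = {
    val (l, r) = elems.splitAt(elems.length / 2)
    Seq(ToySplitter(l), ToySplitter(r))
  }
}

def process(s: ToySplitter, threshold: Int): Int =
  if (s.remaining <= threshold) s.elems.sum          // small enough: handle sequentially
  else s.split.map(process(_, threshold)).sum        // otherwise split and recurse

process(ToySplitter(Vector.range(0, 100)), 16)       // 4950
}}}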
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index b705909cad..e643444638 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -171,12 +171,12 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def internal() = {
var last = spawnSubtasks()
-
+
last.body.tryLeaf(None)
last.release()
body.result = last.body.result
body.throwable = last.body.throwable
-
+
while (last.next != null) {
// val lastresult = Option(last.body.result)
val beforelast = last
@@ -193,7 +193,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
body.tryMerge(last.body.repr)
}
}
-
+
def spawnSubtasks() = {
var last: TaskImpl[R, Tp] = null
var head: TaskImpl[R, Tp] = this
@@ -237,7 +237,7 @@ trait ThreadPoolTasks extends Tasks {
// utb: var future: Future[_] = null
@volatile var owned = false
@volatile var completed = false
-
+
def start() = synchronized {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
@@ -326,7 +326,7 @@ trait ThreadPoolTasks extends Tasks {
// debuglog("-----------> Executing with wait: " + task)
t.start()
-
+
t.sync()
t.body.forwardThrowable
t.body.result
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
index cec2e6886d..80add2407b 100644
--- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -25,11 +25,11 @@ import scala.collection.mutable.CtrieIterator
/** Parallel Ctrie collection.
- *
+ *
* It has its bulk operations parallelized, but uses the snapshot operation
* to create the splitter. This means that parallel bulk operations can be
* called concurrently with the modifications.
- *
+ *
* @author Aleksandar Prokopec
* @since 2.10
*/
@@ -41,41 +41,41 @@ extends ParMap[K, V]
with Serializable
{
import collection.parallel.tasksupport._
-
+
def this() = this(new Ctrie)
-
+
override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie
-
+
override def empty: ParCtrie[K, V] = ParCtrie.empty
-
+
protected[this] override def newCombiner = ParCtrie.newCombiner
-
+
override def seq = ctrie
-
+
def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true)
-
+
override def clear() = ctrie.clear()
-
+
def result = this
-
+
def get(key: K): Option[V] = ctrie.get(key)
-
+
def put(key: K, value: V): Option[V] = ctrie.put(key, value)
-
+
def update(key: K, value: V): Unit = ctrie.update(key, value)
-
+
def remove(key: K): Option[V] = ctrie.remove(key)
-
+
def +=(kv: (K, V)): this.type = {
ctrie.+=(kv)
this
}
-
+
def -=(key: K): this.type = {
ctrie.-=(key)
this
}
-
+
override def size = {
val in = ctrie.readRoot()
val r = in.gcasRead(ctrie)
@@ -87,11 +87,11 @@ extends ParMap[K, V]
cn.cachedSize(ctrie)
}
}
-
+
override def stringPrefix = "ParCtrie"
-
+
/* tasks */
-
+
/** Computes Ctrie size in parallel. */
class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
var result = -1
@@ -115,7 +115,7 @@ extends ParMap[K, V]
def shouldSplitFurther = howmany > 1
override def merge(that: Size) = result = result + that.result
}
-
+
}
@@ -126,63 +126,63 @@ extends CtrieIterator[K, V](lev, ct, mustInit)
// only evaluated if `remaining` is invoked (which is not used by most tasks)
lazy val totalsize = ct.par.size
var iterated = 0
-
+
protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit)
-
+
override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
level < maxsplits
}
-
+
def dup = {
val it = newIterator(0, ct, false)
dupTo(it)
it.iterated = this.iterated
it
}
-
+
override def next() = {
iterated += 1
super.next()
}
-
+
def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]]
-
+
override def isRemainingCheap = false
-
+
def remaining: Int = totalsize - iterated
}
/** Only used within the `ParCtrie`. */
private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] {
-
+
def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
-
+
val thiz = this.asInstanceOf[ParCtrie[K, V]]
val that = other.asInstanceOf[ParCtrie[K, V]]
val result = new ParCtrie[K, V]
-
+
result ++= thiz.iterator
result ++= that.iterator
-
+
result
}
-
+
override def canBeShared = true
-
+
}
-
+
object ParCtrie extends ParMapFactory[ParCtrie] {
-
+
def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V]
-
+
def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V]
-
+
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V]
-
+
}
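Since `ParCtrie` builds its splitter from a read-only snapshot (see `splitter` above), bulk operations can proceed while other threads keep modifying the map. A usage sketch against this snapshot of the API (the class was later renamed `ParTrieMap`):

{{{
import scala.collection.parallel.mutable.ParCtrie

val pc = new ParCtrie[Int, Int]
for (i <- 1 to 100) pc.put(i, i * i)

// The parallel traversal works on a snapshot, so a concurrent put
// from another thread cannot disturb an in-flight bulk operation.
val total = pc.map(_._2).sum          // 338350
pc.put(101, 101 * 101)

println(pc.get(7))                    // Some(49)
}}}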
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 6c5f513ad0..0217d0bfa8 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -117,7 +117,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
private var seedvalue = 27
-
+
def +=(elem: T) = {
sz += 1
val hc = improve(elemHashCode(elem), seedvalue)
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 8f19d0ecdb..f5e05ffefb 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -114,7 +114,7 @@ package parallel {
}
/* classes */
-
+
trait CombinerFactory[U, Repr] {
/** Provides a combiner used to construct a collection. */
def apply(): Combiner[U, Repr]
@@ -126,7 +126,7 @@ package parallel {
*/
def doesShareCombiners: Boolean
}
-
+
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
final case class CompositeThrowable(
val throwables: Set[Throwable]
@@ -201,18 +201,18 @@ package parallel {
//self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
protected var sz: Int = 0
-
+
def size = sz
-
+
def clear() = {
buckets = new Array[UnrolledBuffer[Buck]](bucketnumber)
sz = 0
}
-
+
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
-
+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
-
+
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
if (this eq other) this
else other match {
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index e79f76430f..f6d6341151 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -23,7 +23,7 @@ class Channel[A] {
private var written = new LinkedList[A] // FIFO buffer, realized through
private var lastWritten = written // aliasing of a linked list
private var nreaders = 0
-
+
/**
* @param x ...
*/
@@ -33,7 +33,7 @@ class Channel[A] {
lastWritten = lastWritten.next
if (nreaders > 0) notify()
}
-
+
def read: A = synchronized {
while (written.next == null) {
try {
@@ -46,5 +46,5 @@ class Channel[A] {
written = written.next
x
}
-
+
}
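`Channel` above is a plain synchronized FIFO: `write` appends and wakes readers, `read` blocks until something has been written. A producer/consumer sketch:

{{{
import scala.concurrent.Channel

val ch = new Channel[Int]

new Thread(new Runnable {
  def run() { for (i <- 1 to 3) ch.write(i) }
}).start()

// read blocks until a value is available, then removes it (FIFO order)
println(ch.read)   // 1
println(ch.read)   // 2
println(ch.read)   // 3
}}}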
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index 6aacd53de2..ae17c7e032 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -18,16 +18,16 @@ abstract class ConcurrentPackageObject {
*/
lazy val executionContext =
new impl.ExecutionContextImpl(java.util.concurrent.Executors.newCachedThreadPool())
-
+
/** A global service for scheduling tasks for execution.
*/
// lazy val scheduler =
// new default.SchedulerImpl
-
+
val handledFutureException: PartialFunction[Throwable, Throwable] = {
case t: Throwable if isFutureThrowable(t) => t
}
-
+
// TODO rename appropriately and make public
private[concurrent] def isFutureThrowable(t: Throwable) = t match {
case e: Error => false
@@ -35,7 +35,7 @@ abstract class ConcurrentPackageObject {
case i: InterruptedException => false
case _ => true
}
-
+
private[concurrent] def resolve[T](source: Try[T]): Try[T] = source match {
case Failure(t: scala.runtime.NonLocalReturnControl[_]) => Success(t.value.asInstanceOf[T])
case Failure(t: scala.util.control.ControlThrowable) => Failure(new ExecutionException("Boxed ControlThrowable", t))
@@ -46,24 +46,24 @@ abstract class ConcurrentPackageObject {
private[concurrent] def resolver[T] =
resolverFunction.asInstanceOf[PartialFunction[Throwable, Try[T]]]
-
+
/* concurrency constructs */
-
+
def future[T](body: =>T)(implicit execCtx: ExecutionContext = executionContext): Future[T] =
execCtx future body
-
+
def promise[T]()(implicit execCtx: ExecutionContext = executionContext): Promise[T] =
execCtx promise
-
+
/** Wraps a block of code into an awaitable object. */
def body2awaitable[T](body: =>T) = new Awaitable[T] {
def await(atMost: Duration)(implicit cb: CanAwait) = body
}
-
+
/** Used to block on a piece of code which potentially blocks.
- *
+ *
* @param body A piece of code which contains potentially blocking or long running calls.
- *
+ *
* Calling this method may throw the following exceptions:
* - CancellationException - if the computation was cancelled
* - InterruptedException - in the case that a wait within the blockable object was interrupted
@@ -71,11 +71,11 @@ abstract class ConcurrentPackageObject {
*/
def blocking[T](atMost: Duration)(body: =>T)(implicit execCtx: ExecutionContext): T =
executionContext.blocking(atMost)(body)
-
+
/** Blocks on an awaitable object.
- *
+ *
* @param awaitable An object with a `block` method which runs potentially blocking or long running calls.
- *
+ *
* Calling this method may throw the following exceptions:
* - CancellationException - if the computation was cancelled
* - InterruptedException - in the case that a wait within the blockable object was interrupted
@@ -83,7 +83,7 @@ abstract class ConcurrentPackageObject {
*/
def blocking[T](awaitable: Awaitable[T], atMost: Duration)(implicit execCtx: ExecutionContext = executionContext): T =
executionContext.blocking(awaitable, atMost)
-
+
@inline implicit final def int2durationops(x: Int): DurationOps = new DurationOps(x)
}
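The package object above wires `future` and `promise` to a default `executionContext`, so at this snapshot a computation can be kicked off without passing a context explicitly. The API changed again before 2.10 final, so treat this only as a sketch of the snapshot shown here:

{{{
import scala.concurrent._

// the body runs on the default executionContext supplied as the implicit parameter's default
val f: Future[Int] = future { (1 to 100).sum }

f onSuccess { case n => println("sum = " + n) }    // eventually prints sum = 5050
f onFailure { case t => println("failed: " + t) }
}}}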
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index 0b7f54a27a..a17153bad5 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -26,23 +26,23 @@ package scala.concurrent
class DelayedLazyVal[T](f: () => T, body: => Unit) {
@volatile private[this] var _isDone = false
private[this] lazy val complete = f()
-
+
/** Whether the computation is complete.
*
* @return true if the computation is complete.
*/
def isDone = _isDone
-
+
/** The current result of f(), or the final result if complete.
*
* @return the current value
*/
def apply(): T = if (isDone) complete else f()
-
+
// TODO replace with scala.concurrent.future { ... }
ops.future {
body
_isDone = true
}
-
+
}
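`DelayedLazyVal` pairs an asynchronous `body` with a cheap `f` that reports its progress: while `body` runs, `apply()` re-evaluates `f()`, and once `isDone` it returns the final, cached result. A sketch:

{{{
import scala.concurrent.DelayedLazyVal
import scala.collection.mutable.ArrayBuffer

val buf = ArrayBuffer[Int]()

val progress = new DelayedLazyVal(() => buf.toList, {
  for (i <- 1 to 5) { buf += i; Thread.sleep(10) }   // the asynchronous body
})

println(progress())                         // some prefix of List(1, 2, 3, 4, 5)
while (!progress.isDone) Thread.sleep(5)
println(progress())                         // List(1, 2, 3, 4, 5), now the cached final value
}}}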
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 99cd264ac5..eb1b3355c0 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -21,41 +21,41 @@ import collection._
trait ExecutionContext {
-
+
protected implicit object CanAwaitEvidence extends CanAwait
-
+
def execute(runnable: Runnable): Unit
-
+
def execute[U](body: () => U): Unit
-
+
def promise[T]: Promise[T]
-
+
def future[T](body: Callable[T]): Future[T] = future(body.call())
-
+
def future[T](body: => T): Future[T]
-
+
def blocking[T](atMost: Duration)(body: =>T): T
-
+
def blocking[T](awaitable: Awaitable[T], atMost: Duration): T
-
+
def reportFailure(t: Throwable): Unit
-
+
/* implementations follow */
-
+
private implicit val executionContext = this
-
+
def keptPromise[T](result: T): Promise[T] = {
val p = promise[T]
p success result
}
-
+
def brokenPromise[T](t: Throwable): Promise[T] = {
val p = promise[T]
p failure t
}
-
+
/** TODO some docs
- *
+ *
*/
def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]]): Future[Coll[T]] = {
import nondeterministic._
@@ -63,13 +63,13 @@ trait ExecutionContext {
val counter = new AtomicInteger(1) // how else could we do this?
val p: Promise[Coll[T]] = promise[Coll[T]] // we need an implicit execctx in the signature
var idx = 0
-
+
def tryFinish() = if (counter.decrementAndGet() == 0) {
val builder = cbf(futures)
builder ++= buffer
p success builder.result
}
-
+
for (f <- futures) {
val currentIndex = idx
buffer += null.asInstanceOf[T]
@@ -83,46 +83,46 @@ trait ExecutionContext {
}
idx += 1
}
-
+
tryFinish()
-
+
p.future
}
-
+
/** TODO some docs
- *
+ *
*/
def any[T](futures: Traversable[Future[T]]): Future[T] = {
val p = promise[T]
val completeFirst: Try[T] => Unit = elem => p tryComplete elem
-
+
futures foreach (_ onComplete completeFirst)
-
+
p.future
}
-
+
/** TODO some docs
- *
+ *
*/
def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean): Future[Option[T]] = {
if (futures.isEmpty) Promise.kept[Option[T]](None).future
else {
val result = promise[Option[T]]
val count = new AtomicInteger(futures.size)
- val search: Try[T] => Unit = {
+ val search: Try[T] => Unit = {
v => v match {
case Success(r) => if (predicate(r)) result trySuccess Some(r)
case _ =>
}
if (count.decrementAndGet() == 0) result trySuccess None
}
-
+
futures.foreach(_ onComplete search)
result.future
}
}
-
+
}
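`all`, `any` and `find` above are combinators defined directly on the context; nothing is implicit-driven at this snapshot, so the context is named explicitly in this sketch (again, pre-release API that moved around before 2.10 final):

{{{
import scala.concurrent._

val ec = executionContext                      // the snapshot's default context
val fs = List(future { 1 }, future { 2 }, future { 3 })

// any: completes with whichever future finishes first (nondeterministic)
val first: Future[Int] = ec.any(fs)

// find: the first computed value satisfying the predicate, if there is one
val even: Future[Option[Int]] = ec.find(fs)(_ % 2 == 0)
}}}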
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 73f76bbbfb..eb54b61db0 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -28,9 +28,9 @@ import scala.collection.generic.CanBuildFrom
/** The trait that represents futures.
- *
+ *
* Asynchronous computations that yield futures are created with the `future` call:
- *
+ *
* {{{
* val s = "Hello"
* val f: Future[String] = future {
@@ -40,9 +40,9 @@ import scala.collection.generic.CanBuildFrom
* case msg => println(msg)
* }
* }}}
- *
+ *
* @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang
- *
+ *
* @define multipleCallbacks
* Multiple callbacks may be registered; there is no guarantee that they will be
* executed in a particular order.
@@ -54,18 +54,18 @@ import scala.collection.generic.CanBuildFrom
* - `Error` - errors are not contained within futures
* - `InterruptedException` - not contained within futures
* - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures
- *
+ *
  * Instead, the future is completed with an ExecutionException with one of the exceptions above
* as the cause.
* If a future is failed with a `scala.runtime.NonLocalReturnControl`,
  * it is completed with a value from that throwable instead.
- *
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
- *
+ *
* @define forComprehensionExamples
* Example:
- *
+ *
* {{{
* val f = future { 5 }
* val g = future { 3 }
@@ -74,116 +74,116 @@ import scala.collection.generic.CanBuildFrom
* y: Int <- g // returns Future(5)
* } yield x + y
* }}}
- *
+ *
* is translated to:
- *
+ *
* {{{
* f flatMap { (x: Int) => g map { (y: Int) => x + y } }
* }}}
*/
trait Future[+T] extends Awaitable[T] {
self =>
-
+
/* Callbacks */
-
+
/** When this future is completed successfully (i.e. with a value),
* apply the provided partial function to the value if the partial function
* is defined at that value.
- *
+ *
* If the future has already been completed with a value,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* $multipleCallbacks
*/
def onSuccess[U](pf: PartialFunction[T, U]): this.type = onComplete {
case Failure(t) => // do nothing
case Success(v) => if (pf isDefinedAt v) pf(v) else { /*do nothing*/ }
}
-
+
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
- *
+ *
* $caughtThrowables
- *
+ *
* If the future has already been completed with a failure,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* Will not be called in case that the future is completed with a value.
- *
+ *
* $multipleCallbacks
*/
def onFailure[U](callback: PartialFunction[Throwable, U]): this.type = onComplete {
case Failure(t) => if (isFutureThrowable(t) && callback.isDefinedAt(t)) callback(t) else { /*do nothing*/ }
case Success(v) => // do nothing
}
-
+
/** When this future is completed, either through an exception, a timeout, or a value,
* apply the provided function.
- *
+ *
* If the future has already been completed,
* this will either be applied immediately or be scheduled asynchronously.
- *
+ *
* $multipleCallbacks
*/
def onComplete[U](func: Try[T] => U): this.type
-
-
+
+
/* Miscellaneous */
-
+
/** Creates a new promise.
*/
def newPromise[S]: Promise[S]
-
-
+
+
/* Projections */
-
+
/** Returns a failed projection of this future.
- *
+ *
* The failed projection is a future holding a value of type `Throwable`.
- *
+ *
* It is completed with a value which is the throwable of the original future
* in case the original future is failed.
- *
+ *
* It is failed with a `NoSuchElementException` if the original future is completed successfully.
- *
+ *
* Blocking on this future returns a value if the original future is completed with an exception
* and throws a corresponding exception if the original future fails.
*/
def failed: Future[Throwable] = {
- def noSuchElem(v: T) =
+ def noSuchElem(v: T) =
new NoSuchElementException("Future.failed not completed with a throwable. Instead completed with: " + v)
-
+
val p = newPromise[Throwable]
-
+
onComplete {
case Failure(t) => p success t
case Success(v) => p failure noSuchElem(v)
}
-
+
p.future
}
-
-
+
+
/* Monadic operations */
-
+
/** Asynchronously processes the value in the future once the value becomes available.
- *
+ *
* Will not be called if the future fails.
*/
def foreach[U](f: T => U): Unit = onComplete {
case Success(r) => f(r)
case Failure(_) => // do nothing
}
-
+
/** Creates a new future by applying a function to the successful result of
* this future. If this future is completed with an exception then the new
* future will also contain this exception.
- *
+ *
  * $forComprehensionExamples
*/
def map[S](f: T => S): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -192,23 +192,23 @@ self =>
case t => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by applying a function to the successful result of
* this future, and returns the result of the function as the new future.
* If this future is completed with an exception then the new future will
* also contain this exception.
- *
+ *
  * $forComprehensionExamples
*/
def flatMap[S](f: T => Future[S]): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
- case Success(v) =>
+ case Success(v) =>
try {
f(v) onComplete {
case Failure(t) => p failure t
@@ -218,15 +218,15 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by filtering the value of the current future with a predicate.
- *
+ *
* If the current future contains a value which satisfies the predicate, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
- *
+ *
* If the current future fails or times out, the resulting future also fails or times out, respectively.
*
* Example:
@@ -240,7 +240,7 @@ self =>
*/
def filter(pred: T => Boolean): Future[T] = {
val p = newPromise[T]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -251,12 +251,12 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future by mapping the value of the current future if the given partial function is defined at that value.
- *
+ *
* If the current future contains a value for which the partial function is defined, the new future will also hold that value.
* Otherwise, the resulting future will fail with a `NoSuchElementException`.
*
@@ -277,7 +277,7 @@ self =>
*/
def collect[S](pf: PartialFunction[T, S]): Future[S] = {
val p = newPromise[S]
-
+
onComplete {
case Failure(t) => p failure t
case Success(v) =>
@@ -288,16 +288,16 @@ self =>
case t: Throwable => p complete resolver(t)
}
}
-
+
p.future
}
-
+
/** Creates a new future that will handle any matching throwable that this
* future might contain. If there is no match, or if this future contains
* a valid result then the new future will contain the same.
- *
+ *
* Example:
- *
+ *
* {{{
* future (6 / 0) recover { case e: ArithmeticException ⇒ 0 } // result: 0
* future (6 / 0) recover { case e: NotFoundException ⇒ 0 } // result: exception
@@ -306,25 +306,25 @@ self =>
*/
def recover[U >: T](pf: PartialFunction[Throwable, U]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) if pf isDefinedAt t =>
try { p success pf(t) }
catch { case t: Throwable => p complete resolver(t) }
case otherwise => p complete otherwise
}
-
+
p.future
}
-
+
/** Creates a new future that will handle any matching throwable that this
* future might contain by assigning it a value of another future.
- *
+ *
* If there is no match, or if this future contains
* a valid result then the new future will contain the same result.
- *
+ *
* Example:
- *
+ *
* {{{
* val f = future { Int.MaxValue }
* future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
@@ -332,7 +332,7 @@ self =>
*/
def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) if pf isDefinedAt t =>
try {
@@ -342,13 +342,13 @@ self =>
}
case otherwise => p complete otherwise
}
-
+
p.future
}
-
+
/** Zips the values of `this` and `that` future, and creates
* a new future holding the tuple of their results.
- *
+ *
* If `this` future fails, the resulting future is failed
* with the throwable stored in `this`.
* Otherwise, if `that` future fails, the resulting future is failed
@@ -356,27 +356,27 @@ self =>
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
val p = newPromise[(T, U)]
-
+
this onComplete {
case Failure(t) => p failure t
case Success(r) => that onSuccess {
case r2 => p success ((r, r2))
}
}
-
+
that onFailure {
case f => p failure f
}
-
+
p.future
}
-
+
/** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
* the result of the `that` future if `that` is completed successfully.
* If both futures are failed, the resulting future holds the throwable object of the first future.
- *
+ *
* Using this method will not cause concurrent programs to become nondeterministic.
- *
+ *
* Example:
* {{{
* val f = future { sys.error("failed") }
@@ -387,7 +387,7 @@ self =>
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
val p = newPromise[U]
-
+
onComplete {
case Failure(t) => that onComplete {
case Failure(_) => p failure t
@@ -395,23 +395,23 @@ self =>
}
case Success(v) => p success v
}
-
+
p.future
}
-
+
/** Applies the side-effecting function to the result of this future, and returns
* a new future with the result of this future.
- *
+ *
* This method allows one to enforce that the callbacks are executed in a
* specified order.
- *
+ *
* Note that if one of the chained `andThen` callbacks throws
* an exception, that exception is not propagated to the subsequent `andThen`
* callbacks. Instead, the subsequent `andThen` callbacks are given the original
* value of this future.
- *
+ *
* The following example prints out `5`:
- *
+ *
* {{{
* val f = future { 5 }
* f andThen {
@@ -424,21 +424,21 @@ self =>
*/
def andThen[U](pf: PartialFunction[Try[T], U]): Future[T] = {
val p = newPromise[T]
-
+
onComplete {
case r =>
try if (pf isDefinedAt r) pf(r)
finally p complete r
}
-
+
p.future
}
-
+
/** Creates a new future which holds the result of either this future or `that` future, depending on
* which future was completed first.
- *
+ *
* $nonDeterministic
- *
+ *
* Example:
* {{{
* val f = future { sys.error("failed") }
@@ -449,42 +449,42 @@ self =>
*/
def either[U >: T](that: Future[U]): Future[U] = {
val p = self.newPromise[U]
-
+
val completePromise: PartialFunction[Try[U], _] = {
case Failure(t) => p tryFailure t
case Success(v) => p trySuccess v
}
-
+
self onComplete completePromise
that onComplete completePromise
-
+
p.future
}
-
+
}
/** TODO some docs
- *
+ *
* @define nonDeterministic
* Note: using this method yields nondeterministic dataflow programs.
*/
object Future {
-
+
// TODO make more modular by encoding all other helper methods within the execution context
/** TODO some docs
*/
def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]], ec: ExecutionContext): Future[Coll[T]] =
ec.all[T, Coll](futures)
-
+
// move this to future companion object
@inline def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = executor.future(body)
def any[T](futures: Traversable[Future[T]])(implicit ec: ExecutionContext): Future[T] = ec.any(futures)
def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean)(implicit ec: ExecutionContext): Future[Option[T]] = ec.find(futures)(predicate)
-
+
}
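Putting the `Future` combinators documented above together: `map`/`flatMap` make for-comprehensions work, `recover` substitutes a value for a matching failure, and `failed` projects the throwable. A sketch against this snapshot:

{{{
import scala.concurrent._

val f = future { 6 }
val g = future { 7 }

// desugars to f flatMap (x => g map (y => x + y))
val sum: Future[Int] = for (x <- f; y <- g) yield x + y

// a matching failure is replaced by the recovery value
val safe: Future[Int] = future { 6 / 0 } recover { case _: ArithmeticException => 0 }

// the failed projection succeeds with the throwable of a failed future
future { sys.error("boom") }.failed onSuccess { case t => println(t.getMessage) }
}}}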
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index bac9d4f558..127a0e0055 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -48,9 +48,9 @@ object JavaConversions {
// do nothing
}
}
-
+
implicit def asExecutionContext(exec: ExecutorService): ExecutionContext = null // TODO
-
+
implicit def asExecutionContext(exec: Executor): ExecutionContext = null // TODO
-
+
}
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index f26deb77ab..4404e90971 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -24,36 +24,36 @@ import scala.util.{ Try, Success, Failure }
* If the throwable used to fail this promise is an error, a control exception
* or an interrupted exception, it will be wrapped as a cause within an
* `ExecutionException` which will fail the promise.
- *
+ *
* @define nonDeterministic
* Note: Using this method may result in non-deterministic concurrent programs.
*/
trait Promise[T] {
-
+
import nondeterministic._
-
+
/** Future containing the value of this promise.
*/
def future: Future[T]
-
+
/** Completes the promise with either an exception or a value.
- *
+ *
* @param result Either the value or the exception to complete the promise with.
- *
+ *
* $promiseCompletion
*/
def complete(result:Try[T]): this.type = if (tryComplete(result)) this else throwCompleted
-
+
/** Tries to complete the promise with either a value or the exception.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def tryComplete(result: Try[T]): Boolean
-
+
/** Completes this promise with the specified future, once that future is completed.
- *
+ *
* @return This promise
*/
final def completeWith(other: Future[T]): this.type = {
@@ -62,64 +62,64 @@ trait Promise[T] {
}
this
}
-
+
/** Completes the promise with a value.
- *
+ *
* @param value The value to complete the promise with.
- *
+ *
* $promiseCompletion
*/
def success(v: T): this.type = if (trySuccess(v)) this else throwCompleted
-
+
/** Tries to complete the promise with a value.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def trySuccess(value: T): Boolean = tryComplete(Success(value))
-
+
/** Completes the promise with an exception.
- *
+ *
* @param t The throwable to complete the promise with.
- *
+ *
* $allowedThrowables
- *
+ *
* $promiseCompletion
*/
def failure(t: Throwable): this.type = if (tryFailure(t)) this else throwCompleted
-
+
/** Tries to complete the promise with an exception.
- *
+ *
* $nonDeterministic
- *
+ *
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
-
+
/** Wraps a `Throwable` in an `ExecutionException` if necessary. TODO replace with `resolver` from scala.concurrent
- *
+ *
* $allowedThrowables
*/
protected def wrap(t: Throwable): Throwable = t match {
case t: Throwable if isFutureThrowable(t) => t
case _ => new ExecutionException(t)
}
-
+
private def throwCompleted = throw new IllegalStateException("Promise already completed.")
-
+
}
object Promise {
-
+
def kept[T](result: T)(implicit execctx: ExecutionContext): Promise[T] =
execctx keptPromise result
-
- def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] =
+
+ def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] =
execctx brokenPromise t
-
+
}
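A `Promise` is the writable side of a `Future`: exactly one `success`/`failure`/`complete` wins, and the `try*` variants report a lost race instead of throwing. A sketch using the snapshot's package-level `promise[T]()` factory:

{{{
import scala.concurrent._

val p: Promise[String] = promise[String]()
val f: Future[String] = p.future

f onSuccess { case s => println("completed with: " + s) }

p success "done"          // completes f; completing a second time would throw
p trySuccess "ignored"    // too late: returns false rather than throwing
}}}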
diff --git a/src/library/scala/concurrent/Task.scala b/src/library/scala/concurrent/Task.scala
index d6f86bac31..eb3efbb422 100644
--- a/src/library/scala/concurrent/Task.scala
+++ b/src/library/scala/concurrent/Task.scala
@@ -3,11 +3,11 @@ package scala.concurrent
trait Task[+T] {
-
+
def start(): Unit
-
+
def future: Future[T]
-
+
}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index af0eb66292..7b44d02612 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -19,7 +19,7 @@ import scala.collection.mutable.Stack
class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionContext {
import ExecutionContextImpl._
-
+
def execute(runnable: Runnable): Unit = executorService match {
// case fj: ForkJoinPool =>
// TODO fork if more applicable
@@ -27,16 +27,16 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
case _ =>
executorService execute runnable
}
-
+
def execute[U](body: () => U): Unit = execute(new Runnable {
def run() = body()
})
-
+
def promise[T]: Promise[T] = new Promise.DefaultPromise[T]()(this)
-
+
def future[T](body: =>T): Future[T] = {
val p = promise[T]
-
+
dispatchFuture {
() =>
p complete {
@@ -47,39 +47,39 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
}
}
}
-
+
p.future
}
-
+
def blocking[T](atMost: Duration)(body: =>T): T = blocking(body2awaitable(body), atMost)
-
+
def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = {
currentExecutionContext.get match {
case null => awaitable.await(atMost)(null) // outside - TODO - fix timeout case
case x => x.blockingCall(awaitable) // inside an execution context thread
}
}
-
+
def reportFailure(t: Throwable) = t match {
case e: Error => throw e // rethrow serious errors
case t => t.printStackTrace()
}
-
+
/** Only callable from the tasks running on the same execution context. */
private def blockingCall[T](body: Awaitable[T]): T = {
releaseStack()
-
+
// TODO see what to do with timeout
body.await(Duration.fromNanos(0))(CanAwaitEvidence)
}
-
+
// an optimization for batching futures
// TODO we should replace this with a public queue,
// so that it can be stolen from
// OR: a push to the local task queue should be so cheap that this is
// not even needed, but stealing is still possible
private val _taskStack = new ThreadLocal[Stack[() => Unit]]()
-
+
private def releaseStack(): Unit =
_taskStack.get match {
case stack if (stack ne null) && stack.nonEmpty =>
@@ -92,7 +92,7 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
case _ =>
_taskStack.remove()
}
-
+
private[impl] def dispatchFuture(task: () => Unit, force: Boolean = false): Unit =
_taskStack.get match {
case stack if (stack ne null) && !force => stack push task
@@ -119,16 +119,16 @@ class ExecutionContextImpl(executorService: ExecutorService) extends ExecutionCo
}
)
}
-
+
}
object ExecutionContextImpl {
-
+
private[concurrent] def currentExecutionContext: ThreadLocal[ExecutionContextImpl] = new ThreadLocal[ExecutionContextImpl] {
override protected def initialValue = null
}
-
+
}
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 24d0258cc8..9466761d4d 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -13,35 +13,35 @@ import scala.util.{ Try, Success, Failure }
//import scala.util.continuations._
trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
-
+
implicit def executor: ExecutionContextImpl
-
+
/** For use only within a Future.flow block or another compatible Delimited Continuations reset block.
- *
+ *
* Returns the result of this Future without blocking, by suspending execution and storing it as a
* continuation until the result is available.
*/
//def apply(): T @cps[Future[Any]] = shift(this flatMap (_: T => Future[Any]))
-
+
/** Tests whether this Future has been completed.
*/
final def isCompleted: Boolean = value.isDefined
-
+
/** The contained value of this Future. Before this Future is completed
  * the value will be None. After completion the value will be Some(Success(t))
  * if it contains a valid result, or Some(Failure(error)) if it contains
* an exception.
*/
def value: Option[Try[T]]
-
+
def onComplete[U](func: Try[T] => U): this.type
-
+
/** Creates a new Future[A] which is completed with this Future's result if
* that conforms to A's erased type or a ClassCastException otherwise.
*/
final def mapTo[T](implicit m: Manifest[T]) = {
val p = executor.promise[T]
-
+
onComplete {
case f @ Failure(t) => p complete f.asInstanceOf[Try[T]]
case Success(v) =>
@@ -51,7 +51,7 @@ trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
case e: ClassCastException ⇒ Failure(e)
})
}
-
+
p.future
}
@@ -65,7 +65,7 @@ trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
def flatMap[B](f: A => Future[B]) = self filter p flatMap f
def withFilter(q: A => Boolean): FutureWithFilter[A] = new FutureWithFilter[A](self, x ⇒ p(x) && q(x))
}
-
+
}
object Future {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 7ef76e1501..585f71f3cf 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -23,11 +23,11 @@ import scala.annotation.tailrec
trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
-
+
def future = this
-
+
def newPromise[S]: Promise[S] = executor promise
-
+
// TODO refine answer and return types here from Any to type parameters
// then move this up in the hierarchy
/*
@@ -40,7 +40,7 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
cont: (Future[T] => Future[Any]) =>
val p = executor.promise[Any]
val thisPromise = this
-
+
thisPromise completeWith other
thisPromise onComplete { v =>
try {
@@ -49,12 +49,12 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
case e => p complete resolver(e)
}
}
-
+
p.future
}
*/
// TODO finish this once we introduce something like dataflow streams
-
+
/*
final def <<(stream: PromiseStreamOut[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) =>
val fr = executor.promise[Any]
@@ -70,40 +70,40 @@ trait Promise[T] extends scala.concurrent.Promise[T] with Future[T] {
fr
}
*/
-
+
}
object Promise {
def dur2long(dur: Duration): Long = if (dur.isFinite) dur.toNanos else Long.MaxValue
-
+
def EmptyPending[T](): FState[T] = emptyPendingValue.asInstanceOf[FState[T]]
-
+
/** Represents the internal state.
*/
sealed trait FState[+T] { def value: Option[Try[T]] }
-
+
case class Pending[T](listeners: List[Try[T] => Any] = Nil) extends FState[T] {
def value: Option[Try[T]] = None
}
-
+
case class Success[T](value: Option[util.Success[T]] = None) extends FState[T] {
def result: T = value.get.get
}
-
+
case class Failure[T](value: Option[util.Failure[T]] = None) extends FState[T] {
def exception: Throwable = value.get.exception
}
-
+
private val emptyPendingValue = Pending[Nothing](Nil)
-
+
/** Default promise implementation.
*/
class DefaultPromise[T](implicit val executor: ExecutionContextImpl) extends AbstractPromise with Promise[T] {
self =>
-
+
updater.set(this, Promise.EmptyPending())
-
+
protected final def tryAwait(atMost: Duration): Boolean = {
@tailrec
def awaitUnsafe(waitTimeNanos: Long): Boolean = {
@@ -118,36 +118,36 @@ object Promise {
} catch {
case e: InterruptedException =>
}
-
+
awaitUnsafe(waitTimeNanos - (System.nanoTime() - start))
} else
value.isDefined
}
-
+
executor.blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), Duration.fromNanos(0))
}
-
+
private def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
if (value.isDefined || tryAwait(atMost)) this
else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds")
-
+
def await(atMost: Duration)(implicit permit: CanAwait): T =
ready(atMost).value.get match {
case util.Failure(e) => throw e
case util.Success(r) => r
}
-
+
def value: Option[Try[T]] = getState.value
-
+
@inline
private[this] final def updater = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]]
-
+
@inline
protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = updater.compareAndSet(this, oldState, newState)
-
+
@inline
protected final def getState: FState[T] = updater.get(this)
-
+
def tryComplete(value: Try[T]): Boolean = {
val callbacks: List[Try[T] => Any] = {
try {
@@ -165,7 +165,7 @@ object Promise {
synchronized { notifyAll() } // notify any blockers from `tryAwait`
}
}
-
+
callbacks match {
case null => false
case cs if cs.isEmpty => true
@@ -176,7 +176,7 @@ object Promise {
true
}
}
-
+
def onComplete[U](func: Try[T] => U): this.type = {
@tailrec // Returns whether the future has already been completed or not
def tryAddCallback(): Boolean = {
@@ -188,17 +188,17 @@ object Promise {
if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false else tryAddCallback()
}
}
-
+
if (tryAddCallback()) {
val result = value.get
executor dispatchFuture {
() => notifyCompleted(func, result)
}
}
-
+
this
}
-
+
private final def notifyCompleted(func: Try[T] => Any, result: Try[T]) {
try {
func(result)
@@ -207,16 +207,16 @@ object Promise {
}
}
}
-
+
/** An already completed Future is given its result at creation.
- *
+ *
* Useful in Future-composition when a value to contribute is already available.
*/
final class KeptPromise[T](suppliedValue: Try[T])(implicit val executor: ExecutionContextImpl) extends Promise[T] {
val value = Some(resolve(suppliedValue))
-
+
def tryComplete(value: Try[T]): Boolean = false
-
+
def onComplete[U](func: Try[T] => U): this.type = {
val completedAs = value.get
executor dispatchFuture {
@@ -224,15 +224,15 @@ object Promise {
}
this
}
-
+
private def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
-
+
def await(atMost: Duration)(implicit permit: CanAwait): T = value.get match {
case util.Failure(e) => throw e
case util.Success(r) => r
}
}
-
+
}
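`DefaultPromise` above drives completion through a compare-and-set loop over an `FState`: pending callbacks live in the state until one CAS installs the result and fires them. The same pattern in miniature, written self-contained rather than with the library's updater machinery:

{{{
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

// Toy one-shot cell: Left(callbacks) while pending, Right(value) once completed.
final class OneShot[T] {
  private val state = new AtomicReference[Either[List[T => Unit], T]](Left(Nil))

  @tailrec final def tryComplete(value: T): Boolean = state.get match {
    case Right(_) => false                                     // already completed
    case s @ Left(callbacks) =>
      if (state.compareAndSet(s, Right(value))) { callbacks foreach (_(value)); true }
      else tryComplete(value)                                  // lost the CAS race, retry
  }

  @tailrec final def onComplete(cb: T => Unit): Unit = state.get match {
    case Right(v) => cb(v)                                     // already done: run immediately
    case s @ Left(callbacks) =>
      if (!state.compareAndSet(s, Left(cb :: callbacks))) onComplete(cb)
  }
}
}}}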
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 6a98fd50c2..7cc48c09b2 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -25,31 +25,31 @@ package concurrent {
catch { case _ => }
awaitable
}
-
+
def result[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): T = {
blocking(awaitable, atMost)
}
}
-
+
/** Importing this object allows using some concurrency primitives
* on futures and promises that can yield nondeterministic programs.
- *
+ *
* While program determinism is broken when using these primitives,
* some programs cannot be written without them (e.g. multiple client threads
* cannot send requests to a server thread through regular promises and futures).
*/
object nondeterministic { }
-
+
/** A timeout exception.
- *
+ *
* Futures are failed with a timeout exception when their timeout expires.
- *
+ *
* Each timeout exception contains an origin future which originally timed out.
*/
class FutureTimeoutException(origin: Future[_], message: String) extends TimeoutException(message) {
def this(origin: Future[_]) = this(origin, "Future timed out.")
}
-
+
final class DurationOps private[concurrent] (x: Int) {
// TODO ADD OTHERS
def ns = util.Duration.fromNanos(0)
diff --git a/src/library/scala/reflect/ReflectionUtils.scala b/src/library/scala/reflect/ReflectionUtils.scala
index dfadfb4976..510f0819c6 100644
--- a/src/library/scala/reflect/ReflectionUtils.scala
+++ b/src/library/scala/reflect/ReflectionUtils.scala
@@ -29,13 +29,13 @@ object ReflectionUtils {
def singletonInstance(className: String, cl: ClassLoader = getClass.getClassLoader): AnyRef = {
val name = if (className endsWith "$") className else className + "$"
- val clazz = java.lang.Class.forName(name, true, cl)
+ val clazz = java.lang.Class.forName(name, true, cl)
val singleton = clazz getField "MODULE$" get null
singleton
}
// Retrieves the MODULE$ field for the given class name.
- def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
+ def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
try Some(singletonInstance(className, cl))
catch { case _: ClassNotFoundException => None }
}
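`singletonInstance` above is just `Class.forName` on the `$`-suffixed class followed by a read of the static `MODULE$` field. Doing the same lookup by hand for a well-known object:

{{{
// Mirrors what singletonInstance does, here resolving scala.Predef:
val clazz     = java.lang.Class.forName("scala.Predef$", true, getClass.getClassLoader)
val singleton = clazz getField "MODULE$" get null
println(singleton eq scala.Predef)    // true
}}}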
diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala
index 448dca752c..cea9e1a37d 100644
--- a/src/library/scala/reflect/api/Mirror.scala
+++ b/src/library/scala/reflect/api/Mirror.scala
@@ -13,11 +13,11 @@ trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil {
* to do: throws anything else?
*/
def symbolForName(name: String): Symbol
-
+
/** Return a reference to the companion object of the given class symbol.
*/
def companionInstance(clazz: Symbol): AnyRef
-
+
/** The Scala class symbol corresponding to the runtime class of the given instance.
* @param instance The instance
* @return The class Symbol for the instance
diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala
index c0123ed955..cbfe91e59b 100644
--- a/src/library/scala/reflect/api/Modifier.scala
+++ b/src/library/scala/reflect/api/Modifier.scala
@@ -69,7 +69,7 @@ object Modifier extends immutable.Set[Modifier] {
val parameter = SymbolModifier("parameter")
val preSuper = SymbolModifier("preSuper")
val static = SymbolModifier("static")
-
+
val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet
val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet
val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index 3a00f21c8c..c72774dfc7 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -6,7 +6,7 @@ package api
* The same string can be a name in both universes.
* Two names are equal if they represent the same string and they are
* members of the same universe.
- *
+ *
  * Names are interned. That is, for two names `name1` and `name2`,
* `name1 == name2` implies `name1 eq name2`.
*/
@@ -42,7 +42,7 @@ trait Names {
* Example: `foo_$plus$eq` becomes `foo_+=`
*/
def encoded: String
-
+
/** The decoded name, still represented as a name.
*/
def decodedName: Name
diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala
index 15d754b5b4..44dc2ce1c2 100755
--- a/src/library/scala/reflect/api/Symbols.scala
+++ b/src/library/scala/reflect/api/Symbols.scala
@@ -18,7 +18,7 @@ trait Symbols { self: Universe =>
/** A list of annotations attached to this Symbol.
*/
def annotations: List[self.AnnotationInfo]
-
+
/** Whether this symbol carries an annotation for which the given
* symbol is its typeSymbol.
*/
@@ -99,7 +99,7 @@ trait Symbols { self: Universe =>
* method, or `NoSymbol` if none exists.
*/
def enclosingMethod: Symbol
-
+
/** If this symbol is a package class, this symbol; otherwise the next enclosing
* package class, or `NoSymbol` if none exists.
*/
@@ -170,7 +170,7 @@ trait Symbols { self: Universe =>
* `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
*/
def asTypeConstructor: Type // needed by LiftCode
-
+
/** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
*/
def thisPrefix: Type
@@ -181,10 +181,10 @@ trait Symbols { self: Universe =>
def selfType: Type
/** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
- * the current symbol as its owner.
+ * the current symbol as its owner.
*/
def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
-
+
/** Low-level operation to set the symbol's flags
* @return the symbol itself
*/
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 19bfd09b81..21b55e9c0e 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -55,7 +55,7 @@ trait TreePrinters { self: Universe =>
print(")")
if (typesPrinted)
print(".setType(", tree.tpe, ")")
- case list: List[_] =>
+ case list: List[_] =>
print("List(")
val it = list.iterator
while (it.hasNext) {
@@ -64,16 +64,16 @@ trait TreePrinters { self: Universe =>
}
print(")")
case mods: Modifiers =>
- val parts = collection.mutable.ListBuffer[String]()
+ val parts = collection.mutable.ListBuffer[String]()
parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")"
parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")"
-
+
var keep = 3
if (keep == 3 && mods.annotations.isEmpty) keep -= 1
if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1
if (keep == 1 && mods.modifiers.isEmpty) keep -= 1
-
+
print("Modifiers(", parts.take(keep).mkString(", "), ")")
case name: Name =>
if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 8a91956320..cc8e85b9c8 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -140,7 +140,7 @@ trait Types { self: Universe =>
* If this is not a singleton type, returns this type itself.
*
* Example:
- *
+ *
* class Outer { class C ; val x: C }
* val o: Outer
* <o.x.type>.widen = o.C
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
index ebbd4735e5..2fd9bb6484 100644
--- a/src/library/scala/reflect/macro/Context.scala
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -2,11 +2,11 @@ package scala.reflect
package macro
trait Context extends api.Universe {
-
+
/** Mark a variable as captured; i.e. force boxing in a *Ref type.
*/
def captureVariable(vble: Symbol): Unit
-
+
/** Mark given identifier as a reference to a captured variable itself
* suppressing dereferencing with the `elem` field.
*/
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index b24474f35d..b876869afb 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -25,7 +25,7 @@ import Specializable._
* @since 2.8
*/
// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
-
+
class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
def this(types: Specializable*) = this(new Group(types.toList))
def this() = this(Everything)
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index edc60a1bb5..77e36f6196 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -97,7 +97,7 @@ object BasicIO {
*
* @param withIn True if the process input should be attached to stdin.
* @param buffer A `StringBuffer` which will receive the process normal
- * output.
+ * output.
* @param log An optional `ProcessLogger` to which the output should be
* sent. If `None`, output will be sent to stderr.
* @return A `ProcessIO` with the characteristics above.
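
A minimal usage sketch of the factory whose scaladoc is adjusted above, assuming the three-argument BasicIO.apply(withIn, buffer, log) overload these @param tags document; the ls command is only illustrative.

    import scala.sys.process._

    val buffer = new StringBuffer
    // Collect the process's normal output in `buffer`; with no logger given,
    // error output goes to stderr, and stdin is not attached.
    val io   = BasicIO(withIn = false, buffer, None)
    val exit = Process("ls").run(io).exitValue()
    println(buffer.toString)
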
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 22de5544a8..a62d74b1f6 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -142,7 +142,7 @@ private[scala] trait PropertiesTrait {
*/
def isWin = osName startsWith "Windows"
def isMac = javaVendor startsWith "Apple"
-
+
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
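
A small sketch of how the platform checks above are typically consumed; the per-platform command names are hypothetical.

    import scala.util.Properties

    val openCommand =
      if (Properties.isMac) "open"
      else if (Properties.isWin) "start"
      else "xdg-open"
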
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index a05a75e0b7..c9bde81317 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -15,7 +15,7 @@ import collection.Seq
/**
- * The `Try` type represents a computation that may either result in an exception,
+ * The `Try` type represents a computation that may either result in an exception,
* or return a success value. It's analogous to the `Either` type.
*/
sealed abstract class Try[+T] {
@@ -55,9 +55,9 @@ sealed abstract class Try[+T] {
def map[U](f: T => U): Try[U]
def collect[U](pf: PartialFunction[T, U]): Try[U]
-
+
def exists(p: T => Boolean): Boolean
-
+
/**
* Converts this to a `Failure` if the predicate is not satisfied.
*/
@@ -77,14 +77,14 @@ sealed abstract class Try[+T] {
* Calls the exceptionHandler with the exception if this is a `Failure`. This is like map for the exception.
*/
def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U]
-
+
/**
* Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
*/
def toOption = if (isSuccess) Some(get) else None
def toSeq = if (isSuccess) Seq(get) else Seq()
-
+
/**
* Returns the given function applied to the value from this Success or returns this if this is a `Failure`.
* Alias for `flatMap`.
@@ -92,11 +92,11 @@ sealed abstract class Try[+T] {
def andThen[U](f: T => Try[U]): Try[U] = flatMap(f)
/**
- * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
+ * Transforms a nested `Try`, i.e., a `Try` of type `Try[Try[T]]`,
* into an un-nested `Try`, i.e., a `Try` of type `Try[T]`.
*/
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
-
+
def failed: Try[Throwable]
}
@@ -109,7 +109,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
if (rescueException.isDefinedAt(exception)) rescueException(exception) else this
} catch {
case e2 => Failure(e2)
- }
+ }
}
def get: T = throw exception
def flatMap[U](f: T => Try[U]): Try[U] = Failure[U](exception)
@@ -118,7 +118,7 @@ final case class Failure[+T](val exception: Throwable) extends Try[T] {
def map[U](f: T => U): Try[U] = Failure[U](exception)
def collect[U](pf: PartialFunction[T, U]): Try[U] = Failure[U](exception)
def filter(p: T => Boolean): Try[T] = this
- def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
if (rescueException.isDefinedAt(exception)) {
Try(rescueException(exception))
} else {
@@ -134,10 +134,10 @@ final case class Success[+T](r: T) extends Try[T] {
def isSuccess = true
def rescue[U >: T](rescueException: PartialFunction[Throwable, Try[U]]): Try[U] = Success(r)
def get = r
- def flatMap[U](f: T => Try[U]): Try[U] =
- try f(r)
- catch {
- case e => Failure(e)
+ def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(r)
+ catch {
+ case e => Failure(e)
}
def flatten[U](implicit ev: T <:< Try[U]): Try[U] = r
def foreach[U](f: T => U): Unit = f(r)
@@ -145,7 +145,7 @@ final case class Success[+T](r: T) extends Try[T] {
def collect[U](pf: PartialFunction[T, U]): Try[U] =
if (pf isDefinedAt r) Success(pf(r))
else Failure[U](new NoSuchElementException("Partial function not defined at " + r))
- def filter(p: T => Boolean): Try[T] =
+ def filter(p: T => Boolean): Try[T] =
if (p(r)) this
else Failure(new NoSuchElementException("Predicate does not hold for " + r))
def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
@@ -155,11 +155,11 @@ final case class Success[+T](r: T) extends Try[T] {
object Try {
-
+
def apply[T](r: => T): Try[T] = {
try { Success(r) } catch {
case e => Failure(e)
}
}
-
+
}
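
A brief usage sketch of the Try API in this file, restricted to members visible in the hunks above; the parsePort helper is hypothetical.

    import scala.util.{ Try, Success, Failure }

    def parsePort(s: String): Try[Int] = Try(s.toInt)

    val bumped   = parsePort("8080") map (_ + 1)     // Success(8081)
    val rescued  = parsePort("oops") recover { case _: NumberFormatException => 80 }
    val asOption = parsePort("oops").toOption        // None

    parsePort("443") match {
      case Success(p) => println("port " + p)
      case Failure(e) => println("bad port: " + e.getMessage)
    }
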
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 4004a01ad9..27e9112fce 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -487,7 +487,7 @@ trait Parsers {
}
/** Changes the error message produced by a parser.
- *
+ *
* This doesn't change the behavior of a parser on either
* success or failure, just on error. The semantics are
* slightly different than those obtained by doing `| error(msg)`,
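
A sketch of how such a message-replacing combinator is typically used; the name withErrorMessage is presumed from the library rather than visible in this hunk, and the grammar below is made up.

    import scala.util.parsing.combinator.RegexParsers

    object Ints extends RegexParsers {
      // `commit` turns a Failure into an Error, so the replaced message is
      // what gets reported; plain Failures keep their original message.
      val int: Parser[Int] =
        commit("""\d+""".r ^^ (_.toInt)) withErrorMessage "expected an integer literal"
    }
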
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index 3e0b02a415..39d68dbc18 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -165,7 +165,7 @@ object EmitManPage {
def main(args: Array[String]) = args match{
case Array(classname) => emitManPage(classname)
- case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
+ case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
case _ => sys.exit(1)
}
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index dd06c051a4..1cb09b433a 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -19,7 +19,7 @@ abstract class CompilerTest extends DirectTest {
lazy val global: Global = newCompiler()
lazy val units = compilationUnits(global)(sources: _ *)
-
+
override def extraSettings = "-usejavacp -d " + testOutput.path
def sources: List[String] = List(code)
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 74f511aa4e..07444f8d4b 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -63,7 +63,7 @@ abstract class DirectTest extends App {
global.reporter.reset()
f(new global.Run)
}
-
+
// compile the code, optionally first adding to the settings
def compile(args: String*) = compileString(newCompiler(args: _*))(code)
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index 4fe337b19f..ac04c64c33 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -44,7 +44,7 @@ object PathSettings {
lazy val srcCodeLibDir = Directory(srcDir / "codelib")
lazy val srcCodeLib: File = (
- findJar(srcCodeLibDir, "code")
+ findJar(srcCodeLibDir, "code")
orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 84f28af7ce..1a4b3456b8 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -68,7 +68,7 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
val chunk: Array[Byte] = new Array[Byte](length)
System.arraycopy(bytes, pos, chunk, 0, length)
val str = new String(io.Codec.fromUTF8(bytes, pos, length))
-
+
StringBytesPair(str, chunk)
}