From 549dc880c5525e3a2f3ea6af35c7ae8a349b2bdc Mon Sep 17 00:00:00 2001
From: mpociecha
Date: Sat, 13 Dec 2014 16:57:54 +0100
Subject: Fix many typos in docs and comments

This commit corrects many typos found in scaladocs, comments and documentation. It should reduce a bit the number of PRs that fix a single typo. There are no changes in the 'real' code except one corrected name of a JUnit test method and some error messages in exceptions. In the case of typos in other method or field names etc., I just skipped them. Obviously this commit doesn't fix all existing typos. I just generated a list of potential typos in IntelliJ and looked through it quickly.
---
 src/compiler/scala/tools/ant/Scalac.scala | 2 +-
 src/compiler/scala/tools/nsc/Global.scala | 4 ++--
 src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2 +-
 .../scala/tools/nsc/backend/icode/GenICode.scala | 2 +-
 .../scala/tools/nsc/backend/icode/Primitives.scala | 2 +-
 .../backend/icode/analysis/TypeFlowAnalysis.scala | 4 ++--
 .../tools/nsc/backend/jvm/BCodeSyncAndTry.scala | 2 +-
 .../scala/tools/nsc/backend/jvm/BTypes.scala | 10 +++++-----
 .../tools/nsc/backend/jvm/BTypesFromSymbols.scala | 2 +-
 .../scala/tools/nsc/backend/jvm/CoreBTypes.scala | 8 ++++----
 .../scala/tools/nsc/backend/jvm/GenASM.scala | 4 ++--
 .../scala/tools/nsc/backend/jvm/opt/LocalOpt.scala | 20 ++++++++++----------
 .../tools/nsc/backend/opt/ConstantOptimization.scala | 4 ++--
 .../tools/nsc/backend/opt/DeadCodeElimination.scala | 2 +-
 .../scala/tools/nsc/backend/opt/Inliners.scala | 2 +-
 .../scala/tools/nsc/classpath/FlatClassPath.scala | 4 ++--
 .../scala/tools/nsc/settings/ScalaSettings.scala | 6 +++---
 src/compiler/scala/tools/nsc/settings/Warnings.scala | 2 +-
 .../scala/tools/nsc/symtab/SymbolLoaders.scala | 2 +-
 .../tools/nsc/symtab/classfile/ClassfileParser.scala | 4 ++--
 .../scala/tools/nsc/transform/Constructors.scala | 2 +-
 .../scala/tools/nsc/transform/Delambdafy.scala | 10 +++++-----
 src/compiler/scala/tools/nsc/transform/Erasure.scala | 4 ++--
 .../scala/tools/nsc/transform/ExplicitOuter.scala | 2 +-
 src/compiler/scala/tools/nsc/transform/Flatten.scala | 2 +-
 .../scala/tools/nsc/transform/LambdaLift.scala | 2 +-
 .../scala/tools/nsc/transform/OverridingPairs.scala | 2 +-
 .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +-
 .../scala/tools/nsc/transform/patmat/Logic.scala | 2 +-
 .../tools/nsc/transform/patmat/MatchAnalysis.scala | 2 +-
 .../nsc/transform/patmat/MatchTranslation.scala | 2 +-
 .../tools/nsc/transform/patmat/MatchTreeMaking.scala | 2 +-
 .../scala/tools/nsc/typechecker/ContextErrors.scala | 2 +-
 .../scala/tools/nsc/typechecker/Contexts.scala | 10 +++++-----
 .../scala/tools/nsc/typechecker/Implicits.scala | 4 ++--
 src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2 +-
 .../scala/tools/nsc/typechecker/Namers.scala | 2 +-
 .../scala/tools/nsc/typechecker/NamesDefaults.scala | 2 +-
 .../scala/tools/nsc/typechecker/PatternTypers.scala | 2 +-
 .../tools/nsc/typechecker/SyntheticMethods.scala | 2 +-
 .../scala/tools/nsc/typechecker/Typers.scala | 2 +-
 src/compiler/scala/tools/nsc/util/DocStrings.scala | 2 +-
 42 files changed, 76 insertions(+), 76 deletions(-)

(limited to 'src/compiler/scala/tools')

diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 8905c94eeb..13bf0ef4c6 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -479,7 +479,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
   /** Tests if a file exists and prints a warning in
case it doesn't. Always * returns the file, even if it doesn't exist. - * @param file A file to test for existance. + * @param file A file to test for existence. * @return The same file. */ protected def existing(file: File): File = { if (!file.exists) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 733664c30a..1c9dbad4dd 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -234,7 +234,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Called by ScalaDocAnalyzer when a doc comment has been parsed. */ def signalParsedDocComment(comment: String, pos: Position) = { - // TODO: this is all very borken (only works for scaladoc comments, not regular ones) + // TODO: this is all very broken (only works for scaladoc comments, not regular ones) // --> add hooks to parser and refactor Interactive global to handle comments directly // in any case don't use reporter for parser hooks reporter.comment(pos, comment) @@ -1461,7 +1461,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } - /** Caching member symbols that are def-s in Defintions because they might change from Run to Run. */ + /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions /** Compile list of source files, diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 1eb6c9da2c..e1cfa63960 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -18,7 +18,7 @@ trait PhaseAssembly { /** * Aux datastructure for solving the constraint system - * The depency graph container with helper methods for node and edge creation + * The dependency graph container with helper methods for node and edge creation */ private class DependencyGraph { diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index d9f56b47fa..f385acdb41 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1077,7 +1077,7 @@ abstract class GenICode extends SubComponent { () case (_, UNIT) => ctx.bb.emit(DROP(from), pos) - // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem + // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => coerce(from, to) case _ => diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index f81c42d836..27bf836484 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -60,7 +60,7 @@ trait Primitives { self: ICodes => // type : (buf,el) => buf // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR } - // jvm : It should call the appropiate 'append' method on StringBuffer + // jvm : It should call the appropriate 'append' method on StringBuffer case class StringConcat(el: TypeKind) extends Primitive /** Signals the beginning of a series of concatenations. 
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 676ee12683..b0ad5bdaf9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -332,13 +332,13 @@ abstract class TypeFlowAnalysis { `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time. Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next). - A basic block lacking a callsite in `remainingCALLs`, when visisted by the standard algorithm, won't cause any inlining. + A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining. But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks. In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite. Those basic blocks not in that subgraph can be skipped altogether. That's why: - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs` - same check is performed before adding a block to the worklist, and as part of choosing successors. - The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overridding most methods of the dataflow-analysis. + The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis. The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`. diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index 7c95b7fc3b..b94208c1a5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -284,7 +284,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - // a note on terminology: this is not "postHandlers", despite appearences. + // a note on terminology: this is not "postHandlers", despite appearances. // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. if (hasFinally) { nopIfNeeded(startTryBody) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 53ac5bfdc7..7defd7c873 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -11,7 +11,7 @@ import asm.Opcodes /** * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information - * that is required after building the ASM nodes. This includes optimizations, geneartion of + * that is required after building the ASM nodes. This includes optimizations, generation of * InnerClass attributes and generation of stack map frames. 
* * This representation is immutable and independent of the compiler data structures, hence it can @@ -49,7 +49,7 @@ abstract class BTypes { import coreBTypes._ /** - * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType + * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType * referring to BTypes. */ /*sealed*/ trait BType { // Not sealed for now due to SI-8546 @@ -369,7 +369,7 @@ abstract class BTypes { * * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class * - static initializer: executed before constructor body - * - instance initializer: exectued when class is initialized (instance creation, static + * - instance initializer: executed when class is initialized (instance creation, static * field access, ...) * * - A static nested class can be defined as @@ -540,7 +540,7 @@ abstract class BTypes { * * class A { * void f() { class B {} } - * static void g() { calss C {} } + * static void g() { class C {} } * } * * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. @@ -820,7 +820,7 @@ abstract class BTypes { * * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes - * a source-level propety: if the class is in a static context (does not have an outer pointer). + * a source-level property: if the class is in a static context (does not have an outer pointer). * This is checked when building the NestedInfo. */ case class NestedInfo(enclosingClass: ClassBType, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 3b7cbd6392..e3b812f413 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -10,7 +10,7 @@ import scala.tools.asm /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary - * information from a symbol and its type to create the correpsonding ClassBType. It requires + * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). * * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index fac3c93be2..246235f395 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -4,7 +4,7 @@ package backend.jvm import scala.annotation.switch /** - * Core BTypes and some other definitions. The initialization of these definitions requies access + * Core BTypes and some other definitions. The initialization of these definitions requires access * to symbols / types (global). * * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To @@ -18,11 +18,11 @@ import scala.annotation.switch * * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned + * constructor will actually go through the proxy. 
The lazy vals make sure the instance is assigned * in the proxy before the fields are initialized. * * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are + * could not be a perRunCache anymore: the classes defined here need to be in that map, they are * added when the ClassBTypes are created. The per run cache removes them, so they would be missing * in the second run. */ @@ -192,7 +192,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { } /** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core + * This trait make some core BTypes available that don't depend on a Global instance. Some core * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. * * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index e56a20c2e7..4373d997b8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2947,7 +2947,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => } // end of class JBeanInfoBuilder /** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for. - * In particualr, IMethod.normalize() doesn't collapseJumpChains(). + * In particular, IMethod.normalize() doesn't collapseJumpChains(). * * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them). */ @@ -3162,7 +3162,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => } } - // remove the unusued exception handler references + // remove the unused exception handler references if (settings.debug) for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks") m.exh = m.exh filterNot unusedExceptionHandlers diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 273112b93c..3bd2e5c4f4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -19,16 +19,16 @@ import scala.tools.nsc.settings.ScalaSettings * Optimizations within a single method. * * unreachable code - * - removes instrucions of basic blocks to which no branch instruction points + * - removes instructions of basic blocks to which no branch instruction points * + enables eliminating some exception handlers and local variable descriptors * > eliminating them is required for correctness, as explained in `removeUnreachableCode` * * empty exception handlers * - removes exception handlers whose try block is empty * + eliminating a handler where the try block is empty and reachable will turn the catch block - * unreachble. in this case "unreachable code" is invoked recursively until reaching a fixpiont. + * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint. * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the - * catch block, and the recrusive invocation is not necessary. 
+ * catch block, and the recursive invocation is not necessary. * * simplify jumps * - various simplifications, see doc domments of individual optimizations @@ -52,7 +52,7 @@ class LocalOpt(settings: ScalaSettings) { * cleanups to the bytecode. * * @param clazz The class whose methods are optimized - * @return `true` if unreachable code was elminated in some method, `false` otherwise. + * @return `true` if unreachable code was eliminated in some method, `false` otherwise. */ def methodOptimizations(clazz: ClassNode): Boolean = { settings.Yopt.value.nonEmpty && clazz.methods.asScala.foldLeft(false) { @@ -66,7 +66,7 @@ class LocalOpt(settings: ScalaSettings) { * We rely on dead code elimination provided by the ASM framework, as described in the ASM User * Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only * computes Frame information for reachable instructions. Instructions for which no Frame data is - * available after the analyis are unreachable. + * available after the analysis are unreachable. * * Also simplifies branching instructions, removes unused local variable descriptors, empty * exception handlers, unnecessary label declarations and empty line number nodes. @@ -240,7 +240,7 @@ class LocalOpt(settings: ScalaSettings) { } /** - * The number of local varialbe slots used for parameters and for the `this` reference. + * The number of local variable slots used for parameters and for the `this` reference. */ private def parametersSize(method: MethodNode): Int = { // Double / long fields occupy two slots, so we sum up the sizes. Since getSize returns 0 for @@ -322,7 +322,7 @@ class LocalOpt(settings: ScalaSettings) { * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM * framework only computes these values during bytecode generation. * - * Sicne there's currently no better way, we run a bytecode generator on the method and extract + * Since there's currently no better way, we run a bytecode generator on the method and extract * the computed values. This required changes to the ASM codebase: * - the [[MethodWriter]] class was made public * - accessors for maxLocals / maxStack were added to the MethodWriter class @@ -345,7 +345,7 @@ class LocalOpt(settings: ScalaSettings) { * Removes LineNumberNodes that don't describe any executable instructions. * * This method expects (and asserts) that the `start` label of each LineNumberNode is the - * lexically preceeding label declaration. + * lexically preceding label declaration. */ def removeEmptyLineNumbers(method: MethodNode): Boolean = { def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match { @@ -510,7 +510,7 @@ class LocalOpt(settings: ScalaSettings) { * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...] * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...] * - * Note that no label definitions are allowed in the first [nops] section. Otherwsie, there could + * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could * be some other jump to the GOTO, and eliminating it would change behavior. * * For technical reasons, we cannot remove the GOTO here (*).Instead this method returns an Option @@ -542,7 +542,7 @@ class LocalOpt(settings: ScalaSettings) { * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW * * inlining is only done if the GOTO instruction is not part of a try block, otherwise the - * rewrite might change the behavior. 
For xRETURN, the reason is that return insructions may throw + * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw * an IllegalMonitorStateException, as described here: * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index c6e699373b..0e6ee76eb2 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -18,7 +18,7 @@ import scala.annotation.tailrec * * With some more work it could be extended to * - cache stable values (final fields, modules) in locals - * - replace the copy propagation in ClosureElilmination + * - replace the copy propagation in ClosureElimination * - fold constants * - eliminate unnecessary stores and loads * - propagate knowledge gathered from conditionals for further optimization @@ -437,7 +437,7 @@ abstract class ConstantOptimization extends SubComponent { // TODO if we do all that we need to be careful in the // case that success and failure are the same target block // because we're using a Map and don't want one possible state to clobber the other - // alternative mayb we should just replace the conditional with a jump if both targets are the same + // alternative maybe we should just replace the conditional with a jump if both targets are the same def mightEqual = val1 mightEqual val2 def mightNotEqual = val1 mightNotEqual val2 diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 4b419b210c..3704acb055 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -223,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent { debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx)) val instr = bb(idx) - // adds the instrutions that define the stack values about to be consumed to the work list to + // adds the instructions that define the stack values about to be consumed to the work list to // be marked useful def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) { debuglog(s"\t${bb1(idx1)} is consumed by $instr") diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index aa18b26d93..8f6fc65706 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -290,7 +290,7 @@ abstract class Inliners extends SubComponent { /** * A transformation local to the body of the IMethod received as argument. - * An linining decision consists in replacing a callsite with the body of the callee. + * An inlining decision consists in replacing a callsite with the body of the callee. * Please notice that, because `analyzeMethod()` itself may modify a method body, * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited * (no topological sorting over the call-graph is attempted). 
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala index 26b5429e23..cb201617d2 100644 --- a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala @@ -23,8 +23,8 @@ trait FlatClassPath extends ClassFileLookup[AbstractFile] { /** Allows to get entries for packages and classes merged with sources possibly in one pass. */ private[nsc] def list(inPackage: String): FlatClassPathEntries - // A default implementation which should be overriden, if we can create more efficient - // solution for given type of FlatClassPath + // A default implementation which should be overridden, if we can create the more efficient + // solution for a given type of FlatClassPath override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 18e639b81c..c645a837cc 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -168,7 +168,7 @@ trait ScalaSettings extends AbsScalaSettings val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.") val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.") - val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)") + val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)") val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") val log = PhasesSetting ("-Ylog", "Log operations during") val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.") @@ -199,7 +199,7 @@ trait ScalaSettings extends AbsScalaSettings val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") - val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") + val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212) val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. 
See SI-7899.").withDeprecationMessage(removalIn212) val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive) @@ -215,7 +215,7 @@ trait ScalaSettings extends AbsScalaSettings object YoptChoices extends MultiChoiceEnumeration { val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.") - val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessery ones.") + val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") val recurseUnreachableJumps = Choice("recurse-unreachable-jumps", "Recursively apply unreachable-code and simplify-jumps (if enabled) until reaching a fixpoint.") val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.") val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.") diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index c400e8c29c..d174dc86c7 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -30,7 +30,7 @@ trait Warnings { // Experimental lint warnings that are turned off, but which could be turned on programmatically. // These warnings are said to blind those who dare enable them. // They are not activated by -Xlint and can't be enabled on the command line. - val warnValueOverrides = { // currently turned off as experimental. creaded using constructor (new BS), so not available on the command line. + val warnValueOverrides = { // Currently turned off as experimental. Created using constructor (new BS), so not available on the command line. val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation") flag.value = false flag diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 9af3efbece..8fd2ea45e4 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -170,7 +170,7 @@ abstract class SymbolLoaders { } /** Create a new loader from a binary classfile. - * This is intented as a hook allowing to support loading symbols from + * This is intended as a hook allowing to support loading symbols from * files other than .class files. 
*/ protected def newClassLoader(bin: AbstractFile): SymbolLoader = diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 1abbdb50b0..4d08be3c24 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -587,7 +587,7 @@ abstract class ClassfileParser { info = MethodType(newParams, clazz.tpe) } - // Note: the info may be overrwritten later with a generic signature + // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR sym setInfo info propagatePackageBoundary(jflags, sym) @@ -768,7 +768,7 @@ abstract class ClassfileParser { classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) { - parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter + parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter } ClassInfoType(parents.toList, instanceScope, sym) } diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index f471440293..362cbde04f 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -535,7 +535,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * whether `sym` denotes a param-accessor (ie a field) that fulfills all of: * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and * (b) isn't subject to specialization. We might be processing statements for: - * (b.1) the constructur in the generic (super-)class; or + * (b.1) the constructor in the generic (super-)class; or * (b.2) the constructor in the specialized (sub-)class. * (c) isn't part of a DelayedInit subclass. */ diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index f7b1021ea2..d2c511a2d1 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -9,7 +9,7 @@ import scala.reflect.internal.Symbols import scala.collection.mutable.LinkedHashMap /** - * This transformer is responisble for turning lambdas into anonymous classes. + * This transformer is responsible for turning lambdas into anonymous classes. * The main assumption it makes is that a lambda {args => body} has been turned into * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda. * Currently Uncurry is responsible for that transformation. 
@@ -17,7 +17,7 @@ import scala.collection.mutable.LinkedHashMap * From a lambda, Delambdafy will create * 1) a static forwarder at the top level of the class that contained the lambda * 2) a new top level class that - a) has fields and a constructor taking the captured environment (including possbily the "this" + a) has fields and a constructor taking the captured environment (including possibly the "this" * reference) * b) an apply method that calls the static forwarder * c) if needed a bridge method for the apply method @@ -99,7 +99,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre super.transform(newExpr) // when we encounter a template (basically the thing that holds body of a class/trait) - // we need to updated it to include newly created accesor methods after transforming it + // we need to updated it to include newly created accessor methods after transforming it case Template(_, _, _) => try { // during this call accessorMethods will be populated from the Function case @@ -249,7 +249,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre else "$" + funOwner.name + "$" ) val oldClassPart = oldClass.name.decode - // make sure the class name doesn't contain $anon, otherwsie isAnonymousClass/Function may be true + // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon")) val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation @@ -434,7 +434,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } /** - * Get the symbol of the target lifted lambad body method from a function. I.e. if + * Get the symbol of the target lifted lambda body method from a function. I.e. if * the function is {args => anonfun(args)} then this method returns anonfun's symbol */ private def targetMethod(fun: Function): Symbol = fun match { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index b6af19250e..efe77995cc 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -430,7 +430,7 @@ abstract class Erasure extends AddInterfaces * a name clash. The present method guards against these name clashes. * * @param member The original member - * @param other The overidden symbol for which the bridge was generated + * @param other The overridden symbol for which the bridge was generated * @param bridge The bridge */ def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = { @@ -1153,7 +1153,7 @@ abstract class Erasure extends AddInterfaces } } - /** The main transform function: Pretransfom the tree, and then + /** The main transform function: Pretransform the tree, and then * re-type it at phase erasure.next. 
*/ override def transform(tree: Tree): Tree = { diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index c291961447..6225b486c2 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -441,7 +441,7 @@ abstract class ExplicitOuter extends InfoTransform else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5) case Select(qual, name) => - // make not private symbol acessed from inner classes, as well as + // make not private symbol accessed from inner classes, as well as // symbols accessed from @inline methods // // See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass` diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 4662ef6224..6149e40fa7 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -77,7 +77,7 @@ abstract class Flatten extends InfoTransform { if (sym.isTerm && !sym.isStaticModule) { decls1 enter sym if (sym.isModule) { - // In theory, we could assert(sym.isMethod), because nested, non-static moduls are + // In theory, we could assert(sym.isMethod), because nested, non-static modules are // transformed to methods (lateMETHOD flag added in RefChecks). But this requires // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols // too eagerly (SI-8907). diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index d69c9d9a65..fa0c1f797b 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -402,7 +402,7 @@ abstract class LambdaLift extends InfoTransform { } /* SI-6231: Something like this will be necessary to eliminate the implementation - * restiction from paramGetter above: + * restriction from paramGetter above: * We need to pass getters to the interface of an implementation class. private def fixTraitGetters(lifted: List[Tree]): List[Tree] = for (stat <- lifted) yield stat match { diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index c1c025ad48..e4082eb376 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -35,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs { */ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || ( (lo.owner != high.owner) // don't try to form pairs from overloaded members - && !high.isPrivate // private or private[this] members never are overriden + && !high.isPrivate // private or private[this] members never are overridden && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member. && relatively.matches(lo, high) ) // TODO we don't call exclude(high), should we? 
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 9c81e31ad9..c86a1108b2 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1291,7 +1291,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * // even in the specialized variant, the local X class * // doesn't extend Parent$mcI$sp, since its symbol has * // been created after specialization and was not seen - * // by specialzation's info transformer. + * // by specialization's info transformer. * ... * } * } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 75d2cfe0f2..6339e7b07e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -622,7 +622,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { if (!t.symbol.isStable) { // Create a fresh type for each unstable value, since we can never correlate it to another value. - // For example `case X => case X =>` should not complaing about the second case being unreachable, + // For example `case X => case X =>` should not complain about the second case being unreachable, // if X is mutable. freshExistentialSubtype(t.tpe) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 8650f6ef90..c20e5dce63 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -363,7 +363,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT def handleUnknown(tm: TreeMaker) = handler(tm) } - // used for CSE -- rewrite all unknowns to False (the most conserative option) + // used for CSE -- rewrite all unknowns to False (the most conservative option) object conservative extends TreeMakerToProp { def handleUnknown(tm: TreeMaker) = False } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 22661d6ccf..e8c75ed1ee 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -580,7 +580,7 @@ trait MatchTranslation { // duplicated with the extractor Unapplied case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) => treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) - // SI-7868 Account for numeric widening, e.g. .toInt + // SI-7868 Account for numeric widening, e.g. 
.toInt case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) => treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil) case _ => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 3fd9ce76f8..6755e3726f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -517,7 +517,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) // a foldLeft to accumulate the localSubstitution left-to-right - // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { var accumSubst: Substitution = initial treeMakers foreach { maker => diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 866ca37303..5c36bd9d28 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -73,7 +73,7 @@ trait ContextErrors { // 2) provide the type of the implicit parameter for which we got diverging expansion // (pt at the point of divergence gives less information to the user) // Note: it is safe to delay error message generation in this case - // becasue we don't modify implicits' infos. + // because we don't modify implicits' infos. case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol) extends TreeTypeError { def errMsg: String = errMsgForPt(pt0) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a7d0d32c6f..ca25e59c4b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -145,7 +145,7 @@ trait Contexts { self: Analyzer => * - A variety of bits that track the current error reporting policy (more on this later); * whether or not implicits/macros are enabled, whether we are in a self or super call or * in a constructor suffix. These are represented as bits in the mask `contextMode`. - * - Some odds and ends: undetermined type pararameters of the current line of type inference; + * - Some odds and ends: undetermined type parameters of the current line of type inference; * contextual augmentation for error messages, tracking of the nesting depth. * * And behaviour: @@ -154,19 +154,19 @@ trait Contexts { self: Analyzer => * to buffer these for use in 'silent' type checking, when some recovery might be possible. * - `Context` is something of a Zipper for the tree were are typechecking: it `enclosingContextChain` * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`) - * and to collect in-scope implicit defintions (`implicitss`) + * and to collect in-scope implicit definitions (`implicitss`) * Supporting these are `imports`, which represents all `Import` trees in in the enclosing context chain. 
- * - In a similar vein, we can assess accessiblity (`isAccessible`.) + * - In a similar vein, we can assess accessibility (`isAccessible`.) * * More on error buffering: * When are type errors recoverable? In quite a few places, it turns out. Some examples: * trying to type an application with/without the expected type, or with/without implicit views * enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`. * - * Intially, starting from the `typer` phase, the contexts either buffer or report errors; + * Initially, starting from the `typer` phase, the contexts either buffer or report errors; * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more * fine grained control is needed based on the kind of error; ambiguity errors are often - * suppressed during exploraratory typing, such as determining whether `a == b` in an argument + * suppressed during exploratory typing, such as determining whether `a == b` in an argument * position is an assignment or a named argument, when `Infererencer#isApplicableSafe` type checks * applications with and without an expected type, or whtn `Typer#tryTypedApply` tries to fit arguments to * a function type with/without implicit views. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 74c28122a1..71558273a6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -3,7 +3,7 @@ * @author Martin Odersky */ -//todo: rewrite or disllow new T where T is a mixin (currently: not a member of T) +//todo: rewrite or disallow new T where T is a mixin (currently: not a member of T) //todo: use inherited type info also for vars and values //todo: disallow C#D in superclass //todo: treat :::= correctly @@ -159,7 +159,7 @@ trait Implicits { * @param tree The tree representing the implicit * @param subst A substituter that represents the undetermined type parameters * that were instantiated by the winning implicit. - * @param undetparams undeterminted type parameters + * @param undetparams undetermined type parameters */ class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) { override def toString = "SearchResult(%s, %s)".format(tree, diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 8979b26719..cf97474d9a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1017,7 +1017,7 @@ trait Infer extends Checkable { /** Substitute free type variables `undetparams` of type constructor * `tree` in pattern, given prototype `pt`. * - * @param tree the constuctor that needs to be instantiated + * @param tree the constructor that needs to be instantiated * @param undetparams the undetermined type parameters * @param pt0 the expected result type of the instance */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 0bb94be636..711cfba24d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -171,7 +171,7 @@ trait Namers extends MethodSynthesis { val newFlags = (sym.flags & LOCKED) | flags sym.rawInfo match { case tr: TypeRef => - // !!! needed for: pos/t5954d; the uniques type cache will happilly serve up the same TypeRef + // !!! 
needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef // over this mutated symbol, and we witness a stale cache for `parents`. tr.invalidateCaches() case _ => diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index b6387fd56b..50f658f68d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -384,7 +384,7 @@ trait NamesDefaults { self: Analyzer => * of arguments. * * @param args The list of arguments - * @param params The list of parameter sybols of the invoked method + * @param params The list of parameter symbols of the invoked method * @param argName A function that extracts the name of an argument expression, if it is a named argument. */ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index bb8c3c3c6d..fa4a764f1b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -336,7 +336,7 @@ trait PatternTypers { val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args)) // must call doTypedUnapply directly, as otherwise we get undesirable rewrites // and re-typechecks of the target of the unapply call in PATTERNmode, - // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object, + // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object, // but an arbitrary tree as is the case here val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt) diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 1daff02c23..d2046a158c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -171,7 +171,7 @@ trait SyntheticMethods extends ast.TreeDSL { def thatCast(eqmeth: Symbol): Tree = gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe) - /* The equality method core for case classes and inline clases. + /* The equality method core for case classes and inline classes. * 1+ args: * (that.isInstanceOf[this.C]) && { * val x$1 = that.asInstanceOf[this.C] diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index aaa75b5ee1..bc879eefcf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -946,7 +946,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // Ignore type errors raised in later phases that are due to mismatching types with existential skolems // We have lift crashing in 2.9 with an adapt failure in the pattern matcher. - // Here's my hypothsis why this happens. The pattern matcher defines a variable of type + // Here's my hypothesis why this happens. 
The pattern matcher defines a variable of type // // val x: T = expr // diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index ba44126df2..352816803f 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -8,7 +8,7 @@ package util import scala.reflect.internal.Chars._ -/** Utilitity methods for doc comment strings +/** Utility methods for doc comment strings */ object DocStrings { -- cgit v1.2.3