diff options
140 files changed, 233 insertions, 233 deletions
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md index aca1c63d22..65d79dd5f4 100644 --- a/spec/04-basic-declarations-and-definitions.md +++ b/spec/04-basic-declarations-and-definitions.md @@ -631,7 +631,7 @@ For every parameter $p_{i,j}$ with a default argument a method named expression. Here, $n$ denotes the parameter's position in the method declaration. These methods are parametrized by the type parameter clause `[$\mathit{tps}\,$]` and all value parameter clauses -`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceeding $p_{i,j}$. +`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$. The `$f\$$default$\$$n` methods are inaccessible for user programs. diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index ef854b2abb..fd20d6ae2c 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -128,7 +128,7 @@ consists of the following steps. occurrence in the linearization. - Finally the statement sequence $\mathit{stats}\,$ is evaluated. -###### Delayed Initializaton +###### Delayed Initialization The initialization code of an object or class (but not a trait) that follows the superclass constructor invocation and the mixin-evaluation of the template's base diff --git a/spec/06-expressions.md b/spec/06-expressions.md index afd1492744..da41651a74 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -666,7 +666,7 @@ followed by operators starting with ``|`', etc. There's one exception to this rule, which concerns [_assignment operators_](#assignment-operators). -The precedence of an assigment operator is the same as the one +The precedence of an assignment operator is the same as the one of simple assignment `(=)`. That is, it is lower than the precedence of any other operator. @@ -1761,7 +1761,7 @@ trait Dynamic { ``` Assume a selection of the form $e.x$ where the type of $e$ conforms to `scala.Dynamic`. 
-Further assuming the selection is not followed by any function arguments, such an expression can be rewitten under the conditions given [here](#implicit-conversions) to: +Further assuming the selection is not followed by any function arguments, such an expression can be rewritten under the conditions given [here](#implicit-conversions) to: ```scala $e$.applyDynamic("$x$") diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md index 3538457b5c..e75bddc096 100644 --- a/spec/08-pattern-matching.md +++ b/spec/08-pattern-matching.md @@ -609,7 +609,7 @@ case class If[T](c: Term[Boolean], There are terms to represent numeric literals, incrementation, a zero test, and a conditional. Every term carries as a type parameter the -type of the expression it representes (either `Int` or `Boolean`). +type of the expression it represents (either `Int` or `Boolean`). A type-safe evaluator for such terms can be written as follows. diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md index 847fab548d..e3185d8b7d 100644 --- a/spec/09-top-level-definitions.md +++ b/spec/09-top-level-definitions.md @@ -159,7 +159,7 @@ The `main` method of a program can be directly defined in the object, or it can be inherited. The scala library defines a special class `scala.App` whose body acts as a `main` method. An objects $m$ inheriting from this class is thus a program, -which executes the initializaton code of the object $m$. +which executes the initialization code of the object $m$. ###### Example The following example will create a hello world program by defining diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala index 75160fa18f..293335f720 100644 --- a/src/actors/scala/actors/Actor.scala +++ b/src/actors/scala/actors/Actor.scala @@ -205,7 +205,7 @@ object Actor extends Combinators { * Actions in `f` have to contain the rest of the computation of `self`, * as this method will never return. 
* - * A common method of continuting the computation is to send a message + * A common method of continuing the computation is to send a message * to another actor: * {{{ * react { diff --git a/src/actors/scala/actors/LinkedQueue.java b/src/actors/scala/actors/LinkedQueue.java index 796f428cf5..3f7b93c386 100644 --- a/src/actors/scala/actors/LinkedQueue.java +++ b/src/actors/scala/actors/LinkedQueue.java @@ -22,7 +22,7 @@ package scala.actors; * and takes when the queue is not empty. * Normally a put and a take can proceed simultaneously. * (Although it does not allow multiple concurrent puts or takes.) - * This class tends to perform more efficently than + * This class tends to perform more efficiently than * other Channel implementations in producer/consumer * applications. * <p>[<a href="http://gee.cs.oswego.edu/dl/classes/EDU/oswego/cs/dl/util/concurrent/intro.html"> Introduction to this package. </a>] diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala index 9949b36181..2cb03544f2 100644 --- a/src/actors/scala/actors/remote/Proxy.scala +++ b/src/actors/scala/actors/remote/Proxy.scala @@ -84,7 +84,7 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net } // Proxy is private[remote], but these classes are public and use it in a public -// method signature. That makes the only method they have non-overriddable. +// method signature. That makes the only method they have non-overridable. // So I made them final, which seems appropriate anyway. 
final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable { diff --git a/src/actors/scala/actors/threadpool/AbstractCollection.java b/src/actors/scala/actors/threadpool/AbstractCollection.java index f3dc1e1292..195a0064ab 100644 --- a/src/actors/scala/actors/threadpool/AbstractCollection.java +++ b/src/actors/scala/actors/threadpool/AbstractCollection.java @@ -1,6 +1,6 @@ /* * Written by Dawid Kurzyniec, based on public domain code written by Doug Lea - * and publictly available documentation, and released to the public domain, as + * and publicly available documentation, and released to the public domain, as * explained at http://creativecommons.org/licenses/publicdomain */ diff --git a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java index 9a4a4fb71c..02e9bbe297 100644 --- a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java +++ b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java @@ -135,7 +135,7 @@ public class ExecutorCompletionService implements CompletionService { * @param completionQueue the queue to use as the completion queue * normally one dedicated for use by this service. This queue is * treated as unbounded -- failed attempted <tt>Queue.add</tt> - * operations for completed taskes cause them not to be + * operations for completed tasks cause them not to be * retrievable. 
* @throws NullPointerException if executor or completionQueue are <tt>null</tt> */ diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java index 437af77c7a..914d242100 100644 --- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java +++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java @@ -20,13 +20,13 @@ import scala.actors.threadpool.helpers.*; * * <p>The order of entry * to the read and write lock is unspecified, subject to reentrancy - * constraints. A nonfair lock that is continously contended may + * constraints. A nonfair lock that is continuously contended may * indefinitely postpone one or more reader or writer threads, but * will normally have higher throughput than a fair lock. * <p> * * DEPARTURE FROM java.util.concurrent: this implementation impose - * a writer-preferrence and thus its acquisition order may be different + * a writer-preference and thus its acquisition order may be different * than in java.util.concurrent. * * <li><b>Reentrancy</b> diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java index 5d5529ce74..c094eba408 100644 --- a/src/asm/scala/tools/asm/Label.java +++ b/src/asm/scala/tools/asm/Label.java @@ -545,7 +545,7 @@ public class Label { } // ------------------------------------------------------------------------ - // Overriden Object methods + // Overridden Object methods // ------------------------------------------------------------------------ /** diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java index 0134555f10..ff840aabde 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java +++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java @@ -375,7 +375,7 @@ public class Analyzer<V extends Value> implements Opcodes { * instruction of the method. 
The size of the returned array is * equal to the number of instructions (and labels) of the method. A * given frame is <tt>null</tt> if the corresponding instruction - * cannot be reached, or if an error occured during the analysis of + * cannot be reached, or if an error occurred during the analysis of * the method. */ public Frame<V>[] getFrames() { @@ -435,7 +435,7 @@ public class Analyzer<V extends Value> implements Opcodes { /** * Creates a control flow graph edge. The default implementation of this - * method does nothing. It can be overriden in order to construct the + * method does nothing. It can be overridden in order to construct the * control flow graph of a method (this method is called by the * {@link #analyze analyze} method during its visit of the method's code). * diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java index 56f4bedc00..00fe6c8bff 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java @@ -82,7 +82,7 @@ public abstract class Interpreter<V extends Value> { * the bytecode instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V newOperation(AbstractInsnNode insn) throws AnalyzerException; @@ -101,7 +101,7 @@ public abstract class Interpreter<V extends Value> { * @return the result of the interpretation of the given instruction. The * returned value must be <tt>equal</tt> to the given value. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. 
*/ public abstract V copyOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -122,7 +122,7 @@ public abstract class Interpreter<V extends Value> { * the argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V unaryOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -146,7 +146,7 @@ public abstract class Interpreter<V extends Value> { * the second argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2) throws AnalyzerException; @@ -167,7 +167,7 @@ public abstract class Interpreter<V extends Value> { * the third argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V ternaryOperation(AbstractInsnNode insn, V value1, V value2, V value3) throws AnalyzerException; @@ -185,7 +185,7 @@ public abstract class Interpreter<V extends Value> { * the arguments of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. * @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract V naryOperation(AbstractInsnNode insn, List<? extends V> values) throws AnalyzerException; @@ -203,7 +203,7 @@ public abstract class Interpreter<V extends Value> { * @param expected * the expected return type of the analyzed method. 
* @throws AnalyzerException - * if an error occured during the interpretation. + * if an error occurred during the interpretation. */ public abstract void returnOperation(AbstractInsnNode insn, V value, V expected) throws AnalyzerException; diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java index 4135672c6b..773f129ad9 100644 --- a/src/asm/scala/tools/asm/util/Printer.java +++ b/src/asm/scala/tools/asm/util/Printer.java @@ -181,7 +181,7 @@ public abstract class Printer { */ public Printer visitClassTypeAnnotation(final int typeRef, final TypePath typePath, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -264,7 +264,7 @@ public abstract class Printer { */ public Printer visitFieldTypeAnnotation(final int typeRef, final TypePath typePath, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -287,7 +287,7 @@ public abstract class Printer { * {@link scala.tools.asm.MethodVisitor#visitParameter(String, int)}. 
*/ public void visitParameter(String name, int access) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -309,7 +309,7 @@ public abstract class Printer { */ public Printer visitMethodTypeAnnotation(final int typeRef, final TypePath typePath, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -380,7 +380,7 @@ public abstract class Printer { visitMethodInsn(opcode, owner, name, desc, itf); return; } - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -397,7 +397,7 @@ public abstract class Printer { visitMethodInsn(opcode, owner, name, desc); return; } - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -457,7 +457,7 @@ public abstract class Printer { */ public Printer visitInsnAnnotation(final int typeRef, final TypePath typePath, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -473,7 +473,7 @@ public abstract class Printer { */ public Printer visitTryCatchAnnotation(final int typeRef, final TypePath typePath, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** @@ -491,7 +491,7 @@ public abstract class Printer { public Printer visitLocalVariableAnnotation(final int typeRef, final TypePath typePath, final Label[] start, final Label[] end, final int[] index, final String desc, final boolean visible) { - throw new RuntimeException("Must be overriden"); + throw new RuntimeException("Must be overridden"); } /** diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 8905c94eeb..13bf0ef4c6 100644 --- 
a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -479,7 +479,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Tests if a file exists and prints a warning in case it doesn't. Always * returns the file, even if it doesn't exist. - * @param file A file to test for existance. + * @param file A file to test for existence. * @return The same file. */ protected def existing(file: File): File = { if (!file.exists) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 733664c30a..1c9dbad4dd 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -234,7 +234,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Called by ScalaDocAnalyzer when a doc comment has been parsed. */ def signalParsedDocComment(comment: String, pos: Position) = { - // TODO: this is all very borken (only works for scaladoc comments, not regular ones) + // TODO: this is all very broken (only works for scaladoc comments, not regular ones) // --> add hooks to parser and refactor Interactive global to handle comments directly // in any case don't use reporter for parser hooks reporter.comment(pos, comment) @@ -1461,7 +1461,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } - /** Caching member symbols that are def-s in Defintions because they might change from Run to Run. */ + /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. 
*/ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions /** Compile list of source files, diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 1eb6c9da2c..e1cfa63960 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -18,7 +18,7 @@ trait PhaseAssembly { /** * Aux datastructure for solving the constraint system - * The depency graph container with helper methods for node and edge creation + * The dependency graph container with helper methods for node and edge creation */ private class DependencyGraph { diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index d9f56b47fa..f385acdb41 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1077,7 +1077,7 @@ abstract class GenICode extends SubComponent { () case (_, UNIT) => ctx.bb.emit(DROP(from), pos) - // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem + // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => coerce(from, to) case _ => diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index f81c42d836..27bf836484 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -60,7 +60,7 @@ trait Primitives { self: ICodes => // type : (buf,el) => buf // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR } - // jvm : It should call the appropiate 'append' method on StringBuffer + // jvm : It should call the appropriate 'append' method on StringBuffer case class StringConcat(el: TypeKind) extends Primitive /** Signals 
the beginning of a series of concatenations. diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 676ee12683..b0ad5bdaf9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -332,13 +332,13 @@ abstract class TypeFlowAnalysis { `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time. Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next). - A basic block lacking a callsite in `remainingCALLs`, when visisted by the standard algorithm, won't cause any inlining. + A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining. But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks. In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite. Those basic blocks not in that subgraph can be skipped altogether. That's why: - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs` - same check is performed before adding a block to the worklist, and as part of choosing successors. - The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overridding most methods of the dataflow-analysis. + The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis. The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`. 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index 7c95b7fc3b..b94208c1a5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -284,7 +284,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - // a note on terminology: this is not "postHandlers", despite appearences. + // a note on terminology: this is not "postHandlers", despite appearances. // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. if (hasFinally) { nopIfNeeded(startTryBody) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 53ac5bfdc7..7defd7c873 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -11,7 +11,7 @@ import asm.Opcodes /** * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information - * that is required after building the ASM nodes. This includes optimizations, geneartion of + * that is required after building the ASM nodes. This includes optimizations, generation of * InnerClass attributes and generation of stack map frames. * * This representation is immutable and independent of the compiler data structures, hence it can @@ -49,7 +49,7 @@ abstract class BTypes { import coreBTypes._ /** - * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType + * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType * referring to BTypes. 
*/ /*sealed*/ trait BType { // Not sealed for now due to SI-8546 @@ -369,7 +369,7 @@ abstract class BTypes { * * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class * - static initializer: executed before constructor body - * - instance initializer: exectued when class is initialized (instance creation, static + * - instance initializer: executed when class is initialized (instance creation, static * field access, ...) * * - A static nested class can be defined as @@ -540,7 +540,7 @@ abstract class BTypes { * * class A { * void f() { class B {} } - * static void g() { calss C {} } + * static void g() { class C {} } * } * * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. @@ -820,7 +820,7 @@ abstract class BTypes { * * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes - * a source-level propety: if the class is in a static context (does not have an outer pointer). + * a source-level property: if the class is in a static context (does not have an outer pointer). * This is checked when building the NestedInfo. */ case class NestedInfo(enclosingClass: ClassBType, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 3b7cbd6392..e3b812f413 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -10,7 +10,7 @@ import scala.tools.asm /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary - * information from a symbol and its type to create the correpsonding ClassBType. It requires + * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). 
* * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index fac3c93be2..246235f395 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -4,7 +4,7 @@ package backend.jvm import scala.annotation.switch /** - * Core BTypes and some other definitions. The initialization of these definitions requies access + * Core BTypes and some other definitions. The initialization of these definitions requires access * to symbols / types (global). * * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To @@ -18,11 +18,11 @@ import scala.annotation.switch * * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned + * constructor will actually go through the proxy. The lazy vals make sure the instance is assigned * in the proxy before the fields are initialized. * * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are + * could not be a perRunCache anymore: the classes defined here need to be in that map, they are * added when the ClassBTypes are created. The per run cache removes them, so they would be missing * in the second run. */ @@ -192,7 +192,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { } /** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core + * This trait make some core BTypes available that don't depend on a Global instance. 
Some core * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. * * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index e56a20c2e7..4373d997b8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2947,7 +2947,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => } // end of class JBeanInfoBuilder /** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for. - * In particualr, IMethod.normalize() doesn't collapseJumpChains(). + * In particular, IMethod.normalize() doesn't collapseJumpChains(). * * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them). */ @@ -3162,7 +3162,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => } } - // remove the unusued exception handler references + // remove the unused exception handler references if (settings.debug) for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks") m.exh = m.exh filterNot unusedExceptionHandlers diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 273112b93c..3bd2e5c4f4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -19,16 +19,16 @@ import scala.tools.nsc.settings.ScalaSettings * Optimizations within a single method. 
* * unreachable code - * - removes instrucions of basic blocks to which no branch instruction points + * - removes instructions of basic blocks to which no branch instruction points * + enables eliminating some exception handlers and local variable descriptors * > eliminating them is required for correctness, as explained in `removeUnreachableCode` * * empty exception handlers * - removes exception handlers whose try block is empty * + eliminating a handler where the try block is empty and reachable will turn the catch block - * unreachble. in this case "unreachable code" is invoked recursively until reaching a fixpiont. + * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint. * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the - * catch block, and the recrusive invocation is not necessary. + * catch block, and the recursive invocation is not necessary. * * simplify jumps * - various simplifications, see doc domments of individual optimizations @@ -52,7 +52,7 @@ class LocalOpt(settings: ScalaSettings) { * cleanups to the bytecode. * * @param clazz The class whose methods are optimized - * @return `true` if unreachable code was elminated in some method, `false` otherwise. + * @return `true` if unreachable code was eliminated in some method, `false` otherwise. */ def methodOptimizations(clazz: ClassNode): Boolean = { settings.Yopt.value.nonEmpty && clazz.methods.asScala.foldLeft(false) { @@ -66,7 +66,7 @@ class LocalOpt(settings: ScalaSettings) { * We rely on dead code elimination provided by the ASM framework, as described in the ASM User * Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only * computes Frame information for reachable instructions. Instructions for which no Frame data is - * available after the analyis are unreachable. + * available after the analysis are unreachable. 
* * Also simplifies branching instructions, removes unused local variable descriptors, empty * exception handlers, unnecessary label declarations and empty line number nodes. @@ -240,7 +240,7 @@ class LocalOpt(settings: ScalaSettings) { } /** - * The number of local varialbe slots used for parameters and for the `this` reference. + * The number of local variable slots used for parameters and for the `this` reference. */ private def parametersSize(method: MethodNode): Int = { // Double / long fields occupy two slots, so we sum up the sizes. Since getSize returns 0 for @@ -322,7 +322,7 @@ class LocalOpt(settings: ScalaSettings) { * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM * framework only computes these values during bytecode generation. * - * Sicne there's currently no better way, we run a bytecode generator on the method and extract + * Since there's currently no better way, we run a bytecode generator on the method and extract * the computed values. This required changes to the ASM codebase: * - the [[MethodWriter]] class was made public * - accessors for maxLocals / maxStack were added to the MethodWriter class @@ -345,7 +345,7 @@ class LocalOpt(settings: ScalaSettings) { * Removes LineNumberNodes that don't describe any executable instructions. * * This method expects (and asserts) that the `start` label of each LineNumberNode is the - * lexically preceeding label declaration. + * lexically preceding label declaration. */ def removeEmptyLineNumbers(method: MethodNode): Boolean = { def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match { @@ -510,7 +510,7 @@ class LocalOpt(settings: ScalaSettings) { * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...] * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...] * - * Note that no label definitions are allowed in the first [nops] section. Otherwsie, there could + * Note that no label definitions are allowed in the first [nops] section. 
Otherwise, there could * be some other jump to the GOTO, and eliminating it would change behavior. * * For technical reasons, we cannot remove the GOTO here (*).Instead this method returns an Option @@ -542,7 +542,7 @@ class LocalOpt(settings: ScalaSettings) { * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW * * inlining is only done if the GOTO instruction is not part of a try block, otherwise the - * rewrite might change the behavior. For xRETURN, the reason is that return insructions may throw + * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw * an IllegalMonitorStateException, as described here: * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index c6e699373b..0e6ee76eb2 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -18,7 +18,7 @@ import scala.annotation.tailrec * * With some more work it could be extended to * - cache stable values (final fields, modules) in locals - * - replace the copy propagation in ClosureElilmination + * - replace the copy propagation in ClosureElimination * - fold constants * - eliminate unnecessary stores and loads * - propagate knowledge gathered from conditionals for further optimization @@ -437,7 +437,7 @@ abstract class ConstantOptimization extends SubComponent { // TODO if we do all that we need to be careful in the // case that success and failure are the same target block // because we're using a Map and don't want one possible state to clobber the other - // alternative mayb we should just replace the conditional with a jump if both targets are the same + // alternative maybe we should just replace the conditional with a jump if both targets are the same def mightEqual = val1 mightEqual 
val2 def mightNotEqual = val1 mightNotEqual val2 diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 4b419b210c..3704acb055 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -223,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent { debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx)) val instr = bb(idx) - // adds the instrutions that define the stack values about to be consumed to the work list to + // adds the instructions that define the stack values about to be consumed to the work list to // be marked useful def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) { debuglog(s"\t${bb1(idx1)} is consumed by $instr") diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index aa18b26d93..8f6fc65706 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -290,7 +290,7 @@ abstract class Inliners extends SubComponent { /** * A transformation local to the body of the IMethod received as argument. - * An linining decision consists in replacing a callsite with the body of the callee. + * An inlining decision consists in replacing a callsite with the body of the callee. * Please notice that, because `analyzeMethod()` itself may modify a method body, * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited * (no topological sorting over the call-graph is attempted). 
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala index 26b5429e23..cb201617d2 100644 --- a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala @@ -23,8 +23,8 @@ trait FlatClassPath extends ClassFileLookup[AbstractFile] { /** Allows to get entries for packages and classes merged with sources possibly in one pass. */ private[nsc] def list(inPackage: String): FlatClassPathEntries - // A default implementation which should be overriden, if we can create more efficient - // solution for given type of FlatClassPath + // A default implementation which should be overridden, if we can create the more efficient + // solution for a given type of FlatClassPath override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 18e639b81c..c645a837cc 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -168,7 +168,7 @@ trait ScalaSettings extends AbsScalaSettings val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.") val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.") - val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)") + val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. 
(Normally suppressed due to high volume)") val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") val log = PhasesSetting ("-Ylog", "Log operations during") val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.") @@ -199,7 +199,7 @@ trait ScalaSettings extends AbsScalaSettings val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") - val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") + val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212) val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. 
See SI-7899.").withDeprecationMessage(removalIn212) val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive) @@ -215,7 +215,7 @@ trait ScalaSettings extends AbsScalaSettings object YoptChoices extends MultiChoiceEnumeration { val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.") - val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessery ones.") + val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") val recurseUnreachableJumps = Choice("recurse-unreachable-jumps", "Recursively apply unreachable-code and simplify-jumps (if enabled) until reaching a fixpoint.") val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.") val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.") diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index c400e8c29c..d174dc86c7 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -30,7 +30,7 @@ trait Warnings { // Experimental lint warnings that are turned off, but which could be turned on programmatically. // These warnings are said to blind those who dare enable them. // They are not activated by -Xlint and can't be enabled on the command line. - val warnValueOverrides = { // currently turned off as experimental. creaded using constructor (new BS), so not available on the command line. + val warnValueOverrides = { // Currently turned off as experimental. Created using constructor (new BS), so not available on the command line. 
val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation") flag.value = false flag diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 9af3efbece..8fd2ea45e4 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -170,7 +170,7 @@ abstract class SymbolLoaders { } /** Create a new loader from a binary classfile. - * This is intented as a hook allowing to support loading symbols from + * This is intended as a hook allowing to support loading symbols from * files other than .class files. */ protected def newClassLoader(bin: AbstractFile): SymbolLoader = diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 1abbdb50b0..4d08be3c24 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -587,7 +587,7 @@ abstract class ClassfileParser { info = MethodType(newParams, clazz.tpe) } - // Note: the info may be overrwritten later with a generic signature + // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR sym setInfo info propagatePackageBoundary(jflags, sym) @@ -768,7 +768,7 @@ abstract class ClassfileParser { classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) { - parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter + parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter } ClassInfoType(parents.toList, instanceScope, sym) } diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index f471440293..362cbde04f 100644 --- 
a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -535,7 +535,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * whether `sym` denotes a param-accessor (ie a field) that fulfills all of: * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and * (b) isn't subject to specialization. We might be processing statements for: - * (b.1) the constructur in the generic (super-)class; or + * (b.1) the constructor in the generic (super-)class; or * (b.2) the constructor in the specialized (sub-)class. * (c) isn't part of a DelayedInit subclass. */ diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index f7b1021ea2..d2c511a2d1 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -9,7 +9,7 @@ import scala.reflect.internal.Symbols import scala.collection.mutable.LinkedHashMap /** - * This transformer is responisble for turning lambdas into anonymous classes. + * This transformer is responsible for turning lambdas into anonymous classes. * The main assumption it makes is that a lambda {args => body} has been turned into * {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda. * Currently Uncurry is responsible for that transformation. 
@@ -17,7 +17,7 @@ import scala.collection.mutable.LinkedHashMap * From a lambda, Delambdafy will create * 1) a static forwarder at the top level of the class that contained the lambda * 2) a new top level class that - a) has fields and a constructor taking the captured environment (including possbily the "this" + a) has fields and a constructor taking the captured environment (including possibly the "this" * reference) * b) an apply method that calls the static forwarder * c) if needed a bridge method for the apply method @@ -99,7 +99,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre super.transform(newExpr) // when we encounter a template (basically the thing that holds body of a class/trait) - // we need to updated it to include newly created accesor methods after transforming it + // we need to updated it to include newly created accessor methods after transforming it case Template(_, _, _) => try { // during this call accessorMethods will be populated from the Function case @@ -249,7 +249,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre else "$" + funOwner.name + "$" ) val oldClassPart = oldClass.name.decode - // make sure the class name doesn't contain $anon, otherwsie isAnonymousClass/Function may be true + // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon")) val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation @@ -434,7 +434,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } /** - * Get the symbol of the target lifted lambad body method from a function. I.e. if + * Get the symbol of the target lifted lambda body method from a function. I.e. 
if * the function is {args => anonfun(args)} then this method returns anonfun's symbol */ private def targetMethod(fun: Function): Symbol = fun match { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index b6af19250e..efe77995cc 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -430,7 +430,7 @@ abstract class Erasure extends AddInterfaces * a name clash. The present method guards against these name clashes. * * @param member The original member - * @param other The overidden symbol for which the bridge was generated + * @param other The overridden symbol for which the bridge was generated * @param bridge The bridge */ def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = { @@ -1153,7 +1153,7 @@ abstract class Erasure extends AddInterfaces } } - /** The main transform function: Pretransfom the tree, and then + /** The main transform function: Pretransform the tree, and then * re-type it at phase erasure.next. 
*/ override def transform(tree: Tree): Tree = { diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index c291961447..6225b486c2 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -441,7 +441,7 @@ abstract class ExplicitOuter extends InfoTransform else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5) case Select(qual, name) => - // make not private symbol acessed from inner classes, as well as + // make not private symbol accessed from inner classes, as well as // symbols accessed from @inline methods // // See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass` diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 4662ef6224..6149e40fa7 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -77,7 +77,7 @@ abstract class Flatten extends InfoTransform { if (sym.isTerm && !sym.isStaticModule) { decls1 enter sym if (sym.isModule) { - // In theory, we could assert(sym.isMethod), because nested, non-static moduls are + // In theory, we could assert(sym.isMethod), because nested, non-static modules are // transformed to methods (lateMETHOD flag added in RefChecks). But this requires // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols // too eagerly (SI-8907). 
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index d69c9d9a65..fa0c1f797b 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -402,7 +402,7 @@ abstract class LambdaLift extends InfoTransform { } /* SI-6231: Something like this will be necessary to eliminate the implementation - * restiction from paramGetter above: + * restriction from paramGetter above: * We need to pass getters to the interface of an implementation class. private def fixTraitGetters(lifted: List[Tree]): List[Tree] = for (stat <- lifted) yield stat match { diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index c1c025ad48..e4082eb376 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -35,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs { */ override protected def matches(lo: Symbol, high: Symbol) = lo.isType || ( (lo.owner != high.owner) // don't try to form pairs from overloaded members - && !high.isPrivate // private or private[this] members never are overriden + && !high.isPrivate // private or private[this] members never are overridden && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member. && relatively.matches(lo, high) ) // TODO we don't call exclude(high), should we? 
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 9c81e31ad9..c86a1108b2 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1291,7 +1291,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * // even in the specialized variant, the local X class * // doesn't extend Parent$mcI$sp, since its symbol has * // been created after specialization and was not seen - * // by specialzation's info transformer. + * // by specialization's info transformer. * ... * } * } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 75d2cfe0f2..6339e7b07e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -622,7 +622,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { if (!t.symbol.isStable) { // Create a fresh type for each unstable value, since we can never correlate it to another value. - // For example `case X => case X =>` should not complaing about the second case being unreachable, + // For example `case X => case X =>` should not complain about the second case being unreachable, // if X is mutable. 
freshExistentialSubtype(t.tpe) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 8650f6ef90..c20e5dce63 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -363,7 +363,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT def handleUnknown(tm: TreeMaker) = handler(tm) } - // used for CSE -- rewrite all unknowns to False (the most conserative option) + // used for CSE -- rewrite all unknowns to False (the most conservative option) object conservative extends TreeMakerToProp { def handleUnknown(tm: TreeMaker) = False } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 22661d6ccf..e8c75ed1ee 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -580,7 +580,7 @@ trait MatchTranslation { // duplicated with the extractor Unapplied case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) => treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) - // SI-7868 Account for numeric widening, e.g. <unappplySelector>.toInt + // SI-7868 Account for numeric widening, e.g. 
<unapplySelector>.toInt case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) => treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil) case _ => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 3fd9ce76f8..6755e3726f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -517,7 +517,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) // a foldLeft to accumulate the localSubstitution left-to-right - // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { var accumSubst: Substitution = initial treeMakers foreach { maker => diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 866ca37303..5c36bd9d28 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -73,7 +73,7 @@ trait ContextErrors { // 2) provide the type of the implicit parameter for which we got diverging expansion // (pt at the point of divergence gives less information to the user) // Note: it is safe to delay error message generation in this case - // becasue we don't modify implicits' infos. + // because we don't modify implicits' infos. 
case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol) extends TreeTypeError { def errMsg: String = errMsgForPt(pt0) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a7d0d32c6f..ca25e59c4b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -145,7 +145,7 @@ trait Contexts { self: Analyzer => * - A variety of bits that track the current error reporting policy (more on this later); * whether or not implicits/macros are enabled, whether we are in a self or super call or * in a constructor suffix. These are represented as bits in the mask `contextMode`. - * - Some odds and ends: undetermined type pararameters of the current line of type inference; + * - Some odds and ends: undetermined type parameters of the current line of type inference; * contextual augmentation for error messages, tracking of the nesting depth. * * And behaviour: @@ -154,19 +154,19 @@ trait Contexts { self: Analyzer => * to buffer these for use in 'silent' type checking, when some recovery might be possible. * - `Context` is something of a Zipper for the tree were are typechecking: it `enclosingContextChain` * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`) - * and to collect in-scope implicit defintions (`implicitss`) + * and to collect in-scope implicit definitions (`implicitss`) * Supporting these are `imports`, which represents all `Import` trees in in the enclosing context chain. - * - In a similar vein, we can assess accessiblity (`isAccessible`.) + * - In a similar vein, we can assess accessibility (`isAccessible`.) * * More on error buffering: * When are type errors recoverable? In quite a few places, it turns out. Some examples: * trying to type an application with/without the expected type, or with/without implicit views * enabled. 
This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`. * - * Intially, starting from the `typer` phase, the contexts either buffer or report errors; + * Initially, starting from the `typer` phase, the contexts either buffer or report errors; * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more * fine grained control is needed based on the kind of error; ambiguity errors are often - * suppressed during exploraratory typing, such as determining whether `a == b` in an argument + * suppressed during exploratory typing, such as determining whether `a == b` in an argument * position is an assignment or a named argument, when `Infererencer#isApplicableSafe` type checks * applications with and without an expected type, or whtn `Typer#tryTypedApply` tries to fit arguments to * a function type with/without implicit views. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 74c28122a1..71558273a6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -3,7 +3,7 @@ * @author Martin Odersky */ -//todo: rewrite or disllow new T where T is a mixin (currently: <init> not a member of T) +//todo: rewrite or disallow new T where T is a mixin (currently: <init> not a member of T) //todo: use inherited type info also for vars and values //todo: disallow C#D in superclass //todo: treat :::= correctly @@ -159,7 +159,7 @@ trait Implicits { * @param tree The tree representing the implicit * @param subst A substituter that represents the undetermined type parameters * that were instantiated by the winning implicit. 
- * @param undetparams undeterminted type parameters + * @param undetparams undetermined type parameters */ class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) { override def toString = "SearchResult(%s, %s)".format(tree, diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 8979b26719..cf97474d9a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1017,7 +1017,7 @@ trait Infer extends Checkable { /** Substitute free type variables `undetparams` of type constructor * `tree` in pattern, given prototype `pt`. * - * @param tree the constuctor that needs to be instantiated + * @param tree the constructor that needs to be instantiated * @param undetparams the undetermined type parameters * @param pt0 the expected result type of the instance */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 0bb94be636..711cfba24d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -171,7 +171,7 @@ trait Namers extends MethodSynthesis { val newFlags = (sym.flags & LOCKED) | flags sym.rawInfo match { case tr: TypeRef => - // !!! needed for: pos/t5954d; the uniques type cache will happilly serve up the same TypeRef + // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef // over this mutated symbol, and we witness a stale cache for `parents`. 
tr.invalidateCaches() case _ => diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index b6387fd56b..50f658f68d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -384,7 +384,7 @@ trait NamesDefaults { self: Analyzer => * of arguments. * * @param args The list of arguments - * @param params The list of parameter sybols of the invoked method + * @param params The list of parameter symbols of the invoked method * @param argName A function that extracts the name of an argument expression, if it is a named argument. */ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index bb8c3c3c6d..fa4a764f1b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -336,7 +336,7 @@ trait PatternTypers { val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args)) // must call doTypedUnapply directly, as otherwise we get undesirable rewrites // and re-typechecks of the target of the unapply call in PATTERNmode, - // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object, + // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object, // but an arbitrary tree as is the case here val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt) diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 1daff02c23..d2046a158c 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -171,7 +171,7 @@ trait SyntheticMethods extends ast.TreeDSL { def thatCast(eqmeth: Symbol): Tree = gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe) - /* The equality method core for case classes and inline clases. + /* The equality method core for case classes and inline classes. * 1+ args: * (that.isInstanceOf[this.C]) && { * val x$1 = that.asInstanceOf[this.C] diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index aaa75b5ee1..bc879eefcf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -946,7 +946,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // Ignore type errors raised in later phases that are due to mismatching types with existential skolems // We have lift crashing in 2.9 with an adapt failure in the pattern matcher. - // Here's my hypothsis why this happens. The pattern matcher defines a variable of type + // Here's my hypothesis why this happens. 
The pattern matcher defines a variable of type // // val x: T = expr // diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index ba44126df2..352816803f 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -8,7 +8,7 @@ package util import scala.reflect.internal.Chars._ -/** Utilitity methods for doc comment strings +/** Utility methods for doc comment strings */ object DocStrings { diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 5d00141e6a..1343c469db 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -522,7 +522,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") /** The current presentation compiler runner */ @volatile private[interactive] var compileRunner: Thread = newRunnerThread() - /** Check that the currenyly executing thread is the presentation compiler thread. + /** Check that the currently executing thread is the presentation compiler thread. 
* * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase) */ @@ -1188,7 +1188,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - /** Parses and enters given source file, stroring parse tree in response */ + /** Parses and enters given source file, storing parse tree in response */ private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) { respond(response) { onUnitOf(source) { unit => diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala index 83f3fab925..ddc0c8a068 100644 --- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala +++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala @@ -6,7 +6,7 @@ import scala.language.implicitConversions import scala.reflect.ClassTag /** An abstract class for writing and reading Scala objects to and - * from a legible representation. The presesentation follows the following grammar: + * from a legible representation. The representation follows the following grammar: * {{{ * Pickled = `true` | `false` | `null` | NumericLit | StringLit | * Labelled | Pickled `,` Pickled @@ -85,7 +85,7 @@ abstract class Pickler[T] { object Pickler { /** A base class representing unpickler result. It has two subclasses: - * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, + * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures, * where a value of the given type `T` could not be unpickled from input. * @tparam T the type of unpickled values in case of success. 
*/ @@ -154,7 +154,7 @@ object Pickler { */ def pkl[T: Pickler] = implicitly[Pickler[T]] - /** A class represenenting `~`-pairs */ + /** A class representing `~`-pairs */ case class ~[+S, +T](fst: S, snd: T) /** A wrapper class to be able to use `~` s an infix method */ diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 41224f4c6c..f134f5ce3d 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -125,8 +125,8 @@ sealed abstract class Option[+A] extends Product with Serializable { * Although the use of null is discouraged, code written to use * $option must often interface with code that expects and returns nulls. * @example {{{ - * val initalText: Option[String] = getInitialText - * val textField = new JComponent(initalText.orNull,20) + * val initialText: Option[String] = getInitialText + * val textField = new JComponent(initialText.orNull,20) * }}} */ @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null) diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index 1c4f233e22..cf1de0c8e6 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -397,7 +397,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * @inheritdoc * * Another way to express this - * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`. + * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`. * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets. 
* * $willNotTerminateInf diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 20712f918c..0783beac0f 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -479,7 +479,7 @@ trait Iterator[+A] extends TraversableOnce[A] { } } - /** Produces a collection containing cummulative results of applying the + /** Produces a collection containing cumulative results of applying the * operator going left to right. * * $willNotTerminateInf @@ -502,8 +502,8 @@ trait Iterator[+A] extends TraversableOnce[A] { } else Iterator.empty.next() } - /** Produces a collection containing cummulative results of applying the operator going right to left. - * The head of the collection is the last cummulative result. + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. * * $willNotTerminateInf * $orderDependent diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index 64248aa755..9c336e8e31 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -44,7 +44,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea * * $willNotTerminateInf * - * Note: the execution of `length` may take time proportial to the length of the sequence. + * Note: the execution of `length` may take time proportional to the length of the sequence. */ def length: Int = { var these = self @@ -57,7 +57,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } /** Selects an element by its index in the $coll. - * Note: the execution of `apply` may take time proportial to the index value. + * Note: the execution of `apply` may take time proportional to the index value. 
* @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`. */ def apply(n: Int): A = { diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index e4b7371ed4..f548eac88d 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -406,7 +406,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // create a new HashSet1 with the hash we already know new HashSet1(ks1.head, hash) case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } @@ -426,7 +426,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // create a new HashSet1 with the hash we already know new HashSet1(ks1.head, hash) case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } @@ -445,7 +445,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // Should only have HSC1 if size > 1 this case _ => - // create a new HashSetCollison with the hash we already know and the new keys + // create a new HashSetCollision with the hash we already know and the new keys new HashSetCollision1(hash, ks1) } } else this diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 09a69b8096..9ed5162061 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -944,7 +944,7 @@ self => * * @param p the test predicate. * @return A new `Stream` representing the results of applying `p` to the - * oringal `Stream`. + * original `Stream`. 
* * @example {{{ * // Assume we have a Stream that takes the first 20 natural numbers diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 671b79f8c2..fd95e74fbc 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -41,7 +41,7 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") @SerialVersionUID(-8144992287952814767L) class DoubleLinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala index a43fe34c99..aafe34f50a 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala @@ -56,10 +56,10 @@ import scala.annotation.migration * @define Coll `DoubleLinkedList` * @define coll double linked list */ -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self => - /** A reference to the node in the linked list preceeding the current node. */ + /** A reference to the node in the linked list preceding the current node. 
*/ var prev: This = _ // returns that list if this list is empty diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index 092698ac0b..b3500367af 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -76,7 +76,7 @@ import generic._ * }}} */ @SerialVersionUID(-7308240733518833071L) -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") class LinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] with GenericTraversableTemplate[A, LinkedList] diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index 987b83d23b..a9d385bc5b 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -55,7 +55,7 @@ import scala.annotation.tailrec * * }}} */ -@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0") +@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self => var elem: A = _ diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index 5fafe23d9f..c124f35cd7 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -19,7 +19,7 @@ import generic.CanBuildFrom * on a map that will no longer have elements removed but will be * used heavily may save both time and storage space. 
* - * This map is not indended to contain more than 2^29 entries (approximately + * This map is not intended to contain more than 2^29 entries (approximately * 500 million). The maximum capacity is 2^30, but performance will degrade * rapidly as 2^30 is approached. * diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala index 471cd1cdde..af28df1b88 100644 --- a/src/library/scala/collection/mutable/MapLike.scala +++ b/src/library/scala/collection/mutable/MapLike.scala @@ -133,7 +133,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] /** Creates a new map containing the key/value mappings provided by the specified traversable object * and all the key/value mappings of this map. * - * Note that existing mappings from this map with the same key as those in `xs` will be overriden. + * Note that existing mappings from this map with the same key as those in `xs` will be overridden. * * @param xs the traversable object. * @return a new map containing mappings of this map and those provided by `xs`. diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index aade2ed6fb..24f5761cf5 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -31,7 +31,7 @@ object OpenHashMap { /** A mutable hash map based on an open hashing scheme. The precise scheme is * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unneccessarily penalised. In particular, + * with consecutive hash codes is not unnecessarily penalised. In particular, * mappings of consecutive integer keys should work without significant * performance loss. 
* diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala index 26b061b2a5..6a2b6de75a 100644 --- a/src/library/scala/collection/package.scala +++ b/src/library/scala/collection/package.scala @@ -18,7 +18,7 @@ package scala * * == Using Collections == * - * It is convienient to treat all collections as either + * It is convenient to treat all collections as either * a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as * these traits define the vast majority of operations * on a collection. diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 2b54e05841..016255dca4 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -744,7 +744,7 @@ self: ParIterableLike[T, Repr, Sequential] => * The index flag is initially set to maximum integer value. * * @param pred the predicate used to test the elements - * @return the longest prefix of this $coll of elements that satisy the predicate `pred` + * @return the longest prefix of this $coll of elements that satisfy the predicate `pred` */ def takeWhile(pred: T => Boolean): Repr = { val cbf = combinerFactory diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index 91c54fa8f1..d77dcb0658 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -206,7 +206,7 @@ package parallel { * Methods `beforeCombine` and `afterCombine` are called before and after * combining the buckets, respectively, given that the argument to `combine` * is not `this` (as required by the `combine` contract). 
- * They can be overriden in subclasses to provide custom behaviour by modifying + * They can be overridden in subclasses to provide custom behaviour by modifying * the receiver (which will be the return value). */ private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index e93a3284dc..914646320c 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -485,7 +485,7 @@ object Future { * The result becomes available once the asynchronous computation is completed. * * @tparam T the type of the result - * @param body the asychronous computation + * @param body the asynchronous computation * @param executor the execution context on which the future is run * @return the `Future` holding the result of the computation */ diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 494c955833..9634f6d900 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -93,7 +93,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. So, set has been // deprecated in order to eventually be able to make "setting" private - @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0") + @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def set(x: A): Unit = setVal(x) @@ -113,7 +113,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. 
So, unset has been // deprecated in order to eventually be able to make "unsetting" private - @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0") + @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def unset(): Unit = synchronized { isDefined = false diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala index 64836ecd6e..0f9656436b 100644 --- a/src/library/scala/io/StdIn.scala +++ b/src/library/scala/io/StdIn.scala @@ -4,7 +4,7 @@ package io import java.text.MessageFormat /** private[scala] because this is not functionality we should be providing - * in the standard library, at least not in this idiosyncractic form. + * in the standard library, at least not in this idiosyncratic form. * Factored into trait because it is better code structure regardless. */ private[scala] trait StdIn { diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index 74a174ea74..cf95f945ba 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -364,7 +364,7 @@ object BigDecimal { * to a decimal text representation, and build a `BigDecimal` based on that. * `BigDecimal.binary` will expand the binary fraction to the requested or default * precision. `BigDecimal.exact` will expand the binary fraction to the - * full number of digits, thus producing the exact decimal value corrsponding to + * full number of digits, thus producing the exact decimal value corresponding to * the binary fraction of that floating-point number. `BigDecimal` equality * matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`. * Note that since `0.1f != 0.1`, the same is not true for `Float`. 
Instead, diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala index 2d5f832e1f..a8fdfc1059 100644 --- a/src/library/scala/runtime/MethodCache.scala +++ b/src/library/scala/runtime/MethodCache.scala @@ -16,7 +16,7 @@ import java.lang.{ Class => JClass } import scala.annotation.tailrec /** An element of a polymorphic object cache. - * This class is refered to by the `CleanUp` phase. Each `PolyMethodCache` chain + * This class is referred to by the `CleanUp` phase. Each `PolyMethodCache` chain * must only relate to one method as `PolyMethodCache` does not identify * the method name and argument types. In practice, one variable will be * generated per call point, and will uniquely relate to the method called diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index f50059ce54..18fcbf8276 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -251,7 +251,7 @@ object ScalaRunTime { * * The primary motivation for this method is to provide a means for * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naïvely calling toString on said value. + * avoiding the pitfalls of naively calling toString on said value. * In particular, it addresses the fact that (a) toString cannot be * called on null and (b) depending on the apparent type of an * array, toString may or may not print it in a human-readable form. 
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index 1340a6c415..b1976ad4b6 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -119,7 +119,7 @@ package scala.sys { * ==Handling Input and Output== * * In the underlying Java model, once a `Process` has been started, one can - * get `java.io.InputStream` and `java.io.OutpuStream` representing its + * get `java.io.InputStream` and `java.io.OutputStream` representing its * output and input respectively. That is, what one writes to an * `OutputStream` is turned into input to the process, and the output of a * process can be read from an `InputStream` -- of which there are two, one diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala index 1cf55cb28d..8f811f950e 100644 --- a/src/manual/scala/man1/Command.scala +++ b/src/manual/scala/man1/Command.scala @@ -47,7 +47,7 @@ trait Command { def copyright = Section("COPYRIGHT", "This is open-source software, available to you under a BSD-like license. " & - "See accomponying \"copyright\" or \"LICENSE\" file for copying conditions. " & + "See accompanying \"copyright\" or \"LICENSE\" file for copying conditions. 
" & "There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A " & "PARTICULAR PURPOSE.") diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 31d25d4801..3954ed588e 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -360,7 +360,7 @@ object scalac extends Command { "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"), Definition( MItalic("selectivecps"), - MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"), + MItalic("@cps") & "-driven transform of selectiveanf assignments (CPS plugin)"), Definition( MItalic("uncurry"), "uncurry, translate function values to anonymous classes"), diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index 37ef4684ef..8459419fa5 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -136,7 +136,7 @@ abstract class BytecodeTest { object BytecodeTest { /** Parse `file` as a class file, transforms the ASM representation with `f`, - * and overwrites the orginal file. + * and overwrites the original file. */ def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) { val rfile = new reflect.io.File(file) diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java index d6b62e1d9e..848103f5cc 100644 --- a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java +++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java @@ -12,7 +12,7 @@ import java.util.Map; * A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class. 
 * - * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only + * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It's made public only * because it must be universally accessible for instrumentation needs. If you want to profile your test use * {@link Instrumentation} instead. */ diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala index 673dbce6f5..c6352905d1 100644 --- a/src/reflect/scala/reflect/api/Liftables.scala +++ b/src/reflect/scala/reflect/api/Liftables.scala @@ -52,7 +52,7 @@ trait Liftables { self: Universe => object Unliftable extends StandardUnliftableInstances { /** A helper method that simplifies creation of `Unliftable` instances. * Takes a partial function which is defined on correct representations of `T` - * and returns corresponing instances. + * and returns corresponding instances. * * For example to extract a reference to an object as object itself: * diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index 773c6b6fb4..adaf829b32 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -338,7 +338,7 @@ trait Mirrors { self: Universe => * with getting a field or invoking a getter method of the field. * * If `symbol` represents a field of a base class with respect to the class of the receiver, - * and this base field is overriden in the class of the receiver, then this method will retrieve + * and this base field is overridden in the class of the receiver, then this method will retrieve * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor. */ def get: Any @@ -352,7 +352,7 @@ trait Mirrors { self: Universe => * with setting a field or invoking a setter method of the field. 
* * If `symbol` represents a field of a base class with respect to the class of the receiver, - * and this base field is overriden in the class of the receiver, then this method will set + * and this base field is overridden in the class of the receiver, then this method will set * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor. */ def set(value: Any): Unit diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 03f7b2d218..472da60338 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -72,10 +72,10 @@ trait Names { * @group API */ abstract class NameApi { - /** Checks wether the name is a term name */ + /** Checks whether the name is a term name */ def isTermName: Boolean - /** Checks wether the name is a type name */ + /** Checks whether the name is a type name */ def isTypeName: Boolean /** Returns a term name that wraps the same string as `this` */ diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index 18d185067e..c01029d067 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -339,7 +339,7 @@ trait Symbols { self: Universe => @deprecated("Use `overrides` instead", "2.11.0") def allOverriddenSymbols: List[Symbol] - /** Returns all symbols overriden by this symbol. + /** Returns all symbols overridden by this symbol. * * @group Basics */ diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index aeaa38c317..9ecd87c17e 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -158,7 +158,7 @@ trait Trees { self: Universe => /** Do all parts of this tree satisfy predicate `p`? */ def forAll(p: Tree => Boolean): Boolean - /** Tests whether two trees are structurall equal. 
 + /** Tests whether two trees are structurally equal. * Note that `==` on trees is reference equality. */ def equalsStructure(that : Tree): Boolean diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 0ca8611719..54f64153c1 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -144,7 +144,7 @@ trait BaseTypeSeqs { "\n --- because ---\n"+msg) } - /** A merker object for a base type sequence that's no yet computed. + /** A marker object for a base type sequence that's not yet computed. * used to catch inheritance cycles */ val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) @@ -152,7 +152,7 @@ trait BaseTypeSeqs { /** Create a base type sequence consisting of a single type */ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp)) - /** Create the base type sequence of a compound type wuth given tp.parents */ + /** Create the base type sequence of a compound type with given tp.parents */ def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = { val tsym = tp.typeSymbol val parents = tp.parents diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 7e2d124486..e73fc7e04b 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -931,7 +931,7 @@ trait Definitions extends api.StandardDefinitions { // members of class scala.Any - // TODO these aren't final! They are now overriden in AnyRef/Object. Prior to the fix + // TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix // for SI-8129, they were actually *overloaded* by the members in AnyRef/Object. // We should unfinalize these, override in AnyValClass, and make the overrides final. // Refchecks never actually looks at these, so its just for consistency. 
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 4a35e024de..0cbb976a98 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -277,7 +277,7 @@ trait Mirrors extends api.Mirrors { // TODO - having these as objects means they elude the attempt to // add synchronization in SynchronizedSymbols. But we should either - // flip on object overrides or find some other accomodation, because + // flip on object overrides or find some other accommodation, because // lazy vals are unnecessarily expensive relative to objects and it // is very beneficial for a handful of bootstrap symbols to have // first class identities diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index b50f324074..32d12d305e 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -131,11 +131,11 @@ trait Names extends api.Names { newTermName(cs, offset, len, cachedString).toTypeName /** Create a term name from string. */ - @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala + @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null) /** Create a type name from string. 
*/ - @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala + @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala def newTypeName(s: String): TypeName = newTermName(s).toTypeName /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */ diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index c16d8778d9..4d0e31b037 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -204,7 +204,7 @@ trait Positions extends api.Positions { self: SymbolTable => /** Set position of all children of a node * @param pos A target position. * Uses the point of the position as the point of all positions it assigns. - * Uses the start of this position as an Offset position for unpositioed trees + * Uses the start of this position as an Offset position for unpositioned trees * without children. * @param trees The children to position. All children must be positionable. */ diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 614e71b597..cca33253be 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -27,7 +27,7 @@ trait StdAttachments { def importAttachment(importer: Importer): this.type } - /** Attachment that doesn't contain any reflection artificats and can be imported as-is. */ + /** Attachment that doesn't contain any reflection artifacts and can be imported as-is. 
*/ trait PlainAttachment extends ImportableAttachment { def importAttachment(importer: Importer): this.type = this } @@ -42,7 +42,7 @@ trait StdAttachments { */ case object BackquotedIdentifierAttachment extends PlainAttachment - /** Identifies trees are either result or intermidiate value of for loop desugaring. + /** Identifies trees are either result or intermediate value of for loop desugaring. */ case object ForAttachment extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8e86e6fad9..e91bfadc85 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -682,7 +682,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * to fix the core of the compiler risk stability a few weeks before the final release. * upd. Haha, "a few weeks before the final release". This surely sounds familiar :) * - * However we do need to fix this for runtime reflection, since this idionsynchrazy is not something + * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something * we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a * runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization. */ @@ -740,7 +740,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase. * This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the - * info is in the responsability of the caller. Doing it eagerly here was tried (0ccdb151f) but + * info is in the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but * has proven to lead to bugs (SI-8907). 
* * Here's an example where one can see all four of FF FT TF TT for (isStatic, isMethod) at @@ -2812,7 +2812,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def outerSource: Symbol = // SI-6888 Approximate the name to workaround the deficiencies in `nme.originalName` - // in the face of clases named '$'. SI-2806 remains open to address the deeper problem. + // in the face of classes named '$'. SI-2806 remains open to address the deeper problem. if (originalName endsWith (nme.OUTER)) initialize.referenced else NoSymbol @@ -3570,7 +3570,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param syms the prototypical symbols * @param symFn the function to create new symbols * @param tpe the prototypical type - * @return the new symbol-subsituted type + * @return the new symbol-substituted type */ def deriveType(syms: List[Symbol], symFn: Symbol => Symbol)(tpe: Type): Type = { val syms1 = deriveSymbols(syms, symFn) @@ -3585,7 +3585,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param as arguments to be passed to symFn together with symbols from syms (must be same length) * @param symFn the function to create new symbols based on `as` * @param tpe the prototypical type - * @return the new symbol-subsituted type + * @return the new symbol-substituted type */ def deriveType2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol)(tpe: Type): Type = { val syms1 = deriveSymbols2(syms, as, symFn) diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6e8e992d16..b3e11a826e 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -713,7 +713,7 @@ abstract class TreeGen { val rhsUnchecked = mkUnchecked(rhs) - // TODO: clean this up -- there is too much information packked into mkPatDef's `pat` argument + // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` 
argument // when it's a simple identifier (case Some((name, tpt)) -- above), // pat should have the type ascription that was specified by the user // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index c521277f69..4657fa0000 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -588,7 +588,7 @@ abstract class TreeInfo { private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol - /** Is this pattern node a synthetic catch-all case, added during PartialFuction synthesis before we know + /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know * whether the user provided cases are exhaustive. */ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match { case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true @@ -815,7 +815,7 @@ abstract class TreeInfo { object Unapplied { // Duplicated with `spliceApply` def unapply(tree: Tree): Option[Tree] = tree match { - // SI-7868 Admit Select() to account for numeric widening, e.g. <unappplySelector>.toInt + // SI-7868 Admit Select() to account for numeric widening, e.g. <unapplySelector>.toInt case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil) => Some(fun) case Apply(fun, _) => unapply(fun) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a95f626a0b..ce36f7efa3 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -731,7 +731,7 @@ trait Types * `substThis(from, to).substSym(symsFrom, symsTo)`. 
* * `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that - * symbol substitution occured before `ThisType` substitution. Consequently, in substitution + * symbol substitution occurred before `ThisType` substitution. Consequently, in substitution * of a `SingleType(ThisType(`from`), sym), symbols were rebound to `from` rather than `to`. */ def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type = @@ -1687,7 +1687,7 @@ trait Types */ private var refs: Array[RefMap] = _ - /** The initialization state of the class: UnInialized --> Initializing --> Initialized + /** The initialization state of the class: UnInitialized --> Initializing --> Initialized * Syncnote: This var need not be protected with synchronized, because * it is accessed only from expansiveRefs, which is called only from * Typer. @@ -1972,7 +1972,7 @@ trait Types require(sym.isNonClassType, sym) /* Syncnote: These are pure caches for performance; no problem to evaluate these - * several times. Hence, no need to protected with synchronzied in a mutli-threaded + * several times. Hence, no need to protected with synchronized in a multi-threaded * usage scenario. */ private var relativeInfoCache: Type = _ @@ -2643,7 +2643,7 @@ trait Types * nowhere inside a type argument * - no quantified type argument contains a quantified variable in its bound * - the typeref's symbol is not itself quantified - * - the prefix is not quanitified + * - the prefix is not quantified */ def isRepresentableWithWildcards = { val qset = quantified.toSet @@ -3101,7 +3101,7 @@ trait Types // addressed here: all lower bounds are retained and their intersection calculated when the // bounds are solved. // - // In a side-effect free universe, checking tp and tp.parents beofre checking tp.baseTypeSeq + // In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq // would be pointless. 
In this case, each check we perform causes us to lose specificity: in // the end the best we'll do is the least specific type we tested against, since the typevar // does not see these checks as "probes" but as requirements to fulfill. @@ -3332,7 +3332,7 @@ trait Types * * SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`, * which contain `ErasedValueType`-s. In order to correctly consider the overriding - * and overriden signatures as equivalent in `run/t6385.scala`, it is critical that + * and overridden signatures as equivalent in `run/t6385.scala`, it is critical that * this type contains the erasure of the wrapped type, rather than the unerased type * of the value class itself, as was originally done. * diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 12b765b7a6..ef22df3f2e 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -32,7 +32,7 @@ trait Variances { /** Is every symbol in the owner chain between `site` and the owner of `sym` * either a term symbol or private[this]? If not, add `sym` to the set of - * esacped locals. + * escaped locals. * @pre sym.isLocalToThis */ @tailrec final def checkForEscape(sym: Symbol, site: Symbol) { diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala index a44bb54734..662d841c91 100644 --- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala +++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala @@ -5,7 +5,7 @@ package annotations /** * An annotation that designates the annotated type should not be checked for violations of * type parameter bounds in the `refchecks` phase of the compiler. 
This can be used by synthesized - * code the uses an inferred type of an expression as the type of an artifict val/def (for example, + * code the uses an inferred type of an expression as the type of an artifact val/def (for example, * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]]. * * @since 2.10.3 diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index de54f3768e..42b13944f6 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -12,7 +12,7 @@ import TypesStats._ trait FindMembers { this: SymbolTable => - /** Implementatation of `Type#{findMember, findMembers}` */ + /** Implementation of `Type#{findMember, findMembers}` */ private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) { protected val initBaseClasses: List[Symbol] = tpe.baseClasses diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index c1c43178e5..f79099213a 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -75,7 +75,7 @@ private[internal] trait TypeConstraints { /* Syncnote: Type constraints are assumed to be used from only one * thread. They are not exposed in api.Types and are used only locally * in operations that are exposed from types. Hence, no syncing of any - * variables should be ncessesary. + * variables should be necessary. 
*/ /** Guard these lists against AnyClass and NothingClass appearing, diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index a9a7c7780d..3a7a7626fb 100644 --- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -7,13 +7,13 @@ import scala.collection.generic.Clearable import scala.collection.mutable.{Set => MSet} /** - * A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other + * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other * hard references are associated with them. Its primary use case is as a canonical reference * identity holder (aka "hash-consing") via findEntryOrUpdate * * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException * - * This set implmeentation is not in general thread safe without external concurrency control. However it behaves + * This set implementation is not in general thread safe without external concurrency control. However it behaves * properly when GC concurrently collects elements in this set. 
*/ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] { @@ -26,7 +26,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D /** * queue of Entries that hold elements scheduled for GC - * the removeStaleEntries() method works through the queue to remeove + * the removeStaleEntries() method works through the queue to remove * stale entries from the table */ private[this] val queue = new ReferenceQueue[A] @@ -62,7 +62,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt /** - * find the bucket associated with an elements's hash code + * find the bucket associated with an element's hash code */ private[this] def bucketFor(hash: Int): Int = { // spread the bits around to try to avoid accidental collisions using the diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index 69ede42cc7..1eb6832b5b 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -47,7 +47,7 @@ trait Enclosures { /** Tries to guess a position for the enclosing application. * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really. - * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion. + * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion. * Surprisingly, quite often we can do this by navigation the `enclosingMacros` stack. 
*/ def enclosingPosition: Position diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 1eb67215bb..3b57169565 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -44,7 +44,7 @@ abstract class Universe extends scala.reflect.api.Universe { * it is imperative that you either call `untypecheck` or do `changeOwner(tree, x, y)`. * * Since at the moment `untypecheck` has fundamental problem that can sometimes lead to tree corruption, - * `changeOwner` becomes an indispensible tool in building 100% robust macros. + * `changeOwner` becomes an indispensable tool in building 100% robust macros. * Future versions of the reflection API might obviate the need in taking care of * these low-level details, but at the moment this is what we've got. */ diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index a0da3f3bbf..1c751fb93b 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -38,7 +38,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader) - // overriden by ReflectGlobal + // overridden by ReflectGlobal def rootClassLoader: ClassLoader = this.getClass.getClassLoader trait JavaClassCompleter diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 6d0cb0df45..4f0c0253e9 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -39,7 +39,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb * Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. 
types use a number * of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make * sure that things stay deterministic). However, in case of symbols there's hope, because it's only during - * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable + * initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable * (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe. * * Note that by saying "symbols become immutable" I mean literally that. In a very common case of PackageClassSymbol's, @@ -102,10 +102,10 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb * * Just a volatile var is fine, because: * 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized - * that effecively guards `Symbol.initialize`), which means that there can't be update conflicts. + * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts. * 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone - * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet) - * or a no-op (if the symbol is already inited), and that is fine in both cases. + * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't initialized yet) + * or a no-op (if the symbol is already initialized), and that is fine in both cases. * * upd. It looks like we also need to keep track of a mask of initialized flags to make sure * that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag). 
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala index e66e4eff29..df49e6a2e4 100644 --- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala @@ -30,7 +30,7 @@ class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends Par |Failed to initialize compiler: %s not found. |** Note that as of 2.8 scala does not assume use of the java classpath. |** For the old behavior pass -usejavacp to scala, or if using a Settings - |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) + |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) ) value } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index f9f7388363..0347622cf4 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -45,7 +45,7 @@ import java.io.File * all variables defined by that code. To extract the result of an * interpreted line to show the user, a second "result object" is created * which imports the variables exported by the above object and then - * exports members called "$eval" and "$print". To accomodate user expressions + * exports members called "$eval" and "$print". To accommodate user expressions * that read from variables or methods defined in previous statements, "import" * statements are used. 
* diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index ebca3e7e62..c80b94bf89 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -656,7 +656,7 @@ object JavapClass { // FunFinder.funs(ks) finds anonfuns class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) { - // manglese for closure: typename, $anonfun or lamba, opt method, digits + // manglese for closure: typename, $anonfun or lambda, opt method, digits val closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r // manglese for closure @@ -743,7 +743,7 @@ object JavapClass { case _ => Nil } val res = fs map (_.to[Seq]) getOrElse Seq() - // on second thought, we don't care about lamba method classes, just the impl methods + // on second thought, we don't care about lambda method classes, just the impl methods val rev = res flatMap { case x @ closure(_, "lambda", _, _) => labdaMethod(x, target) @@ -787,7 +787,7 @@ object JavapClass { } } /** Translate the supplied targets to patterns for anonfuns. - * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lamba, + * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lambda, * and lambda includes the extra dollar, func is a method name, and n is an int. * The typename for a nested class is dollar notation, Betty$Bippy. 
* diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala index f177816b30..9f555aee14 100644 --- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala @@ -80,7 +80,7 @@ trait LoopCommands { def recording(line: String) = Result(keepRunning = true, Option(line)) // most commands do not want to micromanage the Result, but they might want - // to print something to the console, so we accomodate Unit and String returns. + // to print something to the console, so we accommodate Unit and String returns. implicit def resultFromUnit(x: Unit): Result = default implicit def resultFromString(msg: String): Result = { echoCommandMessage(msg) diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala index f69a5b487d..8d8140b638 100644 --- a/src/repl/scala/tools/nsc/interpreter/Power.scala +++ b/src/repl/scala/tools/nsc/interpreter/Power.scala @@ -155,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re } object InternalInfo extends LowPriorityInternalInfo { } - /** Now dealing with the problem of acidentally calling a method on Type + /** Now dealing with the problem of accidentally calling a method on Type * when you're holding a Symbol and seeing the Symbol converted to the * type of Symbol rather than the type of the thing represented by the * symbol, by only implicitly installing one method, "?", and the rest diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala index 36a1405b11..034416e844 100644 --- a/src/scaladoc/scala/tools/ant/Scaladoc.scala +++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala @@ -543,7 +543,7 @@ class Scaladoc extends ScalaMatchingTask { /** Tests if a file exists and prints a warning in case it doesn't. Always * returns the file, even if it doesn't exist. 
* - * @param file A file to test for existance. + * @param file A file to test for existence. * @return The same file. */ private def existing(file: File): File = { diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js index 478f2e38ac..680ead7a59 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js @@ -25,7 +25,7 @@ $(document).ready(function() $(".diagram-container").css("display", "block"); $(".diagram").each(function() { - // store inital dimensions + // store initial dimensions $(this).data("width", $("svg", $(this)).width()); $(this).data("height", $("svg", $(this)).height()); // store unscaled clone of SVG element diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index 6932f01e9a..7fe8903c76 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -117,7 +117,7 @@ trait MemberEntity extends Entity { def toRoot: List[MemberEntity] /** The templates in which this member has been declared. The first element of the list is the template that contains - * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If + * the currently active declaration of this member, subsequent elements are declarations that have been overridden. If * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All * elements of this list are in the linearization of `inTemplate`. 
*/ def inDefinitionTemplates: List[TemplateEntity] diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala index e8dfe0ac50..c20bbaeef1 100644 --- a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala +++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala @@ -78,7 +78,7 @@ trait Bench extends Benchmark { } /** - * Prints results of the benchmark. May be overidden in benchmarks. + * Prints results of the benchmark. May be overridden in benchmarks. */ def printResults {} diff --git a/test/files/neg/literate_existentials.scala b/test/files/neg/literate_existentials.scala index 8580347bf9..5537c50b3a 100644 --- a/test/files/neg/literate_existentials.scala +++ b/test/files/neg/literate_existentials.scala @@ -187,7 +187,7 @@ object LiterateExistentials { // implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails -// The preceeding line causes the compiler to generate an error message. +// The preceding line causes the compiler to generate an error message. 
diff --git a/test/files/neg/structural.scala b/test/files/neg/structural.scala index d783399317..00459676a9 100644 --- a/test/files/neg/structural.scala +++ b/test/files/neg/structural.scala @@ -11,13 +11,13 @@ object Test extends App { def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail - def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //suceed + def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //succeeds - def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //suceed - def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //suceed - def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //suceed + def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //succeeds + def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //succeeds + def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //succeeds def f9[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): D }) = x.m[Tata](null) //fail - def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //suceed + def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //succeeds } diff 
--git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala index a9777f02ed..11af67a529 100644 --- a/test/files/pos/t4070.scala +++ b/test/files/pos/t4070.scala @@ -20,7 +20,7 @@ package b { /* -// With crash below the clasess: +// With crash below the classes: % scalac -Dscalac.debug.tvar ./a.scala [ create] ?_$1 ( In Foo#crash ) [ setInst] tv[Int] ( In Foo#crash, _$1=tv[Int] ) diff --git a/test/files/pos/t8828.scala b/test/files/pos/t8828.scala index 92092b4dd4..182aba54c0 100644 --- a/test/files/pos/t8828.scala +++ b/test/files/pos/t8828.scala @@ -11,7 +11,7 @@ package inner { } // the trait is sealed and doWork is not - // and cannot be overriden: no warning + // and cannot be overridden: no warning private[outer] sealed trait C { def doWork(a: A): A = a } diff --git a/test/files/presentation/visibility/src/Completions.scala b/test/files/presentation/visibility/src/Completions.scala index 8c07934915..69ec3959ad 100644 --- a/test/files/presentation/visibility/src/Completions.scala +++ b/test/files/presentation/visibility/src/Completions.scala @@ -11,7 +11,7 @@ package accessibility { def secretPublic(): Unit def someTests(other: Foo) { - other./*!*/secretPrivate // should be all but scretThis + other./*!*/secretPrivate // should be all but secretThis this./*!*/secretProtected // should hit five completions } @@ -25,7 +25,7 @@ package accessibility { class UnrelatedClass { def someTests(foo: Foo) { - foo./*!*/ // should list public and protected[accessiblity] + foo./*!*/ // should list public and protected[accessibility] } } diff --git a/test/files/run/applydynamic_sip.scala b/test/files/run/applydynamic_sip.scala index cf918a82ed..47d0c6a303 100644 --- a/test/files/run/applydynamic_sip.scala +++ b/test/files/run/applydynamic_sip.scala @@ -40,7 +40,7 @@ object Test extends App { // qual.sel(arg = a, a2: _*) // qual.sel(arg, arg2 = "a2", a2: _*) - // If qual.sel appears immediately on the left-hand side of an assigment + // If qual.sel appears immediately on 
the left-hand side of an assignment // qual.updateDynamic(“sel”)(expr) qual.sel = expr diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala index 0d7168fa89..7f5af74c3f 100644 --- a/test/files/run/t5313.scala +++ b/test/files/run/t5313.scala @@ -11,7 +11,7 @@ object Test extends IcodeComparison { def bar = { var kept1 = new Object val result = new java.lang.ref.WeakReference(kept1) - kept1 = null // we can't eliminate this assigment because result can observe + kept1 = null // we can't eliminate this assignment because result can observe // when the object has no more references. See SI-5313 kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered var erased2 = null // we can eliminate this store because it's never used diff --git a/test/files/run/t6114.scala b/test/files/run/t6114.scala index cb880ece00..8ad02d5bb2 100644 --- a/test/files/run/t6114.scala +++ b/test/files/run/t6114.scala @@ -51,7 +51,7 @@ object Test extends App { val next = list.asScala ++ List(4,5,6) assert(next != list.asScala) - // Note: Clone is hidden at this level, so no overriden cloning. + // Note: Clone is hidden at this level, so no overridden cloning. 
} testList diff --git a/test/files/run/t8253.scala b/test/files/run/t8253.scala index c4800b4491..a00d8b91a4 100644 --- a/test/files/run/t8253.scala +++ b/test/files/run/t8253.scala @@ -10,5 +10,5 @@ object Test extends App { show("<sample xmlns:foo={identity(ns1)}/>", q"<sample xmlns:foo={ns1}/>") // `identity(foo)` used to match the overly permissive match in SymbolXMLBuilder - // which was intented to more specifically match `_root_.scala.xml.Text(...)` + // which was intended to more specifically match `_root_.scala.xml.Text(...)` } diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala index 774d6f428b..468bcb6dd1 100644 --- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala @@ -36,7 +36,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col // used to check if constructed collection is valid def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = { - // can be overriden in subclasses + // can be overridden in subclasses true } diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala index e474ae737c..6b45a4e9f3 100644 --- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala +++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala @@ -261,7 +261,7 @@ object ScalaRunTime { * * The primary motivation for this method is to provide a means for * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naïvely calling toString on said value. + * avoiding the pitfalls of naively calling toString on said value. 
* In particular, it addresses the fact that (a) toString cannot be * called on null and (b) depending on the apparent type of an * array, toString may or may not print it in a human-readable form. diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala index 56d8312336..196174c199 100644 --- a/test/junit/scala/collection/TraversableOnceTest.scala +++ b/test/junit/scala/collection/TraversableOnceTest.scala @@ -43,8 +43,8 @@ class TraversableOnceTest { def testReturnTheFirstMatch() = { val d = List(1, 2, 3, 4, 5, 6, 7, 8) def f(x: Int) = x % 3; - assert(d.maxBy(f) == 2, "If multiple elements evaluted to the largest value, maxBy should return the first one.") - assert(d.minBy(f) == 3, "If multiple elements evaluted to the largest value, minBy should return the first one.") + assert(d.maxBy(f) == 2, "If multiple elements evaluated to the largest value, maxBy should return the first one.") + assert(d.minBy(f) == 3, "If multiple elements evaluated to the largest value, minBy should return the first one.") } // Make sure it evaluates f no more than list.length times. 
@@ -56,7 +56,7 @@ class TraversableOnceTest { evaluatedCountOfMaxBy += 1 x * 10 }) - assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMaxBy times.") + assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMaxBy times.") var evaluatedCountOfMinBy = 0 @@ -64,7 +64,7 @@ class TraversableOnceTest { evaluatedCountOfMinBy += 1 x * 10 }) - assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMinBy times.") + assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMinBy times.") } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala index 5b0f0f238a..5430e33d6c 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala @@ -74,7 +74,7 @@ class MethodLevelOpts { """.stripMargin val m = singleMethod(methodOptCompiler)(code) assertTrue(m.handlers.length == 2) - assertSameCode(m.instructions.dropNonOp, // drop line numbers and lables that are only used by line numbers + assertSameCode(m.instructions.dropNonOp, // drop line numbers and labels that are only used by line numbers // one single label left :-) List(Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), Op(POP), Op(ICONST_2), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), VarOp(ASTORE, 3), Op(ICONST_2), Op(IRETURN), Label(20), Op(ICONST_2), Op(IRETURN)) diff --git a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala index 524d2e45e0..91f94e09b6 100644 --- a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala +++ b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala @@ -19,7 
+19,7 @@ class StdNamesTest { } @Test - def testNewTermNameNegativeLenght(): Unit = { + def testNewTermNameNegativeLength(): Unit = { assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, -1)) assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, 0)) } diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala index f4964b63b1..2d08f22d8b 100644 --- a/test/pending/jvm/cf-attributes.scala +++ b/test/pending/jvm/cf-attributes.scala @@ -62,7 +62,7 @@ object anonymousClasses { //InnerClass: // public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$ val x = new Foo() { - override def foo() { println("foo (overriden)"); } + override def foo() { println("foo (overridden)"); } def dummy = 0 } } diff --git a/test/pending/jvm/timeout.scala b/test/pending/jvm/timeout.scala index 22b3647dce..8f29f8ddbe 100644 --- a/test/pending/jvm/timeout.scala +++ b/test/pending/jvm/timeout.scala @@ -1,4 +1,4 @@ -// Test is in pending because although it suceeds locally, +// Test is in pending because although it succeeds locally, // it takes too long on the machine which runs nightly tests. 
// // [partest] EXPECTED: 100 < x < 900 diff --git a/test/scaladoc/resources/SI-3314-diagrams.scala b/test/scaladoc/resources/SI-3314-diagrams.scala index b80a97b522..7d2cc9447c 100644 --- a/test/scaladoc/resources/SI-3314-diagrams.scala +++ b/test/scaladoc/resources/SI-3314-diagrams.scala @@ -7,7 +7,7 @@ package scala.test.scaladoc { * / / / | \ \ \ * Mon Tue Wed Thu Fri Sat Sun * - * - each member should receive an inhertiance diagram: + * - each member should receive an inheritance diagram: * Value * | * | diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala index 5d692f59ad..b59d2f410d 100644 --- a/test/scaladoc/resources/implicit-inheritance-override.scala +++ b/test/scaladoc/resources/implicit-inheritance-override.scala @@ -35,7 +35,7 @@ class DerivedC extends Base { class DerivedD extends Base { /** - * @tparam T The overriden type parameter comment + * @tparam T The overridden type parameter comment */ override def function[T](arg1: T, arg2: String): Double = 3.0d }
\ No newline at end of file diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala index 6ed51366cb..90e43ac2ed 100644 --- a/test/scaladoc/resources/implicits-ambiguating-res.scala +++ b/test/scaladoc/resources/implicits-ambiguating-res.scala @@ -1,5 +1,5 @@ /** - * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the + * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the * class' own members * * {{{ diff --git a/test/scaladoc/resources/implicits-shadowing-res.scala b/test/scaladoc/resources/implicits-shadowing-res.scala index c5e9493bf3..b7f3ceb895 100644 --- a/test/scaladoc/resources/implicits-shadowing-res.scala +++ b/test/scaladoc/resources/implicits-shadowing-res.scala @@ -1,5 +1,5 @@ /** - * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the + * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the * class' own members * * {{{ diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala index da0f253a37..51633be440 100644 --- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala +++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala @@ -485,7 +485,7 @@ object Test extends Properties("HtmlFactory") { """, true), (Some("DerivedD"), """def function[T](arg1: T, arg2: String): Double - T The overriden type parameter comment + T The overridden type parameter comment arg1 The T term comment arg2 The string comment returns The return comment |