From d7547cb76d41da04c8448cf1de8a5b5686152d17 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 29 Jun 2015 00:29:15 -0700 Subject: SI-6810 Disallow EOL in char literal It's clear that char literals are one-lined like normal string literals. By the same token, pun intended, char literals accept unicode escapes the same as string literals, including `\u000A`. This commit adds the usual exclusions (CR, NL, SU). The spec is outdated in outlawing chars that are not "printable", in particular, the ASCII control codes. The original intention may have been that the ordinary string escapes are required, such as "\b\n". Note that some common escapes are absent, such as "\a". --- .../scala/tools/nsc/ast/parser/Scanners.scala | 8 +++++--- test/files/neg/t6810.check | 22 ++++++++++++++++++++++ test/files/neg/t6810.scala | 22 ++++++++++++++++++++++ 3 files changed, 49 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t6810.check create mode 100644 test/files/neg/t6810.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 92833d647b..d5cb0d6a3b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -515,7 +515,7 @@ trait Scanners extends ScannersCommon { charLitOr(getIdentRest) else if (isOperatorPart(ch) && (ch != '\\')) charLitOr(getOperatorRest) - else { + else if (!isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) { getLitChar() if (ch == '\'') { nextChar() @@ -525,6 +525,8 @@ trait Scanners extends ScannersCommon { syntaxError("unclosed character literal") } } + else + syntaxError("unclosed character literal") } fetchSingleQuote() case '.' => @@ -690,7 +692,7 @@ trait Scanners extends ScannersCommon { private def unclosedStringLit(): Unit = syntaxError("unclosed string literal") - private def getRawStringLit(): Unit = { + @tailrec private def getRawStringLit(): Unit = { if (ch == '\"') { nextRawChar() if (isTripleQuote()) { @@ -707,7 +709,7 @@ trait Scanners extends ScannersCommon { } } - @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = { + @tailrec private def getStringPart(multiLine: Boolean): Unit = { def finishStringPart() = { setStrVal() token = STRINGPART diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check new file mode 100644 index 0000000000..8b0e6715aa --- /dev/null +++ b/test/files/neg/t6810.check @@ -0,0 +1,22 @@ +t6810.scala:4: error: unclosed character literal + val y = ' + ^ +t6810.scala:5: error: unclosed character literal +' // but not embedded EOL sequences not represented as escapes +^ +t6810.scala:9: error: unclosed string literal + val Y = " + ^ +t6810.scala:10: error: unclosed string literal +" // obviously not +^ +t6810.scala:20: error: unclosed quoted identifier + val ` + ^ +t6810.scala:21: error: unclosed quoted identifier +` = EOL // not raw string literals aka triple-quoted, multiline strings +^ +t6810.scala:22: error: '=' expected but '}' found. 
+} +^ +7 errors found diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala new file mode 100644 index 0000000000..a9d8813389 --- /dev/null +++ b/test/files/neg/t6810.scala @@ -0,0 +1,22 @@ + +trait t6810 { + val x = '\u000A' // char literals accept arbitrary unicode escapes + val y = ' +' // but not embedded EOL sequences not represented as escapes + val z = '\n' // normally, expect this escape + + val X = "\u000A" // it's the same as ordinary string literals + val Y = " +" // obviously not + val Z = "\n" // normally, expect this escape + + val A = """ +""" // which is what these are for + val B = s""" +""" // or the same for interpolated strings + + import scala.compat.Platform.EOL + val `\u000A` = EOL // backquoted identifiers are arbitrary string literals + val ` +` = EOL // not raw string literals aka triple-quoted, multiline strings +} -- cgit v1.2.3 From aad7c67fe047c6ea9b40ff9588adf0b51dbcf57b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 29 Jun 2015 01:05:06 -0700 Subject: SI-6810 Test for CR --- test/files/neg/t6810.check | 12 +++++++++--- test/files/neg/t6810.scala | 4 ++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check index 8b0e6715aa..497ef35070 100644 --- a/test/files/neg/t6810.check +++ b/test/files/neg/t6810.check @@ -16,7 +16,13 @@ t6810.scala:20: error: unclosed quoted identifier t6810.scala:21: error: unclosed quoted identifier ` = EOL // not raw string literals aka triple-quoted, multiline strings ^ -t6810.scala:22: error: '=' expected but '}' found. -} +t6810.scala:24: error: unclosed character literal + val b = ' + ^ +t6810.scala:25: error: unclosed character literal +' // CR seen as EOL by scanner +^ +t6810.scala:24: error: '=' expected but ';' found. + val b = ' ^ -7 errors found +9 errors found diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala index a9d8813389..50c305d70c 100644 --- a/test/files/neg/t6810.scala +++ b/test/files/neg/t6810.scala @@ -19,4 +19,8 @@ trait t6810 { val `\u000A` = EOL // backquoted identifiers are arbitrary string literals val ` ` = EOL // not raw string literals aka triple-quoted, multiline strings + + val a = '\u000D' // similar treatment of CR + val b = ' ' // CR seen as EOL by scanner + val c = '\r' // traditionally } -- cgit v1.2.3 From ab527ce8cc0220443bda5cc3337ebae158c2fe74 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 29 Jun 2015 07:57:33 -0700 Subject: SI-6810 Spec reflects literal parsing literally Emphasize that literal parsing accepts Unicode escapes as if they were escaped. In particular, a newline represented by its Unicode escape does not terminate the line in the middle of a literal. --- spec/01-lexical-syntax.md | 49 ++++++++++++++++++++++++++--------------------- spec/13-syntax-summary.md | 5 +++-- 2 files changed, 30 insertions(+), 24 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index e26cb796c8..06e3a458a4 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -398,40 +398,46 @@ members of type `Boolean`. ### Character Literals ```ebnf -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’ ``` A character literal is a single character enclosed in quotes. -The character is either a printable unicode character or is described -by an [escape sequence](#escape-sequences). 
+The character can be any Unicode character except the single quote +delimiter or `\u000A` (LF) or `\u000D` (CR); +or any Unicode character represented by either a +[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences). > ```scala > 'a' '\u0041' '\n' '\t' > ``` -Note that `'\u000A'` is _not_ a valid character literal because -Unicode conversion is done before literal parsing and the Unicode -character `\u000A` (line feed) is not a printable -character. One can use instead the escape sequence `'\n'` or -the octal escape `'\12'` ([see here](#escape-sequences)). +Note that although Unicode conversion is done early during parsing, +so that Unicode characters are generally equivalent to their escaped +expansion in the source text, literal parsing accepts arbitrary +Unicode escapes, including the character literal `'\u000A'`, +which can also be written using the escape sequence `'\n'`. ### String Literals ```ebnf stringLiteral ::= ‘"’ {stringElement} ‘"’ -stringElement ::= printableCharNoDoubleQuote | charEscapeSeq +stringElement ::= charNoDoubleQuoteOrNewline | UnicodeEscape | charEscapeSeq ``` -A string literal is a sequence of characters in double quotes. The -characters are either printable unicode character or are described by -[escape sequences](#escape-sequences). If the string literal -contains a double quote character, it must be escaped, -i.e. `"\""`. The value of a string literal is an instance of -class `String`. +A string literal is a sequence of characters in double quotes. +The characters can be any Unicode character except the double quote +delimiter or `\u000A` (LF) or `\u000D` (CR); +or any Unicode character represented by either a +[Unicode escape](01-lexical-syntax.html) or by an [escape sequence](#escape-sequences). + +If the string literal contains a double quote character, it must be escaped using +`"\""`. + +The value of a string literal is an instance of class `String`. > ```scala -> "Hello,\nWorld!" -> "This string contains a \" character." +> "Hello, world!\n" +> "\"Hello,\" replied the world." > ``` #### Multi-Line String Literals @@ -443,11 +449,10 @@ multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} A multi-line string literal is a sequence of characters enclosed in triple quotes `""" ... """`. The sequence of characters is -arbitrary, except that it may contain three or more consuctive quote characters -only at the very end. Characters -must not necessarily be printable; newlines or other -control characters are also permitted. Unicode escapes work as everywhere else, but none -of the escape sequences [here](#escape-sequences) are interpreted. +arbitrary, except that it may contain three or more consecutive quote characters +only at the very end. In particular, embedded newlines +are permitted. Unicode escapes work as everywhere else, but none +of the [escape sequences](#escape-sequences) are interpreted. 
> ```scala > """the present string diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index 7f73e107de..a4b4aae570 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -57,11 +57,12 @@ floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ booleanLiteral ::= ‘true’ | ‘false’ -characterLiteral ::= ‘'’ (printableChar | charEscapeSeq) ‘'’ +characterLiteral ::= ‘'’ (charNoQuoteOrNewline | UnicodeEscape | charEscapeSeq) ‘'’ stringLiteral ::= ‘"’ {stringElement} ‘"’ | ‘"""’ multiLineChars ‘"""’ -stringElement ::= (printableChar except ‘"’) +stringElement ::= charNoDoubleQuoteOrNewline + | UnicodeEscape | charEscapeSeq multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} -- cgit v1.2.3 From aa0f345de2955819e7048984a7e5fa0acb7e8bc2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 15 Jun 2015 11:21:06 -0700 Subject: SI-1931 Hide Predef.any2stringadd in REPL User imports that reference Predef are relocated to the top of the wrapping template so that they can hide implicits defined in Predef. Only one import from Predef is retained for special treatment. This is simple and sane. The test shows that `import Predef._` restores Predef implicits even if a user-defined term would normally be in scope. A smart `:import` command to turn off or quarantine imports explicitly would allow fine-grained control. --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 3 +- src/repl/scala/tools/nsc/interpreter/Imports.scala | 31 +++++++++++----- test/files/jvm/interpreter.check | 6 +-- test/files/run/constrained-types.check | 8 ++-- test/files/run/kind-repl-command.check | 2 +- test/files/run/reify-repl-fail-gracefully.check | 2 +- test/files/run/reify_newimpl_22.check | 2 +- test/files/run/reify_newimpl_23.check | 2 +- test/files/run/reify_newimpl_25.check | 2 +- test/files/run/reify_newimpl_26.check | 2 +- test/files/run/repl-bare-expr.check | 12 +++--- test/files/run/repl-parens.check | 12 +++--- test/files/run/repl-paste-2.check | 2 +- test/files/run/repl-reset.check | 8 ++-- test/files/run/repl-trim-stack-trace.scala | 6 +-- test/files/run/t1931.scala | 43 ++++++++++++++++++++++ test/files/run/t4542.check | 2 +- test/files/run/t4594-repl-settings.scala | 2 +- test/files/run/t5655.check | 4 +- test/files/run/t7319.check | 6 +-- test/files/run/t7747-repl.check | 32 ++++++++-------- test/files/run/t9170.scala | 24 ++++++------ test/files/run/t9206.scala | 4 +- test/files/run/xMigration.check | 6 +-- 24 files changed, 139 insertions(+), 84 deletions(-) create mode 100644 test/files/run/t1931.scala diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 841b4abfa5..06ae179da9 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -888,7 +888,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set /** Code to import bound names from previous lines - accessPath is code to * append to objectName to access anything bound by request. 
*/ - lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) = + lazy val ComputedImports(headerPreamble, importsPreamble, importsTrailer, accessPath) = exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass)) /** the line of code to compute */ @@ -908,6 +908,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees")) } def preamble = s""" + |$headerPreamble |${preambleHeader format lineRep.readName} |${envLines mkString (" ", ";\n ", ";\n")} |$importsPreamble diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index 3ec77e46f1..5b231d94b6 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -70,7 +70,10 @@ trait Imports { /** Compute imports that allow definitions from previous * requests to be visible in a new request. Returns - * three pieces of related code: + * three or four pieces of related code: + * + * 0. Header code fragment that should go at the beginning + * of the compilation unit, specifically, import Predef. * * 1. An initial code fragment that should go before * the code of the new request. @@ -91,30 +94,34 @@ trait Imports { * (3) It imports multiple same-named implicits, but only the * last one imported is actually usable. */ - case class ComputedImports(prepend: String, append: String, access: String) + case class ComputedImports(header: String, prepend: String, append: String, access: String) protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = { + val header, code, trailingBraces, accessPath = new StringBuilder + val currentImps = mutable.HashSet[Name]() + var predefEscapes = false // only emit predef import header if name not resolved in history, loosely + /** Narrow down the list of requests from which imports * should be taken. Removes requests which cannot contribute * useful imports for the specified set of wanted names. */ - case class ReqAndHandler(req: Request, handler: MemberHandler) { } + case class ReqAndHandler(req: Request, handler: MemberHandler) def reqsToUse: List[ReqAndHandler] = { /** Loop through a list of MemberHandlers and select which ones to keep. - * 'wanted' is the set of names that need to be imported. + * 'wanted' is the set of names that need to be imported. */ def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = { // Single symbol imports might be implicits! See bug #1752. Rather than // try to finesse this, we will mimic all imports for now. def keepHandler(handler: MemberHandler) = handler match { - /* While defining classes in class based mode - implicits are not needed. */ + // While defining classes in class based mode - implicits are not needed. 
case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x)) case _: ImportHandler => true case x => x.definesImplicit || (x.definedNames exists wanted) } reqs match { - case Nil => Nil + case Nil => predefEscapes = wanted contains PredefModule.name ; Nil case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted) case rh :: rest => import rh.handler._ @@ -127,9 +134,6 @@ trait Imports { select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse } - val code, trailingBraces, accessPath = new StringBuilder - val currentImps = mutable.HashSet[Name]() - // add code for a new object to hold some imports def addWrapper() { import nme.{ INTERPRETER_IMPORT_WRAPPER => iw } @@ -146,6 +150,9 @@ trait Imports { try op finally addWrapper() } + // imports from Predef are relocated to the template header to allow hiding. + def checkHeader(h: ImportHandler) = h.referencedNames contains PredefModule.name + // loop through previous requests, adding imports for each one wrapBeforeAndAfter { // Reusing a single temporary value when import from a line with multiple definitions. @@ -153,6 +160,9 @@ trait Imports { for (ReqAndHandler(req, handler) <- reqsToUse) { val objName = req.lineRep.readPathInstance handler match { + case h: ImportHandler if checkHeader(h) => + header.clear() + header append f"${h.member}%n" // If the user entered an import, then just use it; add an import wrapping // level if the import might conflict with some other import case x: ImportHandler if x.importsWildcard => @@ -194,7 +204,8 @@ trait Imports { } } - ComputedImports(code.toString, trailingBraces.toString, accessPath.toString) + val computedHeader = if (predefEscapes) header.toString else "" + ComputedImports(computedHeader, code.toString, trailingBraces.toString, accessPath.toString) } private def allReqAndHandlers = diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 08372685d6..ce3c8062d7 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -30,7 +30,7 @@ scala> val four: anotherint = 4 four: anotherint = 4 scala> val bogus: anotherint = "hello" -:11: error: type mismatch; +:12: error: type mismatch; found : String("hello") required: anotherint (which expands to) Int @@ -353,7 +353,7 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:21: warning: match may not be exhaustive. +:22: warning: match may not be exhaustive. 
It would fail on the following inputs: Exp(), Term() def f(e: Exp) = e match { // non-exhaustive warning here ^ @@ -363,6 +363,6 @@ scala> :quit plusOne: (x: Int)Int res0: Int = 6 res0: String = after reset -:11: error: not found: value plusOne +:12: error: not found: value plusOne plusOne(5) // should be undefined now ^ diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check index 670d6f49aa..5444cf2088 100644 --- a/test/files/run/constrained-types.check +++ b/test/files/run/constrained-types.check @@ -133,16 +133,16 @@ y: String = hello scala> scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message -:11: error: not found: value e +:12: error: not found: value e val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:11: error: not found: value f +:12: error: not found: value f val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:11: error: not found: value g +:12: error: not found: value g val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:11: error: not found: value h +:12: error: not found: value h val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check index e050fb4bc1..560529ba03 100644 --- a/test/files/run/kind-repl-command.check +++ b/test/files/run/kind-repl-command.check @@ -19,7 +19,7 @@ scala> :k new { def empty = false } AnyRef{def empty: Boolean}'s kind is A scala> :k Nonexisting -:11: error: not found: value Nonexisting +:12: error: not found: value Nonexisting Nonexisting ^ diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check index 025d377a43..32ed876356 100644 --- a/test/files/run/reify-repl-fail-gracefully.check +++ b/test/files/run/reify-repl-fail-gracefully.check @@ -8,7 +8,7 @@ import scala.reflect.runtime.universe._ scala> scala> reify -:15: error: too few argument lists for macro invocation +:16: error: too few argument lists for macro invocation reify ^ diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check index e69dc60199..985f646579 100644 --- a/test/files/run/reify_newimpl_22.check +++ b/test/files/run/reify_newimpl_22.check @@ -15,7 +15,7 @@ scala> { } println(code.eval) } -:18: free term: Ident(TermName("x")) defined by res0 in :17:14 +:19: free term: Ident(TermName("x")) defined by res0 in :18:14 val code = reify { ^ 2 diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check index 1356d509d3..f60113c69f 100644 --- a/test/files/run/reify_newimpl_23.check +++ b/test/files/run/reify_newimpl_23.check @@ -14,7 +14,7 @@ scala> def foo[T]{ } println(code.eval) } -:16: free type: Ident(TypeName("T")) defined by foo in :15:16 +:17: free type: Ident(TypeName("T")) defined by foo in :16:16 val code = reify { ^ foo: [T]=> Unit diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check index e512cfc52e..9104d8df0b 100644 --- a/test/files/run/reify_newimpl_25.check +++ b/test/files/run/reify_newimpl_25.check @@ -5,7 +5,7 @@ scala> { val tt = implicitly[TypeTag[x.type]] println(tt) } -:14: free term: Ident(TermName("x")) defined by res0 in :13:14 +:15: free term: Ident(TermName("x")) defined by res0 in :14:14 val tt = implicitly[TypeTag[x.type]] ^ TypeTag[x.type] diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check index b203389db1..cbb21854ba 
100644 --- a/test/files/run/reify_newimpl_26.check +++ b/test/files/run/reify_newimpl_26.check @@ -4,7 +4,7 @@ scala> def foo[T]{ val tt = implicitly[WeakTypeTag[List[T]]] println(tt) } -:12: free type: Ident(TypeName("T")) defined by foo in :10:16 +:13: free type: Ident(TypeName("T")) defined by foo in :11:16 val tt = implicitly[WeakTypeTag[List[T]]] ^ foo: [T]=> Unit diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check index f0c488455f..e0a1f4ecd6 100644 --- a/test/files/run/repl-bare-expr.check +++ b/test/files/run/repl-bare-expr.check @@ -1,12 +1,12 @@ scala> 2 ; 3 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 2 ;; ^ res0: Int = 3 scala> { 2 ; 3 } -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses { 2 ; 3 } ^ res1: Int = 3 @@ -15,16 +15,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 1 + 2 + 3 } ; bippy+88+11 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ defined object Cow diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index 35853f10da..6516f4ea90 100644 --- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -18,10 +18,10 @@ scala> ( (2 + 2 ) ) res5: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; ( (2 + 2 ) ) ;; ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; ( (2 + 2 ) ) ;; ^ res6: Int = 5 @@ -38,16 +38,16 @@ res9: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: 
a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; ((2 + 2)) ;; ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; ((2 + 2)) ;; ^ res10: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; (x: Int) => x + 1 ;; ^ res11: () => Int = @@ -58,7 +58,7 @@ scala> () => 5 res12: () => Int = scala> 55 ; () => 5 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ;; ^ res13: () => Int = diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check index 5b6a84144d..dbf5363c0f 100644 --- a/test/files/run/repl-paste-2.check +++ b/test/files/run/repl-paste-2.check @@ -42,7 +42,7 @@ scala> res5 + res6 res1: Int = 690 scala> val x = dingus -:10: error: not found: value dingus +:11: error: not found: value dingus val x = dingus ^ diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check index b0683fff79..cf4d9a149e 100644 --- a/test/files/run/repl-reset.check +++ b/test/files/run/repl-reset.check @@ -28,13 +28,13 @@ Forgetting all expression results and named terms: $intp, BippyBungus, x1, x2, x Forgetting defined types: BippyBungus scala> x1 + x2 + x3 -:11: error: not found: value x1 +:12: error: not found: value x1 x1 + x2 + x3 ^ -:11: error: not found: value x2 +:12: error: not found: value x2 x1 + x2 + x3 ^ -:11: error: not found: value x3 +:12: error: not found: value x3 x1 + x2 + x3 ^ @@ -42,7 +42,7 @@ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus -:11: error: not found: type BippyBungus +:12: error: not found: type BippyBungus new BippyBungus ^ diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala index ea91e32239..c0814905f9 100644 --- a/test/files/run/repl-trim-stack-trace.scala +++ b/test/files/run/repl-trim-stack-trace.scala @@ -12,7 +12,7 @@ f: Nothing scala> f java.lang.Exception: Uh-oh - at .f(:10) + at .f(:11) ... 69 elided scala> def f = throw new Exception("") @@ -20,7 +20,7 @@ f: Nothing scala> f java.lang.Exception: - at .f(:10) + at .f(:11) ... 69 elided scala> def f = throw new Exception @@ -28,7 +28,7 @@ f: Nothing scala> f java.lang.Exception - at .f(:10) + at .f(:11) ... 
69 elided scala> :quit""" diff --git a/test/files/run/t1931.scala b/test/files/run/t1931.scala new file mode 100644 index 0000000000..eedfa9b03d --- /dev/null +++ b/test/files/run/t1931.scala @@ -0,0 +1,43 @@ + +import scala.tools.partest.SessionTest + +object Test extends SessionTest { + + def session = +""" +scala> val x: Any = 42 +x: Any = 42 + +scala> x + " works" +res0: String = 42 works + +scala> import Predef.{ any2stringadd => _, _ } +import Predef.{any2stringadd=>_, _} + +scala> x + " works" +:14: error: value + is not a member of Any + x + " works" + ^ + +scala> import Predef._ +import Predef._ + +scala> x + " works" +res2: String = 42 works + +scala> object Predef { def f = 42 } +defined object Predef + +scala> import Predef._ +import Predef._ + +scala> f +:14: error: not found: value f + f + ^ + +scala> Predef.f +res4: Int = 42 + +scala> :quit""" +} diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check index 3ab3eaffd8..6e099222b0 100644 --- a/test/files/run/t4542.check +++ b/test/files/run/t4542.check @@ -5,7 +5,7 @@ scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() { defined class Foo scala> val f = new Foo -:11: warning: class Foo is deprecated: foooo +:12: warning: class Foo is deprecated: foooo val f = new Foo ^ f: Foo = Bippy diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala index cbd87b5949..f2d1a8b3f8 100644 --- a/test/files/run/t4594-repl-settings.scala +++ b/test/files/run/t4594-repl-settings.scala @@ -15,7 +15,7 @@ object Test extends SessionTest { |scala> :settings -deprecation | |scala> def b = depp - |:11: warning: method depp is deprecated: Please don't do that. + |:12: warning: method depp is deprecated: Please don't do that. | def b = depp | ^ |b: String diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check index 857a5ab556..9191997624 100644 --- a/test/files/run/t5655.check +++ b/test/files/run/t5655.check @@ -6,7 +6,7 @@ scala> import x._ import x._ scala> x -:15: error: reference to x is ambiguous; +:16: error: reference to x is ambiguous; it is imported twice in the same scope by import x._ and import x @@ -14,7 +14,7 @@ and import x ^ scala> x -:15: error: reference to x is ambiguous; +:16: error: reference to x is ambiguous; it is imported twice in the same scope by import x._ and import x diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check index 31923e7119..4d8429e8f2 100644 --- a/test/files/run/t7319.check +++ b/test/files/run/t7319.check @@ -15,21 +15,21 @@ warning: there was one feature warning; re-run with -feature for details convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int scala> convert(Some[Int](0)) -:15: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) +:16: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) --- because --- argument expression's type is not compatible with formal parameter type; found : Some[Int] required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } } convert(Some[Int](0)) ^ -:15: error: type mismatch; +:16: error: type mismatch; found : Some[Int] required: F[_ <: F[_]] convert(Some[Int](0)) ^ scala> Range(1,2).toArray: Seq[_] -:14: error: polymorphic expression cannot be instantiated to expected type; +:15: error: polymorphic expression cannot be instantiated to expected type; found : [B >: Int]Array[B] required: Seq[_] Range(1,2).toArray: 
Seq[_] diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index f19c39e7e1..487daf4878 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -15,13 +15,13 @@ scala> val z = x * y z: Int = 156 scala> 2 ; 3 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 2 ;; ^ res0: Int = 3 scala> { 2 ; 3 } -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses { 2 ; 3 } ^ res1: Int = 3 @@ -30,16 +30,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 1 + 2 + 3 } ; bippy+88+11 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ defined object Cow @@ -81,10 +81,10 @@ scala> ( (2 + 2 ) ) res10: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; ( (2 + 2 ) ) ;; ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 5 ; ( (2 + 2 ) ) ;; ^ res11: Int = 5 @@ -101,16 +101,16 @@ res14: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; ((2 + 2)) ;; ^ -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; ((2 + 2)) ;; ^ res15: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:12: warning: a pure expression does nothing in statement position; you may be omitting necessary 
parentheses +:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ; (x: Int) => x + 1 ;; ^ res16: () => Int = @@ -121,7 +121,7 @@ scala> () => 5 res17: () => Int = scala> 55 ; () => 5 -:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 55 ;; ^ res18: () => Int = @@ -209,13 +209,13 @@ Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, C Forgetting defined types: BippyBungus, Moo, Ruminant scala> x1 + x2 + x3 -:11: error: not found: value x1 +:12: error: not found: value x1 x1 + x2 + x3 ^ -:11: error: not found: value x2 +:12: error: not found: value x2 x1 + x2 + x3 ^ -:11: error: not found: value x3 +:12: error: not found: value x3 x1 + x2 + x3 ^ @@ -223,7 +223,7 @@ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus -:11: error: not found: type BippyBungus +:12: error: not found: type BippyBungus new BippyBungus ^ diff --git a/test/files/run/t9170.scala b/test/files/run/t9170.scala index d6cf516615..f39467bc25 100644 --- a/test/files/run/t9170.scala +++ b/test/files/run/t9170.scala @@ -8,17 +8,17 @@ object Test extends SessionTest { def session = """ scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } -:10: error: double definition: -def f[A](a: => A): Int at line 10 and -def f[A](a: => Either[Exception,A]): Int at line 10 +:11: error: double definition: +def f[A](a: => A): Int at line 11 and +def f[A](a: => Either[Exception,A]): Int at line 11 have same type after erasure: (a: Function0)Int object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } ^ scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } -:10: error: double definition: -def f[A](a: => A): Int at line 10 and -def f[A](a: => Either[Exception,A]): Int at line 10 +:11: error: double definition: +def f[A](a: => A): Int at line 11 and +def f[A](a: => Either[Exception,A]): Int at line 11 have same type after erasure: (a: Function0)Int object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } ^ @@ -27,9 +27,9 @@ scala> object Y { | def f[A](a: => A) = 1 | def f[A](a: => Either[Exception, A]) = 2 | } -:12: error: double definition: -def f[A](a: => A): Int at line 11 and -def f[A](a: => Either[Exception,A]): Int at line 12 +:13: error: double definition: +def f[A](a: => A): Int at line 12 and +def f[A](a: => Either[Exception,A]): Int at line 13 have same type after erasure: (a: Function0)Int def f[A](a: => Either[Exception, A]) = 2 ^ @@ -44,9 +44,9 @@ object Y { // Exiting paste mode, now interpreting. 
-:12: error: double definition: -def f[A](a: => A): Int at line 11 and -def f[A](a: => Either[Exception,A]): Int at line 12 +:13: error: double definition: +def f[A](a: => A): Int at line 12 and +def f[A](a: => Either[Exception,A]): Int at line 13 have same type after erasure: (a: Function0)Int def f[A](a: => Either[Exception, A]) = 2 ^ diff --git a/test/files/run/t9206.scala b/test/files/run/t9206.scala index 872c980fe4..406798104e 100644 --- a/test/files/run/t9206.scala +++ b/test/files/run/t9206.scala @@ -7,14 +7,14 @@ object Test extends SessionTest { def session = s"""| |scala> val i: Int = "foo" - |:10: error: type mismatch; + |:11: error: type mismatch; | found : String("foo") | required: Int | val i: Int = "foo" | ^ | |scala> { val j = 42 ; val i: Int = "foo" + j } - |:11: error: type mismatch; + |:12: error: type mismatch; | found : String | required: Int | { val j = 42 ; val i: Int = "foo" + j } diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check index 79ce544493..cd860bf394 100644 --- a/test/files/run/xMigration.check +++ b/test/files/run/xMigration.check @@ -10,7 +10,7 @@ res1: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:any scala> Map(1 -> "eis").values // warn -:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. Map(1 -> "eis").values // warn ^ @@ -24,7 +24,7 @@ res3: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:2.7 scala> Map(1 -> "eis").values // warn -:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. Map(1 -> "eis").values // warn ^ @@ -38,7 +38,7 @@ res5: Iterable[String] = MapLike(eis) scala> :setting -Xmigration // same as :any scala> Map(1 -> "eis").values // warn -:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. Map(1 -> "eis").values // warn ^ -- cgit v1.2.3 From 0aa9281bdf91b8354f138edb175e774ff65d3824 Mon Sep 17 00:00:00 2001 From: Li Yao Date: Sat, 11 Jul 2015 00:49:41 +0800 Subject: Fix the bug in the example in scala.sys.process There's no `!` method with argument type `ProcessIO`. I suppose this is intended to be `run`. --- src/library/scala/sys/process/package.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index b1976ad4b6..141ec07ab4 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -157,7 +157,7 @@ package scala.sys { * while(input.read() != -1) count += 1 * input.close() * } - * cat ! new ProcessIO(_.close(), byteCounter, _.close()) + * cat run new ProcessIO(_.close(), byteCounter, _.close()) * count * } * -- cgit v1.2.3 From 512d19387da86ce95d7edd1742bf03287cf68a39 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 24 Jul 2015 15:35:14 +0200 Subject: Cleanup in Refchecks For historical reasons, when eliminating ModuleDef trees, RefChecks would check if moduleVar field already exists, and only create it if not. In reality, the lookup would always fail. 
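To make the transformation concrete: a nested object is conceptually lowered to a module class, a module variable in its owner, and an accessor that fills that variable on first use. The hand-written sketch below only illustrates that shape; the names and the explicit null check are illustrative, not the compiler's actual output.

```scala
class Outer {
  object Inner { val x = 1 }
}

// is conceptually rewritten by RefChecks into something like:
class OuterLowered {
  final class InnerImpl { val x = 1 }           // the module class
  private[this] var innerModule: InnerImpl = _  // the "moduleVar" discussed above
  def Inner: InnerImpl = {                      // the synthetic accessor
    if (innerModule eq null) innerModule = new InnerImpl
    innerModule
  }
}
```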
When initially committed, the moduleVar could be created either by the RefChecks transformer or info transformer, see 256aca6. This was later changed (3f1f0a4), after which RefChecks only creates a moduleVar when eliminating a ModuleDef. --- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 15 ++------------- .../scala/tools/nsc/typechecker/SyntheticMethods.scala | 8 +++++--- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 25d45cc819..53bd76f419 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -778,7 +778,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE) val params = defSym newSyntheticValueParams args.map(_.symbol.tpe) defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType) - val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym) + val rhs: Tree = gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats).changeOwner(currentOwner -> defSym) val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params) addDef(position(defSym), DefDef(defSym, strictSubst(BLOCK(rhs, retVal)))) defSym diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 36423fa2aa..92b62f3b16 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1188,20 +1188,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // set NoType so it will be ignored. val cdef = ClassDef(module.moduleClass, impl) setType NoType - // Create the module var unless the immediate owner is a class and - // the module var already exists there. See SI-5012, SI-6712. - def findOrCreateModuleVar() = { - val vsym = ( - if (site.isTerm) NoSymbol - else site.info decl nme.moduleVarName(moduleName) - ) - vsym orElse (site newModuleVarSymbol module) - } def newInnerObject() = { - // Create the module var unless it is already in the module owner's scope. - // The lookup is on module.enclClass and not module.owner lest there be a - // nullary method between us and the class; see SI-5012. - val moduleVar = findOrCreateModuleVar() + val moduleVar = site newModuleVarSymbol module val rhs = gen.newModule(module, moduleVar.tpe) val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs) val accessor = DefDef(module, body.changeOwner(moduleVar -> module)) @@ -1217,6 +1205,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } val newTrees = cdef :: ( if (module.isStatic) + // trait T { def f: Object }; object O extends T { object f }. Need to generate method f in O. 
if (module.isOverridingSymbol) matchingInnerObject() else Nil else newInnerObject() diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index d65d2092ad..c156b8c677 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -335,16 +335,18 @@ trait SyntheticMethods extends ast.TreeDSL { } for ((m, impl) <- methods ; if shouldGenerate(m)) yield impl() } - def extras = ( + def extras = { if (needsReadResolve) { // Aha, I finally decoded the original comment. // This method should be generated as private, but apparently if it is, then // it is name mangled afterward. (Wonder why that is.) So it's only protected. // For sure special methods like "readResolve" should not be mangled. - List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) })) + List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { + m setFlag PRIVATE; REF(clazz.sourceModule) + })) } else Nil - ) + } try impls ++ extras catch { case _: TypeError if reporter.hasErrors => Nil } -- cgit v1.2.3 From 6b53b0302abf4e4946012174a153372ea56abe9a Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 29 Jul 2015 21:46:02 +0100 Subject: ScalaDoc fixes for compiler --- src/compiler/scala/tools/nsc/ScriptRunner.scala | 12 ++++++------ src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 8 ++++---- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2 +- .../scala/tools/nsc/typechecker/StdAttachments.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Tags.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/compiler/scala/tools/nsc/util/DocStrings.scala | 4 ++-- 10 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 6d24b31531..bf93ad30bc 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -16,16 +16,16 @@ import util.Exceptional.unwrap /** An object that runs Scala code in script files. * - *
- *  <p>For example, here is a complete Scala script on Unix:
- *  </p><pre>
+ *  For example, here is a complete Scala script on Unix:
+ *  {{{
  *    #!/bin/sh
  *    exec scala "$0" "$@"
  *    !#
  *    Console.println("Hello, world!")
  *    args.toList foreach Console.println
- *  </pre>
- *  <p>And here is a batch file example on Windows XP:</p>
- *  <pre>
+ *  }}}
+ *  And here is a batch file example on Windows XP:
+ *  {{{
  *    ::#!
  *    @echo off
  *    call scala %0 %*
@@ -33,7 +33,7 @@ import util.Exceptional.unwrap
  *    ::!#
  *    Console.println("Hello, world!")
  *    args.toList foreach Console.println
- *  </pre>
+ * }}} * * @author Lex Spoon * @version 1.0, 15/05/2006 diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 92833d647b..cd41c75298 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -226,7 +226,7 @@ trait Scanners extends ScannersCommon { * RPAREN if region starts with '(' * RBRACKET if region starts with '[' * RBRACE if region starts with '{' - * ARROW if region starts with `case' + * ARROW if region starts with 'case' * STRINGLIT if region is a string interpolation expression starting with '${' * (the STRINGLIT appears twice in succession on the stack iff the * expression is a multiline string literal). diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index a34ab914ef..5b59150f5d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -495,8 +495,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => * generic classes or interfaces. * * @param superName the internal of name of the super class. For interfaces, - * the super class is {@link Object}. May be null, but - * only for the {@link Object} class. + * the super class is [[Object]]. May be null, but + * only for the [[Object]] class. * * @param interfaces the internal names of the class's interfaces (see * {@link Type#getInternalName() getInternalName}). May be diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index fc632e0d0d..309b80f9ba 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -44,7 +44,7 @@ import scala.language.postfixOps * which is essentially the intersection of X and |P|, where |P| is * the erasure of P. If XR <: P, then no warning is emitted. * - * We evaluate "X with conform to P" by checking `X <: P_wild, where + * We evaluate "X with conform to P" by checking `X <: P_wild`, where * P_wild is the result of substituting wildcard types in place of * pattern type variables. This is intentionally stricter than * (X matchesPattern P), see SI-8597 for motivating test cases. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 7ec9cd74a4..6c37cb96c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -110,10 +110,10 @@ trait Implicits { * Ignore their constr field! The list of type constraints returned along with each tree specifies the constraints that * must be met by the corresponding type parameter in `tpars` (for the returned implicit view to be valid). 
* - * @arg tp from-type for the implicit conversion - * @arg context search implicits here - * @arg tpars symbols that should be considered free type variables - * (implicit search should not try to solve them, just track their constraints) + * @param tp from-type for the implicit conversion + * @param context search implicits here + * @param tpars symbols that should be considered free type variables + * (implicit search should not try to solve them, just track their constraints) */ def allViewsFrom(tp: Type, context: Context, tpars: List[Symbol]): List[(SearchResult, List[TypeConstraint])] = { // my untouchable typevars are better than yours (they can't be constrained by them) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index ab9fa26bac..9f7bdf7aff 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1375,7 +1375,7 @@ trait Infer extends Checkable { * Otherwise, if there is no best alternative, error. * * @param argtpes0 contains the argument types. If an argument is named, as - * "a = 3", the corresponding type is `NamedType("a", Int)'. If the name + * "a = 3", the corresponding type is `NamedType("a", Int)`. If the name * of some NamedType does not exist in an alternative's parameter names, * the type is replaces by `Unit`, i.e. the argument is treated as an * assignment expression. diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index ea44b9dc39..92b0719ba3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -132,7 +132,7 @@ trait StdAttachments { /** Marks the tree as a macro impl reference, which is a naked reference to a method. * * This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`), - * because otherwise typing a naked reference will result in the "follow this method with `_' if you want to + * because otherwise typing a naked reference will result in the "follow this method with `_` if you want to * treat it as a partially applied function" errors. * * This mark suppresses adapt except for when the annottee is a macro application. diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 57dc74d2a0..56127f4026 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -36,7 +36,7 @@ trait Tags { * @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope. * If false then materialization macros are prohibited from running. * - * @returns Tree that represents an `scala.reflect.ClassTag` for `tp` if everything is okay. + * @return Tree that represents an `scala.reflect.ClassTag` for `tp` if everything is okay. * EmptyTree if the result contains unresolved (i.e. not spliced) type parameters and abstract type members. * EmptyTree if `allowMaterialization` is false, and there is no class tag in scope. */ @@ -57,7 +57,7 @@ trait Tags { * @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope. * If false then materialization macros are prohibited from running. 
* - * @returns Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay. + * @return Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay. * EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members. * EmptyTree if `allowMaterialization` is false, and there is no array tag in scope. */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index fd1a6f293f..a7046e45e0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2725,7 +2725,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * * If 'T' is not fully defined, it is inferred by type checking * `apply$body` without a result type before type checking the block. - * The method's inferred result type is used instead of T`. [See test/files/pos/sammy_poly.scala] + * The method's inferred result type is used instead of `T`. [See test/files/pos/sammy_poly.scala] * * The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`, * and `resPt` is derived from `samClassTp` -- it may be fully defined, or not... diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index 4ff7067a21..501546b8f6 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -37,7 +37,7 @@ object DocStrings { /** Returns index of string `str` after `start` skipping longest * sequence of space and tab characters, possibly also containing * a single `*` character or the `/``**` sequence. - * @pre start == str.length || str(start) == `\n' + * @pre start == str.length || str(start) == `\n` */ def skipLineLead(str: String, start: Int): Int = if (start == str.length) start @@ -49,7 +49,7 @@ object DocStrings { else idx } - /** Skips to next occurrence of `\n' or to the position after the `/``**` sequence following index `start`. + /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`. */ def skipToEol(str: String, start: Int): Int = if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3 -- cgit v1.2.3 From c201eac291682a9bdb9ca2790403084b4f36da76 Mon Sep 17 00:00:00 2001 From: Mariot Chauvin Date: Tue, 10 Feb 2015 15:15:29 +0000 Subject: SI-8362: AbstractPromise extends AtomicReference To avoid `sun.misc.Unsafe`, which is not supported on Google App Engine. Deprecate `AbstractPromise` --> extend `j.u.c.atomic.AtomicReference` directly. `AtomicReference.compareAndSet()` should also provide better performance on HotSpot, which compiles it down to the machine's CAS instruction. The binary incompatible change is ok because it's in an internal package. I can't think of any real issue with adding a superclass (which contributes only final methods) to a class in an implementation package (as long as those methods were not introduced in any illicit subclasses of said class). Instead of changing `DefaultPromise`'s super class, let's be more conservative, and do it closest to the source. This is both clearer and more focussed, leaving those subclasses of AbstractPromise we never heard of unaffected. 
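For illustration, the pattern this change relies on can be sketched in a few lines of Scala. Only `updateState` and `getState` mirror the helpers in `AbstractPromise`; the class names and the null-means-pending encoding below are assumptions made for the sketch.

```scala
import java.util.concurrent.atomic.AtomicReference

// The promise's state lives in the AtomicReference cell the class now extends,
// and every update goes through compareAndSet instead of sun.misc.Unsafe.
abstract class MiniAbstractPromise extends AtomicReference[AnyRef] {
  protected final def updateState(oldState: AnyRef, newState: AnyRef): Boolean =
    compareAndSet(oldState, newState)
  protected final def getState: AnyRef = get()
}

// Callers retry until their CAS wins, e.g. a toy single-value completion:
final class MiniPromise extends MiniAbstractPromise {
  @annotation.tailrec final def tryComplete(value: AnyRef): Boolean = getState match {
    case null => if (updateState(null, value)) true else tryComplete(value) // lost a race: retry
    case _    => false                                                      // already completed
  }
}
```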
Genesis of the commit: since the work on `Future` performance, `AbstractPromise` is using `Unsafe`, breaking the ability for `Future` to be executed on GAE. At that time, viktorklang suggested to implement a fallback in case `Unsafe` is not available. carey proposed an implementation, and mchv submitted a patch, which was refined by adriaanm. --- bincompat-backward.whitelist.conf | 9 ++++++ bincompat-forward.whitelist.conf | 9 ++++++ .../scala/concurrent/impl/AbstractPromise.java | 37 ++++------------------ 3 files changed, 25 insertions(+), 30 deletions(-) diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf index a1706d103d..0d85590b41 100644 --- a/bincompat-backward.whitelist.conf +++ b/bincompat-backward.whitelist.conf @@ -208,6 +208,15 @@ filter { { matchName="scala.reflect.io.ZipArchive.scala$reflect$io$ZipArchive$$walkIterator" problemName=MissingMethodProblem + }, + // SI-8362: AbstractPromise extends AtomicReference + // It's ok to change a package-protected class in an impl package, + // even though it's not clear why it changed -- bug in generic signature generation? + // -public class scala.concurrent.impl.Promise$DefaultPromise extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise + // +public class scala.concurrent.impl.Promise$DefaultPromise extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise + { + matchName="scala.concurrent.impl.Promise$DefaultPromise" + problemName=MissingTypesProblem } ] } diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf index 8fadb65f39..a9fbaa7b87 100644 --- a/bincompat-forward.whitelist.conf +++ b/bincompat-forward.whitelist.conf @@ -372,6 +372,15 @@ filter { { matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort" problemName=MissingMethodProblem + }, + // SI-8362: AbstractPromise extends AtomicReference + // It's ok to change a package-protected class in an impl package, + // even though it's not clear why it changed -- bug in generic signature generation? 
+ // -public class scala.concurrent.impl.Promise$DefaultPromise extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise + // +public class scala.concurrent.impl.Promise$DefaultPromise extends scala.concurrent.impl.AbstractPromise implements scala.concurrent.impl.Promise + { + matchName="scala.concurrent.impl.Promise$DefaultPromise" + problemName=MissingTypesProblem } ] } diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java index b8165b6cde..c2520a1692 100644 --- a/src/library/scala/concurrent/impl/AbstractPromise.java +++ b/src/library/scala/concurrent/impl/AbstractPromise.java @@ -1,6 +1,6 @@ /* __ *\ ** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\___/_/ |_/____/_/ | | ** ** |/ ** @@ -8,33 +8,10 @@ package scala.concurrent.impl; +import java.util.concurrent.atomic.AtomicReference; -import scala.concurrent.util.Unsafe; -import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; - - - -abstract class AbstractPromise { - private volatile Object _ref; - - final static long _refoffset; - - static { - try { - _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref")); - } catch (Throwable t) { - throw new ExceptionInInitializerError(t); - } - } - - protected final boolean updateState(Object oldState, Object newState) { - return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState); - } - - protected final Object getState() { - return _ref; - } - - protected final static AtomicReferenceFieldUpdater updater = - AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref"); -} \ No newline at end of file +@Deprecated // Since 2.11.8. Extend java.util.concurrent.atomic.AtomicReference instead. +abstract class AbstractPromise extends AtomicReference { + protected final boolean updateState(Object oldState, Object newState) { return compareAndSet(oldState, newState); } + protected final Object getState() { return get(); } +} -- cgit v1.2.3 From defb1465909c3f740871a56973c32b276f775b91 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 29 Jul 2015 11:39:04 +0200 Subject: SI-9375 add synthetic readResolve only for static modules For inner modules, the synthetic readResolve method would cause the module constructor to be invoked on de-serialization in certain situations. See the discussion in the ticket. Adds a comprehensive test around serializing and de-serializing modules. 
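For illustration, a minimal sketch of the problematic shape (assumed names, not taken from the
new test):

```scala
// An inner, serializable module. Before this change it received a synthetic
//   private def readResolve(): Object = Inner
// so deserializing an Inner instance could, in certain situations, run the
// module constructor as a side effect. The synthetic method is now only
// generated for static modules.
class Outer extends Serializable {
  object Inner extends Serializable {
    println("constructing Inner")
  }
}
```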
--- .../tools/nsc/typechecker/SyntheticMethods.scala | 1 + test/files/neg/t6666d.check | 4 - test/files/neg/t6666d.scala | 18 -- test/files/pos/t6666d.scala | 18 ++ test/files/run/idempotency-case-classes.check | 3 +- test/files/run/repl-serialization.check | 1 - test/files/run/t9375.check | 60 +++++ test/files/run/t9375.scala | 276 +++++++++++++++++++++ 8 files changed, 356 insertions(+), 25 deletions(-) delete mode 100644 test/files/neg/t6666d.check delete mode 100644 test/files/neg/t6666d.scala create mode 100644 test/files/pos/t6666d.scala create mode 100644 test/files/run/t9375.check create mode 100644 test/files/run/t9375.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index c156b8c677..4ccc183334 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -304,6 +304,7 @@ trait SyntheticMethods extends ast.TreeDSL { clazz.isModuleClass && clazz.isSerializable && !hasConcreteImpl(nme.readResolve) + && clazz.isStatic ) def synthesize(): List[Tree] = { diff --git a/test/files/neg/t6666d.check b/test/files/neg/t6666d.check deleted file mode 100644 index b4785f0129..0000000000 --- a/test/files/neg/t6666d.check +++ /dev/null @@ -1,4 +0,0 @@ -t6666d.scala:7: error: Implementation restriction: access of object TreeOrd$1 from object TreeOrd$2, would require illegal premature access to the unconstructed `this` of class Test - implicit object TreeOrd extends Ordering[K](){ - ^ -one error found diff --git a/test/files/neg/t6666d.scala b/test/files/neg/t6666d.scala deleted file mode 100644 index 49a688f91b..0000000000 --- a/test/files/neg/t6666d.scala +++ /dev/null @@ -1,18 +0,0 @@ - -import scala.collection.immutable.TreeMap -import scala.math.Ordering - -class Test[K](param:TreeMap[K,Int]){ - def this() = this({ - implicit object TreeOrd extends Ordering[K](){ - def compare(a: K, b: K) = { - -1 - } - } - new TreeMap[K, Int]() - }) -} - -object Test extends App { - new Test() -} diff --git a/test/files/pos/t6666d.scala b/test/files/pos/t6666d.scala new file mode 100644 index 0000000000..49a688f91b --- /dev/null +++ b/test/files/pos/t6666d.scala @@ -0,0 +1,18 @@ + +import scala.collection.immutable.TreeMap +import scala.math.Ordering + +class Test[K](param:TreeMap[K,Int]){ + def this() = this({ + implicit object TreeOrd extends Ordering[K](){ + def compare(a: K, b: K) = { + -1 + } + } + new TreeMap[K, Int]() + }) +} + +object Test extends App { + new Test() +} diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check index 5a8d0ad9d3..ea698cec59 100644 --- a/test/files/run/idempotency-case-classes.check +++ b/test/files/run/idempotency-case-classes.check @@ -47,8 +47,7 @@ C(2,3) case def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null)) scala.this.None else - Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y)); - private def readResolve(): Object = C + Some.apply[(Int, Int)](scala.Tuple2.apply[Int, Int](x$0.x, x$0.y)) }; Predef.println(C.apply(2, 3)) } diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check index eb62729f5c..bbbf0dcdf1 100644 --- a/test/files/run/repl-serialization.check +++ b/test/files/run/repl-serialization.check @@ -20,6 +20,5 @@ u: U = U evaluating O constructing A == reconstituting into a fresh classloader - evaluating O == evaluating reconstituted lambda constructing A diff 
--git a/test/files/run/t9375.check b/test/files/run/t9375.check new file mode 100644 index 0000000000..8f43fab025 --- /dev/null +++ b/test/files/run/t9375.check @@ -0,0 +1,60 @@ + konstruktor: class A + konstruktor: class A$O$12$ + konstruktor: class A$$anon$1 + konstruktor: class A$A + konstruktor: class A$C + konstruktor: class C + konstruktor: class T$O$15$ + konstruktor: class T$$anon$2 + konstruktor: class T$A + konstruktor: class T$C + konstruktor: class A$N$ + konstruktor: class T$N$ +serializing outer objects should not initialize any nested objects +now initializing nested objects + konstruktor: class A$O$ + konstruktor: class A$Op$ + konstruktor: class A$N$O$ + konstruktor: class A$N$Op$ + konstruktor: class A$A$O$ + konstruktor: class A$A$Op$ + konstruktor: class A$T$O$ + konstruktor: class A$T$Op$ + konstruktor: class A$O$11$ + konstruktor: class A$O$13$ + konstruktor: class A$$anon$1$O$ + konstruktor: class A$$anon$1$Op$ + konstruktor: class T$O$ + konstruktor: class T$Op$ + konstruktor: class T$N$O$ + konstruktor: class T$N$Op$ + konstruktor: class T$A$O$ + konstruktor: class T$A$Op$ + konstruktor: class T$T$O$ + konstruktor: class T$T$Op$ + konstruktor: class T$O$14$ + konstruktor: class T$O$16$ + konstruktor: class T$$anon$2$O$ + konstruktor: class T$$anon$2$Op$ +no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself) +deserializing outer objects with non-initialized inners again +accessing modules triggers initialization + konstruktor: class A$O$ + konstruktor: class A$Op$ + konstruktor: class A$N$O$ + konstruktor: class A$N$Op$ +deserializing creates a new object graph, including new scala 'object' instances, no matter where serialization starts +init static module M and field v + konstruktor: class M$ + konstruktor: class M$O$18$ +serDeser does not initialize nested static modules +init M.O + konstruktor: class M$O$ +serDeser nested static module +objects declared in field decls are not static modules, so they deserialize to new instances +init lazy val M.w +objects declared in lazy val are not static modules either + konstruktor: class M$O$19$ +object declared in a function: new instance created on each invocation + konstruktor: class M$O$20$ + konstruktor: class M$O$20$ diff --git a/test/files/run/t9375.scala b/test/files/run/t9375.scala new file mode 100644 index 0000000000..3995b38666 --- /dev/null +++ b/test/files/run/t9375.scala @@ -0,0 +1,276 @@ +import java.io._ + +object SerDes { + def serialize(obj: AnyRef): Array[Byte] = { + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + buffer.toByteArray + } + + def deserialize(a: Array[Byte]): AnyRef = { + val in = new ObjectInputStream(new ByteArrayInputStream(a)) + in.readObject + } + + def serializeDeserialize[T <: AnyRef](obj: T) = deserialize(serialize(obj)).asInstanceOf[T] +} + +import SerDes._ + +// tests to make sure that de-serializing an object does not run its constructor + +trait S extends Serializable { + println(" konstruktor: " + this.getClass) +} + +trait SE extends S { + def outer: Object +} + +class A extends S { + object O extends SE { def outer = A.this } + private[this] object Op extends SE { def outer = A.this } + def P: SE = Op + + object N extends S { + object O extends SE { def outer = N } + private[this] object Op extends SE { def outer = N } + def P: SE = Op + } + + class A extends S { + object O extends SE { def outer = A.this } + private[this] object Op extends SE { def outer = 
A.this } + def P: SE = Op + } + + trait T extends S { + object O extends SE { def outer = T.this } + private[this] object Op extends SE { def outer = T.this } + def P: SE = Op + } + class C extends T + + def u: SE = { + object O extends SE { def outer = A.this } + O + } + + val v: SE = { + object O extends SE { def outer = A.this } + O + } + + val f: () => SE = () => { + object O extends SE { def outer = A.this } + O + } + + trait GetObj { def O: SE; def P: SE } + val a: GetObj = new GetObj with S { + def anonThis = this + object O extends SE { def outer = anonThis } + private[this] object Op extends SE { def outer = anonThis } + def P: SE = Op + } +} + +trait T extends S { + object O extends SE { def outer = T.this } + private[this] object Op extends SE { def outer = T.this } + def P: SE = Op + + object N extends S { + object O extends SE { def outer = N } + private[this] object Op extends SE { def outer = N } + def P: SE = Op + } + + class A extends S { + object O extends SE { def outer = A.this } + private[this] object Op extends SE { def outer = A.this } + def P: SE = Op + } + + trait T extends S { + object O extends SE { def outer = T.this } + private[this] object Op extends SE { def outer = T.this } + def P: SE = Op + } + class C extends T + + def u: SE = { + object O extends SE { def outer = T.this } + O + } + + val v: SE = { + object O extends SE { def outer = T.this } + O + } + + val f: () => SE = () => { + object O extends SE { def outer = T.this } + O + } + + trait GetObj { def O: SE; def P: SE } + val a: GetObj = new GetObj with S { + def anonThis = this + object O extends SE { def outer = anonThis } + private[this] object Op extends SE { def outer = anonThis } + def P: SE = Op + } +} + +class C extends T + +object DeserializeModuleNoConstructor { + def t(): Unit = { + val a = new A + val aa = new a.A + val ac = new a.C + + val c = new C + val ca = new c.A + val cc = new c.C + + val outers: List[Object] = List( + a, a.N, aa, ac, a.a, + c, c.N, ca, cc, c.a + ) + + println("serializing outer objects should not initialize any nested objects") + + val serANotInit = serialize(a) + outers foreach serializeDeserialize + + println("now initializing nested objects") + + val os: List[(SE, Object)] = List( + a.O -> a, + a.P -> a, + a.N.O -> a.N, + a.N.P -> a.N, + aa.O -> aa, + aa.P -> aa, + ac.O -> ac, + ac.P -> ac, + a.u -> a, + a.v -> a, + a.f() -> a, + a.a.O -> a.a, + a.a.P -> a.a, + + c.O -> c, + c.P -> c, + c.N.O -> c.N, + c.N.P -> c.N, + ca.O -> ca, + ca.P -> ca, + cc.O -> cc, + cc.P -> cc, + c.u -> c, + c.v -> c, + c.f() -> c, + c.a.O -> c.a, + c.a.P -> c.a + ) + + println("no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself)") + + for ((obj, outer) <- os) { + assert(obj.outer eq outer, s"${obj.outer} of $obj -- $outer") + serializeDeserialize(obj) + serializeDeserialize(outer) + } + + println("deserializing outer objects with non-initialized inners again") + val aNotInit = deserialize(serANotInit).asInstanceOf[A] + + println("accessing modules triggers initialization") + aNotInit.O + aNotInit.P + aNotInit.N.O + aNotInit.N.P + + println("deserializing creates a new object graph, including new scala 'object' instances, no matter where serialization starts") + val deserializedAs: List[A] = List( + serializeDeserialize(a), + serializeDeserialize(a.O).outer.asInstanceOf[A], + serializeDeserialize(a.P).outer.asInstanceOf[A], + serializeDeserialize(a.v).outer.asInstanceOf[A] + ) + for (aSD <- deserializedAs) { + assert(aSD ne 
a) + assert(aSD.O ne a.O) + assert(aSD.P ne a.P) + assert(aSD.N ne a.N) + assert(aSD.N.O ne a.N.O) + assert(aSD.N.P ne a.N.P) + assert(aSD.v ne a.v) + assert(aSD.a.O ne a.a.O) + assert(aSD.a.P ne a.a.P) + } + } +} + +// tests for serializing / deserializing static modules + +object M extends S { + object O extends S + + def u: S = { + object O extends S + O + } + + val v: S = { + object O extends S + O + } + + lazy val w: S = { + object O extends S + O + } + + val f: () => S = () => { + object O extends S + O + } +} + +object SerializingStaticModules { + def t(): Unit = { + println("init static module M and field v") + M + + println("serDeser does not initialize nested static modules") + assert(serializeDeserialize(M) eq M) + + println("init M.O") + M.O + + println("serDeser nested static module") + assert(serializeDeserialize(M.O) eq M.O) + + println("objects declared in field decls are not static modules, so they deserialize to new instances") + assert(serializeDeserialize(M.v) ne M.v) + + println("init lazy val M.w") + + println("objects declared in lazy val are not static modules either") + assert(serializeDeserialize(M.w) ne M.w) + + println("object declared in a function: new instance created on each invocation") + assert(M.f() ne M.f()) + } +} + + +object Test extends App { + DeserializeModuleNoConstructor.t() + SerializingStaticModules.t() +} -- cgit v1.2.3 From f5dc96bb550a840ea150985125f52e025ac8ac49 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 31 Jul 2015 15:27:15 +1000 Subject: SI-9425 Leave Companion.apply if constructor is less accessible Calls to synthetic case class apply methods are inlined to the underlying constructor invocation in refchecks. However, this can lead to accessibility errors if the constructor is private. This commit ensures that the constructor is at least as accessible as the apply method before performing this tranform. 
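A minimal sketch of the shape that used to trip this rewrite (essentially what the new test
below exercises):

```scala
class C { case class Foo private (x: Int) }

object Elsewhere {
  // Foo.apply is accessible here, but the constructor is not, so rewriting
  // Foo.apply(0) into new Foo(0) during refchecks produced an access error.
  def f(c: C) = { import c.Foo; Foo.apply(0) }
}
```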
I've manually checked that other the optimization still works in other cases: scala> class CaseApply { Some(42) } defined class CaseApply scala> :javap -c CaseApply Compiled from "" public class CaseApply { public CaseApply(); Code: 0: aload_0 1: invokespecial #9 // Method java/lang/Object."":()V 4: new #11 // class scala/Some 7: dup 8: bipush 42 10: invokestatic #17 // Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer; 13: invokespecial #20 // Method scala/Some."":(Ljava/lang/Object;)V 16: pop 17: return } --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 3 ++- test/files/run/t9425.scala | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t9425.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0198529ef7..90ac1f466d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1511,7 +1511,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans sym.isSourceMethod && sym.isCase && sym.name == nme.apply && - isClassTypeAccessible(tree) + isClassTypeAccessible(tree) && + !tree.tpe.resultType.typeSymbol.primaryConstructor.isLessAccessibleThan(tree.symbol) if (doTransform) { tree foreach { diff --git a/test/files/run/t9425.scala b/test/files/run/t9425.scala new file mode 100644 index 0000000000..f251cc8579 --- /dev/null +++ b/test/files/run/t9425.scala @@ -0,0 +1,8 @@ +class C { case class Foo private (x: Int); Foo.apply(0) } + +object Test { + def test(c: C) = {import c.Foo; Foo.apply(0)} + def main(args: Array[String]): Unit = { + test(new C) + } +} -- cgit v1.2.3 From 6fcd8a6d059c574bd18afe73bcb11812bfba3dd3 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 5 Aug 2015 13:05:18 +0100 Subject: Stop mapping to Unit when executing finally code. Finally.invoke has result type Unit so foreach is sufficient here. --- src/library/scala/util/control/Exception.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala index aa30887ba0..24c297a2fc 100644 --- a/src/library/scala/util/control/Exception.scala +++ b/src/library/scala/util/control/Exception.scala @@ -105,7 +105,7 @@ object Exception { case x if rethrow(x) => throw x case x if pf isDefinedAt x => pf(x) } - finally fin map (_.invoke()) + finally fin foreach (_.invoke()) /* Create an empty Try container with this Catch and the supplied `Finally`. */ def andFinally(body: => Unit): Catch[T] = fin match { -- cgit v1.2.3 From 0fbb2c451b98e922eeed8547e2d7b7465bbf8b2b Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 5 Aug 2015 14:00:14 +0100 Subject: Avoid unnecessary implicit view on String Using length instead of size on String to avoid a conversion call. This dump confirms there is a conversion to StringOps when using size. 
object StringSize { val s = "hi" println(s.size) } $ scalac -Xprint:typer StringSize.scala [[syntax trees at end of typer]] // StringSize.scala package { object StringSize extends scala.AnyRef { def (): StringSize.type = { StringSize.super.(); () }; private[this] val s: String = "hi"; def s: String = StringSize.this.s; scala.this.Predef.println(scala.this.Predef.augmentString(StringSize.this.s).size) } } --- src/library/scala/collection/immutable/StringLike.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 1ead894faf..1b52e40b72 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -217,8 +217,8 @@ self => pos = thisString.indexOf(separator, prev) } while (pos != -1) - if (prev != thisString.size) - res += thisString.substring(prev, thisString.size) + if (prev != thisString.length) + res += thisString.substring(prev, thisString.length) val initialResult = res.result() pos = initialResult.length -- cgit v1.2.3 From 39ea54bf3eb681de7b0484f60e80002d45ebe7ba Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Aug 2015 14:10:30 +1000 Subject: Fix tracing of implicit search under -Ytyper-debug The log messages intented to chronicle implicit search were always being filtered out by virtue of the fact that the the tree passed to `printTyping` was already typed, (e.g. with an implicit MethodType.) This commit enabled printing in this case, although it still filters out trees that are deemed unfit for typer tracing, such as `()`. In the context of implicit search, this happens to filter out the noise of: ``` | | | [search #2] start `()`, searching for adaptation to pt=Unit => Foo[Int,Int] (silent: value in Test) implicits disabled | | | [search #3] start `()`, searching for adaptation to pt=(=> Unit) => Foo[Int,Int] (silent: value in Test) implicits disabled | | | \-> ``` ... which I think is desirable. The motivation for this fix was to better display the interaction between implicit search and type inference. For instance: ``` class Foo[A, B] class Test { implicit val f: Foo[Int, String] = ??? def t[A, B](a: A)(implicit f: Foo[A, B]) = ??? t(1) } ``` ```` % scalac -Ytyper-debug sandbox/instantiate.scala ... 
| |-- t(1) BYVALmode-EXPRmode (site: value in Test) | | |-- t BYVALmode-EXPRmode-FUNmode-POLYmode (silent: value in Test) | | | [adapt] [A, B](a: A)(implicit f: Foo[A,B])Nothing adapted to [A, B](a: A)(implicit f: Foo[A,B])Nothing | | | \-> (a: A)(implicit f: Foo[A,B])Nothing | | |-- 1 BYVALmode-EXPRmode-POLYmode (site: value in Test) | | | \-> Int(1) | | solving for (A: ?A, B: ?B) | | solving for (B: ?B) | | [search #1] start `[A, B](a: A)(implicit f: Foo[A,B])Nothing` inferring type B, searching for adaptation to pt=Foo[Int,B] (silent: value in Test) implicits disabled | | [search #1] considering f | | [adapt] f adapted to => Foo[Int,String] based on pt Foo[Int,B] | | [search #1] solve tvars=?B, tvars.constr= >: String <: String | | solving for (B: ?B) | | [search #1] success inferred value of type Foo[Int,=?String] is SearchResult(Test.this.f, TreeTypeSubstituter(List(type B),List(String))) | | |-- [A, B](a: A)(implicit f: Foo[A,B])Nothing BYVALmode-EXPRmode (site: value in Test) | | | \-> Nothing | | [adapt] [A, B](a: A)(implicit f: Foo[A,B])Nothing adapted to [A, B](a: A)(implicit f: Foo[A,B])Nothing | | \-> Nothing ``` --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 6 ++++-- src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 7ec9cd74a4..14a64abfe2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -324,8 +324,10 @@ trait Implicits { */ class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors { val searchId = implicitSearchId() - private def typingLog(what: String, msg: => String) = - typingStack.printTyping(tree, f"[search #$searchId] $what $msg") + private def typingLog(what: String, msg: => String) = { + if (printingOk(tree)) + typingStack.printTyping(f"[search #$searchId] $what $msg") + } import infer._ if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index 550fd4e68d..37fbb73b85 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -159,7 +159,7 @@ trait TypersTracking { // Some trees which are typed with mind-numbing frequency and // which add nothing by being printed. Did () type to Unit? Let's // gamble on yes. - private def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) + def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t) def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || ( (tree1.tpe == tree2.tpe) -- cgit v1.2.3 From 2b5f26e61a843af720daf59fa469d921cfc091ca Mon Sep 17 00:00:00 2001 From: Li Yao Date: Fri, 7 Aug 2015 13:48:38 +0800 Subject: Wait until the cat process is finished. 
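A standalone sketch of the documented pattern (the file name and helper are assumed; only the
added `exitValue()` call is the point): the output handler runs asynchronously, so without
waiting for the process to finish, `count` may be read too early.

```scala
import java.io.InputStream
import scala.sys.process._

def countBytes(fileName: String): Int = {
  var count = 0
  val drainOut: InputStream => Unit = { in =>
    while (in.read() != -1) count += 1
    in.close()
  }
  val p = Process(Seq("cat", fileName)).run(new ProcessIO(_.close(), drainOut, _.close()))
  p.exitValue() // block until cat terminates before reading the counter
  count
}
```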
--- src/library/scala/sys/process/package.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index 141ec07ab4..445c3aee60 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -157,7 +157,8 @@ package scala.sys { * while(input.read() != -1) count += 1 * input.close() * } - * cat run new ProcessIO(_.close(), byteCounter, _.close()) + * val p = cat run new ProcessIO(_.close(), byteCounter, _.close()) + * p.exitValue() * count * } * -- cgit v1.2.3 From cb8d924ab894cb3fb2b0e6cfe2fc41afc0b52457 Mon Sep 17 00:00:00 2001 From: stusmall Date: Sun, 2 Aug 2015 20:31:15 -0600 Subject: SI-3623 Improved error message for "filename too long" build errors When building on ecryptfs filenames can be limited to ~142 characters. This limit doesn't take long to hit and can leave the the user with a hard to diagnosis error message. Some legacy file systems will have similarly small limits. This just adds a hint that the error might be related to the underlying fs. --- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index a34ab914ef..fca0582539 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -532,6 +532,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") => reporter.error(sym.pos, s"Could not write class $jclassName because it exceeds JVM code size limits. ${e.getMessage}") + case e: java.io.IOException if e.getMessage != null && (e.getMessage contains "File name too long") => + reporter.error(sym.pos, e.getMessage + "\n" + + "This can happen on some encrypted or legacy file systems. 
Please see SI-3623 for more details.") + } } -- cgit v1.2.3 From 5bd8ea0edffe7b725e5fa665a82a5795d5dafe8f Mon Sep 17 00:00:00 2001 From: Brian McKenna Date: Sun, 7 Sep 2014 12:33:52 -0600 Subject: SI-6806 Add an @implicitAmbiguous annotation Example usage: trait =!=[C, D] implicit def neq[E, F] : E =!= F = null @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}") implicit def neqAmbig1[G, H, J] : J =!= J = null implicit def neqAmbig2[I] : I =!= I = null implicitly[Int =!= Int] Which gives the following error: implicit-ambiguous.scala:9: error: Could not prove Int =!= Int implicitly[Int =!= Int] ^ Better than what was previously given: implicit-ambiguous.scala:9: error: ambiguous implicit values: both method neqAmbig1 in object Test of type [G, H, J]=> Main.$anon.Test.=!=[J,J] and method neqAmbig2 in object Test of type [I]=> Main.$anon.Test.=!=[I,I] match expected type Main.$anon.Test.=!=[Int,Int] implicitly[Int =!= Int] ^ --- .../tools/nsc/typechecker/ContextErrors.scala | 19 ++++-- .../scala/tools/nsc/typechecker/Implicits.scala | 74 ++++++++++++---------- .../scala/tools/nsc/typechecker/RefChecks.scala | 11 ++-- .../scala/annotation/implicitAmbiguous.scala | 34 ++++++++++ .../scala/reflect/internal/Definitions.scala | 1 + src/reflect/scala/reflect/internal/Symbols.scala | 9 +-- .../scala/reflect/runtime/JavaUniverseForce.scala | 1 + test/files/neg/implicit-ambiguous-2.check | 4 ++ test/files/neg/implicit-ambiguous-2.scala | 11 ++++ test/files/neg/implicit-ambiguous-invalid.check | 7 ++ test/files/neg/implicit-ambiguous-invalid.flags | 1 + test/files/neg/implicit-ambiguous-invalid.scala | 6 ++ test/files/neg/implicit-ambiguous.check | 4 ++ test/files/neg/implicit-ambiguous.scala | 11 ++++ 14 files changed, 146 insertions(+), 47 deletions(-) create mode 100644 src/library/scala/annotation/implicitAmbiguous.scala create mode 100644 test/files/neg/implicit-ambiguous-2.check create mode 100644 test/files/neg/implicit-ambiguous-2.scala create mode 100644 test/files/neg/implicit-ambiguous-invalid.check create mode 100644 test/files/neg/implicit-ambiguous-invalid.flags create mode 100644 test/files/neg/implicit-ambiguous-invalid.scala create mode 100644 test/files/neg/implicit-ambiguous.check create mode 100644 test/files/neg/implicit-ambiguous.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b0bd9977a8..94e56a8e52 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1212,7 +1212,8 @@ trait ContextErrors { import definitions._ - def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo, + def AmbiguousImplicitError(info1: ImplicitInfo, tree1: Tree, + info2: ImplicitInfo, tree2: Tree, pre1: String, pre2: String, trailer: String) (isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = { if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) { @@ -1248,10 +1249,20 @@ trait ContextErrors { if (explanation == "") "" else "\n" + explanation ) } + + def treeTypeArgs(annotatedTree: Tree) = annotatedTree match { + case TypeApply(_, args) => args.map(_.toString) + case _ => Nil + } + context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, - if (isView) viewMsg - else s"ambiguous implicit values:\n${coreMsg}match expected type $pt") - ) + (tree1.symbol, tree2.symbol) match { + case (ImplicitAmbiguousMsg(msg), _) => msg.format(treeTypeArgs(tree1)) + case (_, 
ImplicitAmbiguousMsg(msg)) => msg.format(treeTypeArgs(tree2)) + case (_, _) if isView => viewMsg + case (_, _) => s"ambiguous implicit values:\n${coreMsg}match expected type $pt" + } + )) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 196b137a3e..0eb697f749 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -885,7 +885,7 @@ trait Implicits { * - find the most likely one * - if it matches, forget about all others it improves upon */ - @tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match { + @tailrec private def rankImplicits(pending: Infos, acc: List[(SearchResult, ImplicitInfo)]): List[(SearchResult, ImplicitInfo)] = pending match { case Nil => acc case firstPending :: otherPending => def firstPendingImproves(alt: ImplicitInfo) = @@ -912,7 +912,7 @@ trait Implicits { val pendingImprovingBest = undoLog undo { otherPending filterNot firstPendingImproves } - rankImplicits(pendingImprovingBest, firstPending :: acc) + rankImplicits(pendingImprovingBest, (newBest, firstPending) :: acc) } } @@ -928,14 +928,14 @@ trait Implicits { // So if there is any element not improved upon by the first it is an error. rankImplicits(eligible, Nil) match { case Nil => () - case chosen :: rest => - rest find (alt => !improves(chosen, alt)) match { - case Some(competing) => - AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context) + case (chosenResult, chosenInfo) :: rest => + rest find { case (_, alt) => !improves(chosenInfo, alt) } match { + case Some((competingResult, competingInfo)) => + AmbiguousImplicitError(chosenInfo, chosenResult.tree, competingInfo, competingResult.tree, "both", "and", "")(isView, pt, tree)(context) return AmbiguousSearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala case _ => - if (isView) chosen.useCountView += 1 - else chosen.useCountArg += 1 + if (isView) chosenInfo.useCountView += 1 + else chosenInfo.useCountArg += 1 } } @@ -1445,9 +1445,9 @@ trait Implicits { } } - object ImplicitNotFoundMsg { - def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match { - case Some(m) => Some(new Message(sym, m)) + class ImplicitAnnotationMsg(f: Symbol => Option[String], clazz: Symbol, annotationName: String) { + def unapply(sym: Symbol): Option[(Message)] = f(sym) match { + case Some(m) => Some(new Message(sym, m, annotationName)) case None if sym.isAliasType => // perform exactly one step of dealiasing // this is necessary because ClassManifests are now aliased to ClassTags @@ -1459,41 +1459,45 @@ trait Implicits { // check the message's syntax: should be a string literal that may contain occurrences of the string "${X}", // where `X` refers to a type parameter of `sym` def check(sym: Symbol): Option[String] = - sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match { - case Some(m) => new Message(sym, m).validate - case None => Some("Missing argument `msg` on implicitNotFound annotation.") + sym.getAnnotation(clazz).flatMap(_.stringArg(0) match { + case Some(m) => new Message(sym, m, annotationName).validate + case None => Some(s"Missing argument `msg` on $annotationName annotation.") }) + } + + object ImplicitNotFoundMsg extends ImplicitAnnotationMsg(_.implicitNotFoundMsg, ImplicitNotFoundClass, "implicitNotFound") + + object ImplicitAmbiguousMsg extends 
ImplicitAnnotationMsg(_.implicitAmbiguousMsg, ImplicitAmbiguousClass, "implicitAmbiguous") + class Message(sym: Symbol, msg: String, annotationName: String) { // http://dcsobral.blogspot.com/2010/01/string-interpolation-in-scala-with.html private val Intersobralator = """\$\{\s*([^}\s]+)\s*\}""".r - class Message(sym: Symbol, msg: String) { - private def interpolate(text: String, vars: Map[String, String]) = - Intersobralator.replaceAllIn(text, (_: Regex.Match) match { - case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "") + private def interpolate(text: String, vars: Map[String, String]) = + Intersobralator.replaceAllIn(text, (_: Regex.Match) match { + case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "") // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw) - }) + }) - private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName) - private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs + private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName) + private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs - def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString)) + def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString)) - def format(typeArgs: List[String]): String = - interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc? + def format(typeArgs: List[String]): String = + interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc? - def validate: Option[String] = { - val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet - val decls = typeParamNames.toSet + def validate: Option[String] = { + val refs = Intersobralator.findAllMatchIn(msg).map(_ group 1).toSet + val decls = typeParamNames.toSet - (refs &~ decls) match { - case s if s.isEmpty => None - case unboundNames => - val singular = unboundNames.size == 1 - val ess = if (singular) "" else "s" - val bee = if (singular) "is" else "are" - Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @implicitNotFound annotation $bee not defined by $sym.") - } + (refs &~ decls) match { + case s if s.isEmpty => None + case unboundNames => + val singular = unboundNames.size == 1 + val ess = if (singular) "" else "s" + val bee = if (singular) "is" else "are" + Some(s"The type parameter$ess ${unboundNames mkString ", "} referenced in the message of the @$annotationName annotation $bee not defined by $sym.") } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0198529ef7..ae5c07a76d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1468,10 +1468,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case m: MemberDef => val sym = m.symbol applyChecks(sym.annotations) - // validate implicitNotFoundMessage - analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn => - reporter.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn") - } + + def messageWarning(name: String)(warn: String) = + reporter.warning(tree.pos, f"Invalid $name message for 
${sym}%s${sym.locationString}%s:%n$warn") + + // validate implicitNotFoundMessage and implicitAmbiguousMessage + analyzer.ImplicitNotFoundMsg.check(sym) foreach messageWarning("implicitNotFound") + analyzer.ImplicitAmbiguousMsg.check(sym) foreach messageWarning("implicitAmbiguous") case tpt@TypeTree() => if(tpt.original != null) { diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala new file mode 100644 index 0000000000..46eab9ae8f --- /dev/null +++ b/src/library/scala/annotation/implicitAmbiguous.scala @@ -0,0 +1,34 @@ +package scala.annotation + +import scala.annotation.meta._ + +/** + * To customize the error message that's emitted when an implicit of type + * C[T1,..., TN] is found more than once, annotate the class C + * with @implicitAmbiguous. Assuming C has type parameters X1,..., XN, the + * error message will be the result of replacing all occurrences of ${Xi} in + * the string msg with the string representation of the corresponding type + * argument Ti. * + * + * If more than one @implicitAmbiguous annotation is collected, the compiler is + * free to pick any of them to display. + * + * Nice errors can direct users to fix imports or even tell them why code + * intentionally doesn't compile. + * + * {{{ + * trait =!=[C, D] + * + * implicit def neq[E, F] : E =!= F = null + * + * @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}") + * implicit def neqAmbig1[G, H, J] : J =!= J = null + * implicit def neqAmbig2[I] : I =!= I = null + * + * implicitly[Int =!= Int] + * }}} + * + * @author Brian McKenna + * @since 2.12.0 + */ +final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation {} diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 02fa3c882b..231b6a8a66 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1103,6 +1103,7 @@ trait Definitions extends api.StandardDefinitions { lazy val BridgeClass = requiredClass[scala.annotation.bridge] lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable] lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound] + lazy val ImplicitAmbiguousClass = getClassIfDefined("scala.annotation.implicitAmbiguous") lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration] lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp] lazy val SwitchClass = requiredClass[scala.annotation.switch] diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8a52f0b9d8..1113da2eff 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -884,10 +884,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => // string. So this needs attention. For now the fact that migration is // private[scala] ought to provide enough protection. 
def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass) - def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) } - def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) } - def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) } - def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) } + def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) } + def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) } + def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) } + def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) } + def implicitAmbiguousMsg = getAnnotation(ImplicitAmbiguousClass) flatMap { _.stringArg(0) } def isCompileTimeOnly = hasAnnotation(CompileTimeOnlyAttr) def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index ea213cadd9..a2232d1963 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -370,6 +370,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.BridgeClass definitions.ElidableMethodClass definitions.ImplicitNotFoundClass + definitions.ImplicitAmbiguousClass definitions.MigrationAnnotationClass definitions.ScalaStrictFPAttr definitions.SwitchClass diff --git a/test/files/neg/implicit-ambiguous-2.check b/test/files/neg/implicit-ambiguous-2.check new file mode 100644 index 0000000000..4a10b0dd65 --- /dev/null +++ b/test/files/neg/implicit-ambiguous-2.check @@ -0,0 +1,4 @@ +implicit-ambiguous-2.scala:10: error: Could not prove Int =!= Int + implicitly[Int =!= Int] + ^ +one error found diff --git a/test/files/neg/implicit-ambiguous-2.scala b/test/files/neg/implicit-ambiguous-2.scala new file mode 100644 index 0000000000..563c8c583f --- /dev/null +++ b/test/files/neg/implicit-ambiguous-2.scala @@ -0,0 +1,11 @@ +object Test { + trait =!=[C, D] + + implicit def neq[E, F] : E =!= F = null + + implicit def neqAmbig1[G, H, J] : J =!= J = null + @annotation.implicitAmbiguous("Could not prove ${I} =!= ${I}") + implicit def neqAmbig2[I] : I =!= I = null + + implicitly[Int =!= Int] +} diff --git a/test/files/neg/implicit-ambiguous-invalid.check b/test/files/neg/implicit-ambiguous-invalid.check new file mode 100644 index 0000000000..68b607c4c2 --- /dev/null +++ b/test/files/neg/implicit-ambiguous-invalid.check @@ -0,0 +1,7 @@ +implicit-ambiguous-invalid.scala:5: warning: Invalid implicitAmbiguous message for method neqAmbig1 in object Test: +The type parameter B referenced in the message of the @implicitAmbiguous annotation is not defined by method neqAmbig1. + implicit def neqAmbig1[A] : A =!= A = null + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found +one error found diff --git a/test/files/neg/implicit-ambiguous-invalid.flags b/test/files/neg/implicit-ambiguous-invalid.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/implicit-ambiguous-invalid.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/implicit-ambiguous-invalid.scala b/test/files/neg/implicit-ambiguous-invalid.scala new file mode 100644 index 0000000000..f8f9da655f --- /dev/null +++ b/test/files/neg/implicit-ambiguous-invalid.scala @@ -0,0 +1,6 @@ +object Test { + trait =!=[C, D] + + @annotation.implicitAmbiguous("Could not prove ${A} =!= ${B}") + implicit def neqAmbig1[A] : A =!= A = null +} diff --git a/test/files/neg/implicit-ambiguous.check b/test/files/neg/implicit-ambiguous.check new file mode 100644 index 0000000000..0b3cebcb6f --- /dev/null +++ b/test/files/neg/implicit-ambiguous.check @@ -0,0 +1,4 @@ +implicit-ambiguous.scala:10: error: Could not prove Int =!= Int + implicitly[Int =!= Int] + ^ +one error found diff --git a/test/files/neg/implicit-ambiguous.scala b/test/files/neg/implicit-ambiguous.scala new file mode 100644 index 0000000000..79b1297915 --- /dev/null +++ b/test/files/neg/implicit-ambiguous.scala @@ -0,0 +1,11 @@ +object Test { + trait =!=[C, D] + + implicit def neq[E, F] : E =!= F = null + + @annotation.implicitAmbiguous("Could not prove ${J} =!= ${J}") + implicit def neqAmbig1[G, H, J] : J =!= J = null + implicit def neqAmbig2[I] : I =!= I = null + + implicitly[Int =!= Int] +} -- cgit v1.2.3 From 898ee2a01f0c4522f8f9dc33fb175b352e0857e7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 13 Aug 2015 15:21:42 +1000 Subject: Unfinalize the class DefaultPromise It was non-final in Scala 2.11.x, and made final as part of fa0743c32. Removing the final modifier seems like the cleanest way to enable conversions like `javaFuture.toScala.toJava` to return the original `javaFuture` in scala-java8-compat. I have made the methods defined in this class final as an alternative lockdown. Discussion, Motivation: https://github.com/scala/scala-java8-compat/pull/46 https://github.com/scala/scala-java8-compat/pull/50 --- src/library/scala/concurrent/impl/Promise.scala | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 078ad45be9..3538ac6b94 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -178,7 +178,9 @@ private[concurrent] object Promise { * DefaultPromises, and `linkedRootOf` is currently only designed to be called * by Future.flatMap. */ - final class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] { + // Left non-final to enable addition of extra fields by Java/Scala converters + // in scala-java8-compat. + class DefaultPromise[T] extends AtomicReference[AnyRef](Nil) with Promise[T] { /** Get the root promise for this promise, compressing the link chain to that * promise if necessary. 
@@ -248,12 +250,12 @@ private[concurrent] object Promise { @throws(classOf[TimeoutException]) @throws(classOf[InterruptedException]) - def ready(atMost: Duration)(implicit permit: CanAwait): this.type = + final def ready(atMost: Duration)(implicit permit: CanAwait): this.type = if (tryAwait(atMost)) this else throw new TimeoutException("Futures timed out after [" + atMost + "]") @throws(classOf[Exception]) - def result(atMost: Duration)(implicit permit: CanAwait): T = + final def result(atMost: Duration)(implicit permit: CanAwait): T = ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here def value: Option[Try[T]] = value0 @@ -265,7 +267,7 @@ private[concurrent] object Promise { case _ => None } - override def isCompleted: Boolean = isCompleted0 + override final def isCompleted: Boolean = isCompleted0 @tailrec private def isCompleted0: Boolean = get() match { @@ -274,7 +276,7 @@ private[concurrent] object Promise { case _ => false } - def tryComplete(value: Try[T]): Boolean = { + final def tryComplete(value: Try[T]): Boolean = { val resolved = resolveTry(value) tryCompleteAndGetListeners(resolved) match { case null => false @@ -297,7 +299,7 @@ private[concurrent] object Promise { } } - def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = + final def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = dispatchOrAddCallback(new CallbackRunnable[T](executor.prepare(), func)) /** Tries to add the callback, if already completed, it dispatches the callback to be executed. -- cgit v1.2.3 From 670f377d19496e8922624b43721cb88b8023a56b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 18 Aug 2015 12:16:40 -0700 Subject: Revert "SI-8346 Rebuild invariant sets in #toSet, avoiding CCE" --- .../scala/collection/immutable/HashSet.scala | 7 ----- .../scala/collection/immutable/ListSet.scala | 7 ----- src/library/scala/collection/immutable/Set.scala | 16 ++++------ test/files/run/t8346.check | 6 ---- test/files/run/t8346.scala | 34 ---------------------- 5 files changed, 6 insertions(+), 64 deletions(-) delete mode 100644 test/files/run/t8346.check delete mode 100644 test/files/run/t8346.scala diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index f548eac88d..6851ab6bc7 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -162,13 +162,6 @@ class HashSet[A] extends AbstractSet[A] def - (e: A): HashSet[A] = nullToEmpty(removed0(e, computeHash(e), 0)) - /** Returns this $coll as an immutable set. - * - * A new set will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] - override def filter(p: A => Boolean) = { val buffer = new Array[HashSet[A]](bufferSize(size)) nullToEmpty(filter0(p, false, 0, buffer, 0)) diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index a6e6fba0a5..2e17677359 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -138,13 +138,6 @@ class ListSet[A] extends AbstractSet[A] override def stringPrefix = "ListSet" - /** Returns this $coll as an immutable set. 
- * - * A new set will not be built; lazy collections will stay lazy. - */ - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] - /** Represents an entry in the `ListSet`. */ protected class Node(override val head: A) extends ListSet[A] with Serializable { diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index 7725ad9ee3..0fbf7942d4 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -35,7 +35,12 @@ trait Set[A] extends Iterable[A] override def companion: GenericCompanion[Set] = Set - override def toSet[B >: A]: Set[B] = to[({type l[a] = immutable.Set[B]})#l] // for bincompat; remove in dev + /** Returns this $coll as an immutable map. + * + * A new map will not be built; lazy collections will stay lazy. + */ + @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] override def seq: Set[A] = this protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there! @@ -57,7 +62,6 @@ object Set extends ImmutableSetFactory[Set] { def - (elem: Any): Set[Any] = this def iterator: Iterator[Any] = Iterator.empty override def foreach[U](f: Any => U): Unit = {} - override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]] } private[collection] def emptyInstance: Set[Any] = EmptySet @@ -88,8 +92,6 @@ object Set extends ImmutableSetFactory[Set] { if (f(elem1)) Some(elem1) else None } - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 2 */ @@ -121,8 +123,6 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem2)) Some(elem2) else None } - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 3 */ @@ -156,8 +156,6 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem3)) Some(elem3) else None } - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** An optimized representation for immutable sets of size 4 */ @@ -193,8 +191,6 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem4)) Some(elem4) else None } - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } } diff --git a/test/files/run/t8346.check b/test/files/run/t8346.check deleted file mode 100644 index 1ba5c31abe..0000000000 --- a/test/files/run/t8346.check +++ /dev/null @@ -1,6 +0,0 @@ -BitSet: List(invariant, invariant, invariant, invariant) -HashSet: List(covariant (true), covariant (true), covariant (true), covariant (true)) -ListSet: List(covariant (true), covariant (true), covariant (true), covariant (true)) -SortedSet: List(invariant, invariant, invariant, invariant) -TreeSet: List(invariant, invariant, invariant, invariant) -ValueSet: invariant 
diff --git a/test/files/run/t8346.scala b/test/files/run/t8346.scala deleted file mode 100644 index 5f3df84174..0000000000 --- a/test/files/run/t8346.scala +++ /dev/null @@ -1,34 +0,0 @@ -object Test extends App { - import reflect.ClassTag - - object SomeEnum extends Enumeration { - val one, two, three, four = Value - } - - def sctor[A <: Set[Int]](f: Int => A)(implicit A: ClassTag[A]) - : (String, Int => Set[Int]) = - (A.runtimeClass.getSimpleName, f) - - val inits: Seq[(String, Int => Set[Int])] = { - import collection.immutable.{Seq => _, _} - Seq(sctor(BitSet(_)), - sctor(HashSet(_)), - sctor(ListSet(_)), - sctor(SortedSet(_)), - sctor(TreeSet(_))) - } - - def sVarInfo[A](sa: Set[A]): String = { - val saa = sa.toSet[Any] - if (sa eq saa) s"""covariant (${(saa + "hi") contains "hi"})""" - else "invariant" - } - - inits foreach {case (name, singleton) => - print(s"${name}: ") - val one = singleton(1) - println(Seq(2,3,4).scanLeft(one)(_ + _) map sVarInfo toList) - } - - println(s"ValueSet: ${sVarInfo(SomeEnum.values)}") -} -- cgit v1.2.3 From d978442e5d57d934cb0cfe52daf620a4c11a192b Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 19 Aug 2015 08:50:12 +0100 Subject: Fix method name reference in Predef documentation --- src/library/scala/Predef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index cef62922ac..94cb331ce1 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -56,7 +56,7 @@ import scala.io.StdIn * only contain natural numbers (i.e. non-negative), and that the result returned * will also be natural. `require` is distinct from `assert` in that if the * condition fails, then the caller of the function is to blame rather than a - * logical error having been made within `addNaturals` itself. `ensures` is a + * logical error having been made within `addNaturals` itself. `ensuring` is a * form of `assert` that declares the guarantee the function is providing with * regards to its return value. * -- cgit v1.2.3 From e9b47102ed2c49200a4bef48cfb5e21c37086e7b Mon Sep 17 00:00:00 2001 From: martijnhoekstra Date: Wed, 19 Aug 2015 13:15:10 +0200 Subject: Fix documentation of filter introduced in 13f30c --- src/library/scala/collection/immutable/Stream.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index d8f0559706..a6c55f8828 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -508,8 +508,8 @@ self => * * @example {{{ * $naturalsEx - * naturalsFrom(1) 10 } filter { _ % 5 == 0 } take 10 mkString(", ") - * // produces + * naturalsFrom(1) filter { _ % 5 == 0 } take 10 mkString(", ") + * // produces "5, 10, 15, 20, 25, 30, 35, 40, 45, 50" * }}} */ override def filter(p: A => Boolean): Stream[A] = { -- cgit v1.2.3 From c5f3d3f286ee5c26c8ddcf10f6878058e8f7e040 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 18 Aug 2015 10:11:29 +0200 Subject: Remove unnecessary dependency on parallel collections in ScalaRunTime. In method `ScalaRuntime.stringOf(arg: Any, maxElements: Int)` there are `case x: Iterable[_]` and `case x: ParIterable[_]` which have the excat same code that only uses the `GenIterable[_]` API on `x`. Therfore they can be replaced by a single `case x: GenIterable[_]`. 
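For illustration (a sketch; the actual change is in the diff below), sequential and parallel
collections now take the same branch and both honour `maxElements`:

```scala
import scala.runtime.ScalaRunTime.stringOf

// Both calls now hit the single GenIterable[_] case; the expected shape of the
// output (assumed here, not asserted by the patch) is "Vector(1, 2)" and
// "ParVector(1, 2)" respectively.
stringOf(Vector(1, 2, 3, 4), 2)
stringOf(Vector(1, 2, 3, 4).par, 2)
```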
The `case x: ParIterable[_]` was added because prevoiusly parallel colections would only match `case x = x.toSting()` which ignores the `maxElements` parameter. This was still the case for other `GenIterable[_]`. Using `case x: GenIterable[_]` will cover those cases as well. This change is required for Scala.js compatibility as it does not support parallel collections. --- src/library/scala/runtime/ScalaRunTime.scala | 6 +-- test/junit/scala/runtime/ScalaRunTimeTest.scala | 57 +++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index a0d89fc0e1..ce27a0ce63 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -9,11 +9,10 @@ package scala package runtime -import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } +import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator, GenIterable } import scala.collection.mutable.WrappedArray import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: } import scala.collection.generic.{ Sorted, IsTraversableLike } -import scala.collection.parallel.ParIterable import scala.reflect.{ ClassTag, classTag } import scala.util.control.ControlThrowable import java.lang.{ Class => jClass } @@ -326,8 +325,7 @@ object ScalaRunTime { case x if useOwnToString(x) => x.toString case x: AnyRef if isArray(x) => arrayToString(x) case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: ParIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") diff --git a/test/junit/scala/runtime/ScalaRunTimeTest.scala b/test/junit/scala/runtime/ScalaRunTimeTest.scala index 9da197c71a..728d8c0ce9 100644 --- a/test/junit/scala/runtime/ScalaRunTimeTest.scala +++ b/test/junit/scala/runtime/ScalaRunTimeTest.scala @@ -67,4 +67,61 @@ class ScalaRunTimeTest { val c = new C() assertFalse(c.toString, isTuple(c)) } + + @Test + def testStingOf() { + import ScalaRunTime.stringOf + import scala.collection._ + import parallel.ParIterable + + assertEquals("null", stringOf(null)) + assertEquals( "\"\"", stringOf("")) + + assertEquals("abc", stringOf("abc")) + assertEquals("\" abc\"", stringOf(" abc")) + assertEquals("\"abc \"", stringOf("abc ")) + + assertEquals("""Array()""", stringOf(Array.empty[AnyRef])) + assertEquals("""Array()""", stringOf(Array.empty[Int])) + assertEquals("""Array(1, 2, 3)""", stringOf(Array(1, 2, 3))) + assertEquals("""Array(a, "", " c", null)""", stringOf(Array("a", "", " c", null))) + assertEquals("""Array(Array("", 1, Array(5)), Array(1))""", + stringOf(Array(Array("", 1, Array(5)), Array(1)))) + + val map = Map(1->"", 2->"a", 3->" a", 4->null) + assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a, 3 -> " a", 4 -> null)""", stringOf(map)) + assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a)""", stringOf(map, 2)) + + val iterable = 
Iterable("a", "", " c", null) + assertEquals(s"""${iterable.stringPrefix}(a, "", " c", null)""", stringOf(iterable)) + assertEquals(s"""${iterable.stringPrefix}(a, "")""", stringOf(iterable, 2)) + + val parIterable = ParIterable("a", "", " c", null) + assertEquals(s"""${parIterable.stringPrefix}(a, "", " c", null)""", stringOf(parIterable)) + assertEquals(s"""${parIterable.stringPrefix}(a, "")""", stringOf(parIterable, 2)) + + val traversable = new Traversable[Int] { + def foreach[U](f: Int => U): Unit = (0 to 3).foreach(f) + } + assertEquals(s"${traversable.stringPrefix}(0, 1, 2, 3)", stringOf(traversable)) + assertEquals(s"${traversable.stringPrefix}(0, 1)", stringOf(traversable, 2)) + + val tuple1 = Tuple1(0) + assertEquals("(0,)", stringOf(tuple1)) + assertEquals("(0,)", stringOf(tuple1, 0)) + + val tuple2 = Tuple2(0, 1) + assertEquals("(0,1)", stringOf(tuple2)) + assertEquals("(0,1)", stringOf(tuple2, 0)) + + val tuple3 = Tuple3(0, 1, 2) + assertEquals("(0,1,2)", stringOf(tuple3)) + assertEquals("(0,1,2)", stringOf(tuple3, 0)) + + val x = new Object { + override def toString(): String = "this is the stringOf string" + } + assertEquals(stringOf(x), "this is the stringOf string") + assertEquals(stringOf(x, 2), "this is the stringOf string") + } } -- cgit v1.2.3 From 3bfbab3b8e1b86d861dbacc46e26259ebc3d839e Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 24 Aug 2015 12:44:36 -0700 Subject: Clean up Constructors a bit. Shouldn't change behavior, sets stage for moving the transform of ConstantType methods to Uncurry. Constructors still needs a much more thorough overhaul... --- .../scala/tools/nsc/transform/Constructors.scala | 186 ++++++++++----------- 1 file changed, 90 insertions(+), 96 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 7c66bda46b..2828e87bd8 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -396,9 +396,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { } if (stat1 eq stat) { - assert(ctorParams(genericClazz).length == constrInfo.constrParams.length) + assert(ctorParams(genericClazz).length == primaryConstrParams.length) // this is just to make private fields public - (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1) + (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1) val stat2 = rewriteArrayUpdate(stat1) // statements coming from the original class need retyping in the current context @@ -435,16 +435,16 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { // postfix = postfix.tail // } - if (shouldGuard && usesSpecializedField && stats.nonEmpty) { + if (guardSpecializedFieldInit && intoConstructor.usesSpecializedField && stats.nonEmpty) { // save them for duplication in the specialized subclass guardedCtorStats(clazz) = stats - ctorParams(clazz) = constrInfo.constrParams + ctorParams(clazz) = primaryConstrParams val tree = If( Apply( CODE.NOT ( - Apply(gen.mkAttributedRef(specializedFlag), List())), + Apply(gen.mkAttributedRef(hasSpecializedFieldsSym), List())), List()), Block(stats, Literal(Constant(()))), EmptyTree) @@ -476,27 +476,17 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { val stats = impl.body // the transformed template body val localTyper = 
typer.atOwner(impl, clazz) - val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE) - val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED) - - val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass) - - case class ConstrInfo( - constr: DefDef, // The primary constructor - constrParams: List[Symbol], // ... and its parameters - constrBody: Block // ... and its body - ) - // decompose primary constructor into the three entities above. - val constrInfo: ConstrInfo = { - val ddef = (stats find (_.symbol.isPrimaryConstructor)) - ddef match { - case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) => - ConstrInfo(ddef, vparams map (_.symbol), rhs) - case x => - abort("no constructor in template: impl = " + impl) - } - } - import constrInfo._ + val hasSpecializedFieldsSym = clazz.info.decl(nme.SPECIALIZED_INSTANCE) + // The constructor of a non-specialized class that has specialized subclasses + // should use `q"${hasSpecializedFieldsSym}()"` to guard the initialization of specialized fields. + val guardSpecializedFieldInit = (hasSpecializedFieldsSym != NoSymbol) && !clazz.hasFlag(SPECIALIZED) + + val isDelayedInitSubclass = clazz isSubClass DelayedInitClass + + // find and dissect primary constructor + val (primaryConstr, primaryConstrParams, primaryConstrBody) = stats collectFirst { + case dd@DefDef(_, _, _, vps :: Nil, _, rhs: Block) if dd.symbol.isPrimaryConstructor => (dd, vps map (_.symbol), rhs) + } getOrElse abort("no constructor in template: impl = " + impl) // The parameter accessor fields which are members of the class val paramAccessors = clazz.constrParamAccessors @@ -509,27 +499,26 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { def parameterNamed(name: Name): Symbol = { def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING) - (constrParams filter matchesName) match { - case Nil => abort(name + " not in " + constrParams) + primaryConstrParams filter matchesName match { + case Nil => abort(name + " not in " + primaryConstrParams) case p :: _ => p } } - /* - * `usesSpecializedField` makes a difference in deciding whether constructor-statements - * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of - * one or more specialized sub-classes. - * - * Given that `usesSpecializedField` isn't read for any other purpose than the one described above, - * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with. - * That way, trips to a map in `specializeTypes` are saved. - */ - var usesSpecializedField: Boolean = false - // A transformer for expressions that go into the constructor - private class IntoCtorTransformer extends Transformer { - - private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz) + object intoConstructor extends Transformer { + /* + * `usesSpecializedField` makes a difference in deciding whether constructor-statements + * should be guarded in a `guardSpecializedFieldInit` class, ie in a class that's the generic super-class of + * one or more specialized sub-classes. + * + * Given that `usesSpecializedField` isn't read for any other purpose than the one described above, + * we skip setting `usesSpecializedField` in case the current class isn't `guardSpecializedFieldInit` to start with. + * That way, trips to a map in `specializeTypes` are saved. 
+ */ + var usesSpecializedField: Boolean = false + + private def isParamRef(sym: Symbol) = sym.isParamAccessor && sym.owner == clazz // Terminology: a stationary location is never written after being read. private def isStationaryParamRef(sym: Symbol) = ( @@ -548,10 +537,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * (b.2) the constructor in the specialized (sub-)class. * (c) isn't part of a DelayedInit subclass. */ - private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym)) + private def canBeSupplanted(sym: Symbol) = !isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym) override def transform(tree: Tree): Tree = tree match { - case Apply(Select(This(_), _), List()) => // references to parameter accessor methods of own class become references to parameters // outer accessors become references to $outer parameter @@ -566,7 +554,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { // references to parameter accessor field of own class become references to parameters gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos - case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField` + case Select(_, _) if guardSpecializedFieldInit => // reasoning behind this guard in the docu of `usesSpecializedField` if (possiblySpecialized(tree.symbol)) { usesSpecializedField = true } @@ -576,18 +564,10 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { super.transform(tree) } - } - - private val intoConstructorTransformer = new IntoCtorTransformer - - // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol - def intoConstructor(oldowner: Symbol, tree: Tree) = - intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol) - - // Should tree be moved in front of super constructor call? - def canBeMoved(tree: Tree) = tree match { - case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR) - case _ => false + // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol + def apply(oldowner: Symbol, tree: Tree) = + if (tree eq EmptyTree) tree + else transform(tree.changeOwner(oldowner -> primaryConstr.symbol)) } // Create an assignment to class field `to` with rhs `from` @@ -629,10 +609,10 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { val classInitStatBuf = new ListBuffer[Tree] // generate code to copy pre-initialized fields - for (stat <- constrBody.stats) { + for (stat <- primaryConstrBody.stats) { constrStatBuf += stat stat match { - case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) => + case ValDef(mods, name, _, _) if mods hasFlag PRESUPER => // stat is the constructor-local definition of the field value val fields = presupers filter (_.getterName == name) assert(fields.length == 1) @@ -643,45 +623,58 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { } } - // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf. 
- for (stat <- stats) stat match { - case DefDef(_,_,_,_,_,rhs) => - // methods with constant result type get literals as their body + + for (stat <- stats) { + val statSym = stat.symbol + + stat match { + // recurse on class definition, store in defBuf + case _: ClassDef => defBuf += new ConstructorTransformer(unit).transform(stat) + // all methods except the primary constructor go into template - stat.symbol.tpe match { - case MethodType(List(), tp @ ConstantType(c)) => - defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp) - case _ => - if (stat.symbol.isPrimaryConstructor) () - else if (stat.symbol.isConstructor) auxConstructorBuf += stat - else defBuf += stat - } - case ValDef(mods, _, _, rhs) if !mods.hasStaticFlag => + case _: DefDef if statSym.isPrimaryConstructor => () + case _: DefDef if statSym.isConstructor => auxConstructorBuf += stat + + // other methods go to defBuf + // methods with ConstantType result get the corresponding Literal for their RHS + case _: DefDef => + val resTp = statSym.info.resultType + def mkLiteral(rhs: Tree) = gen.mkAttributedQualifier(resTp) setPos rhs.pos + + val literalized = + if (resTp.isInstanceOf[ConstantType] && statSym.info.params.isEmpty) deriveDefDef(stat)(mkLiteral) + else stat + + defBuf += literalized + // val defs with constant right-hand sides are eliminated. - // for all other val defs, an empty valdef goes into the template and - // the initializer goes as an assignment into the constructor - // if the val def is an early initialized or a parameter accessor, it goes - // before the superclass constructor call, otherwise it goes after. - // Lazy vals don't get the assignment in the constructor. - if (!stat.symbol.tpe.isInstanceOf[ConstantType]) { - if (rhs != EmptyTree && !stat.symbol.isLazy) { - val rhs1 = intoConstructor(stat.symbol, rhs) - (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign( - stat.symbol, rhs1) + case _: ValDef if statSym.info.isInstanceOf[ConstantType] => () + + // For all other val defs, an empty valdef goes into the template. + // Additionally, non-lazy vals are initialized by an assignment in: + // - the class initializer (static), + // - the constructor, before the super call (early initialized or a parameter accessor), + // - the constructor, after the super call (regular val). + case ValDef(mods, _, _, rhs) => + val initializingRhs = + if (statSym.isLazy) EmptyTree + else if (!mods.hasStaticFlag) intoConstructor(statSym, rhs) + else rhs + + if (initializingRhs ne EmptyTree) { + val initPhase = + if (mods hasFlag STATIC) classInitStatBuf + else if (mods hasFlag PRESUPER | PARAMACCESSOR) constrPrefixBuf + else constrStatBuf + + initPhase += mkAssign(statSym, initializingRhs) } + defBuf += deriveValDef(stat)(_ => EmptyTree) - } - case ValDef(_, _, _, rhs) => - // Add static initializer statements to classInitStatBuf and remove the rhs from the val def. 
- classInitStatBuf += mkAssign(stat.symbol, rhs) - defBuf += deriveValDef(stat)(_ => EmptyTree) - - case ClassDef(_, _, _, _) => - // classes are treated recursively, and left in the template - defBuf += new ConstructorTransformer(unit).transform(stat) - case _ => + // all other statements go into the constructor - constrStatBuf += intoConstructor(impl.symbol, stat) + case _ => constrStatBuf += intoConstructor(impl.symbol, stat) + } } populateOmittables() @@ -716,12 +709,12 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { rewriteDelayedInit() // Assemble final constructor - defBuf += deriveDefDef(constr)(_ => + defBuf += deriveDefDef(primaryConstr)(_ => treeCopy.Block( - constrBody, + primaryConstrBody, paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats ::: guardSpecializedInitializer(remainingConstrStats), - constrBody.expr)) + primaryConstrBody.expr)) // Followed by any auxiliary constructors defBuf ++= auxConstructorBuf @@ -732,6 +725,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { // Eliminate all field definitions that can be dropped from template val templateWithoutOmittables: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustBeKept(stat.symbol))) + // Add the static initializers val transformed: Template = addStaticInits(templateWithoutOmittables, classInitStatBuf, localTyper) -- cgit v1.2.3 From 6fd1fdb4aadf309fb60540d52108445f71272646 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 24 Aug 2015 12:56:24 -0700 Subject: Uncurry does Literal RHS for ConstantType def, not Constructors Uncurry seems more logical to me. Ideally, Erasure would erase ConstantTypes, since they do not exist in bytecode. In any case, doing this earlier, when we're rewriting method anyway, simplifies constructors, which should be focussing on, well, constructors (& fields). --- .../scala/tools/nsc/transform/Constructors.scala | 16 +++------------- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 18 +++++++++++++++--- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 2828e87bd8..6ecdd2b195 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -631,21 +631,11 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { // recurse on class definition, store in defBuf case _: ClassDef => defBuf += new ConstructorTransformer(unit).transform(stat) - // all methods except the primary constructor go into template + // methods (except primary constructor) go into template + // (non-primary ctors --> auxConstructorBuf / regular defs --> defBuf) case _: DefDef if statSym.isPrimaryConstructor => () case _: DefDef if statSym.isConstructor => auxConstructorBuf += stat - - // other methods go to defBuf - // methods with ConstantType result get the corresponding Literal for their RHS - case _: DefDef => - val resTp = statSym.info.resultType - def mkLiteral(rhs: Tree) = gen.mkAttributedQualifier(resTp) setPos rhs.pos - - val literalized = - if (resTp.isInstanceOf[ConstantType] && statSym.info.params.isEmpty) deriveDefDef(stat)(mkLiteral) - else stat - - defBuf += literalized + case _: DefDef => defBuf += stat // val defs with constant right-hand sides are eliminated. 
case _: ValDef if statSym.info.isInstanceOf[ConstantType] => () diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 72e2174bf8..79a77d7a0c 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -582,6 +582,7 @@ abstract class UnCurry extends InfoTransform } case dd @ DefDef(_, _, _, vparamss0, _, rhs0) => + val ddSym = dd.symbol val (newParamss, newRhs): (List[List[ValDef]], Tree) = if (dependentParamTypeErasure isDependent dd) dependentParamTypeErasure erase dd @@ -593,11 +594,22 @@ abstract class UnCurry extends InfoTransform (vparamss1, rhs0) } + // A no-arg method with ConstantType result type can safely be reduced to the corresponding Literal + // (only pure methods are typed as ConstantType). We could also do this for methods with arguments, + // after ensuring the arguments are not referenced. + val literalRhsIfConst = + if (newParamss.head.isEmpty) { // We know newParamss.length == 1 from above + ddSym.info.resultType match { + case tp@ConstantType(value) => Literal(value) setType tp setPos newRhs.pos // inlining of gen.mkAttributedQualifier(tp) + case _ => newRhs + } + } else newRhs + val flatdd = copyDefDef(dd)( vparamss = newParamss, - rhs = nonLocalReturnKeys get dd.symbol match { - case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol)) - case None => newRhs + rhs = nonLocalReturnKeys get ddSym match { + case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(literalRhsIfConst, k, ddSym)) + case None => literalRhsIfConst } ) addJavaVarargsForwarders(dd, flatdd) -- cgit v1.2.3 From b447e0f767079493acae46f5ba09b91aefa220c3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 27 Aug 2015 22:19:10 -0400 Subject: SI-9381 remove last vestiges of sun.misc.Unsafe now that Akka 2.4 doesn't need it anymore --- src/library/scala/concurrent/util/Unsafe.java | 38 --------------------------- 1 file changed, 38 deletions(-) delete mode 100644 src/library/scala/concurrent/util/Unsafe.java diff --git a/src/library/scala/concurrent/util/Unsafe.java b/src/library/scala/concurrent/util/Unsafe.java deleted file mode 100644 index 73739e377d..0000000000 --- a/src/library/scala/concurrent/util/Unsafe.java +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent.util; -import java.lang.reflect.Field; - -// TODO: remove once akka no longer needs it, hopefully by 2.12.0-M3! 
-@Deprecated -public final class Unsafe { - @Deprecated - public final static sun.misc.Unsafe instance; - static { - try { - sun.misc.Unsafe found = null; - for(Field field : sun.misc.Unsafe.class.getDeclaredFields()) { - if (field.getType() == sun.misc.Unsafe.class) { - field.setAccessible(true); - found = (sun.misc.Unsafe) field.get(null); - break; - } - } - if (found == null) throw new IllegalStateException("Can't find instance of sun.misc.Unsafe"); - else instance = found; - } catch(Throwable t) { - throw new ExceptionInInitializerError(t); - } - } -} - -// Scala version: -// classOf[sun.misc.Unsafe].getDeclaredFields.filter(_.getType == classOf[sun.misc.Unsafe]).headOption.map { field => -// field.setAccessible(true); field.get(null).asInstanceOf[sun.misc.Unsafe] -// } getOrElse (throw new IllegalStateException("Can't find instance of sun.misc.Unsafe")) -- cgit v1.2.3 From 2c16790ae48debe6e8b07b7ece86b0c665359cfd Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Sun, 30 Aug 2015 17:27:05 -0700 Subject: SI-9379 Added toString to .zipped to allow Stream etc to short-circuit Tuple2Zipped and Tuple3Zipped would try to compute a hash code when .toString was called on them. This overrides toString to print (collection1, collection2).zipped instead, using the collection's own toString method. This allows collections that have a toString but not a hashCode (such as Iterator.from(0) and s = 1 #:: s) to print out as they usually do. JUnit test to verify the deferral to collections' .toString. --- src/library/scala/runtime/Tuple2Zipped.scala | 2 ++ src/library/scala/runtime/Tuple3Zipped.scala | 2 ++ test/junit/scala/collection/immutable/StreamTest.scala | 16 ++++++++++++++++ 3 files changed, 20 insertions(+) diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index 512c4fbc27..1c432b0403 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -110,6 +110,8 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 return } } + + override def toString = "(%s, %s).zipped".format(colls._1.toString, colls._2.toString) } object Tuple2Zipped { diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index ffd44acf81..3bab86921b 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -118,6 +118,8 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers return } } + + override def toString: String = "(%s, %s, %s).zipped".format(colls._1.toString, colls._2.toString, colls._3.toString) } object Tuple3Zipped { diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala index fad4e502eb..1b257aabc4 100644 --- a/test/junit/scala/collection/immutable/StreamTest.scala +++ b/test/junit/scala/collection/immutable/StreamTest.scala @@ -107,4 +107,20 @@ class StreamTest { def withFilter_map_properly_lazy_in_tail: Unit = { assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2)) } + + @Test + def test_si9379() { + class Boom { + private var i = -1 + def inc = { + i += 1 + if (i > 1000) throw new NoSuchElementException("Boom! 
Too many elements!") + i + } + } + val b = new Boom + val s = Stream.continually(b.inc) + // zipped.toString must allow s to short-circuit evaluation + assertTrue((s, s).zipped.toString contains s.toString) + } } -- cgit v1.2.3 From 66a316a4acdb2584ef9d85f15b950f12c94d909c Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 2 Sep 2015 11:11:11 -0700 Subject: Streamline MethodSynthesis & Namers Give Getter control over whether a setter is needed. For now, only mutable ValDefs entail setters. In the new trait encoding, a trait val will also receive a setter from the start. Similarly, distinguish whether to derive a field from deferredness of the val. (Later, fields will not be emitted for traits, deferred or not.) --- .../scala/reflect/reify/phases/Reshape.scala | 3 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 127 +++++++++++++-------- .../scala/tools/nsc/typechecker/Namers.scala | 10 +- .../scala/reflect/internal/Definitions.scala | 2 + 4 files changed, 84 insertions(+), 58 deletions(-) diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 6c073c0b4c..091d42bb6d 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -325,8 +325,7 @@ trait Reshape { if (reifyDebug) println(s"reconstructed lazy val is $vdef1") vdef1::Nil case ddef: DefDef if ddef.symbol.isLazy => - def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty - if (hasUnitType(ddef.symbol)) { + if (isUnitType(ddef.symbol.info)) { // since lazy values of type Unit don't have val's // we need to create them from scratch toPreTyperLazyVal(ddef) :: Nil diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index f3856db552..5f5e13951d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -134,28 +134,35 @@ trait MethodSynthesis { ImplicitClassWrapper(tree).createAndEnterSymbol() } + // TODO: see if we can link symbol creation & tree derivation by sharing the Field/Getter/Setter factories def enterGetterSetter(tree: ValDef) { val ValDef(mods, name, _, _) = tree if (nme.isSetterName(name)) ValOrValWithSetterSuffixError(tree) - tree.symbol = ( + tree.symbol = if (mods.isLazy) { val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol() enterLazyVal(tree, lazyValGetter) } else { if (mods.isPrivateLocal) PrivateThisCaseClassParameterError(tree) - val getter = Getter(tree).createAndEnterSymbol() + val getter = Getter(tree) + val getterSym = getter.createAndEnterSymbol() + // Create the setter if necessary. - if (mods.isMutable) + if (getter.needsSetter) Setter(tree).createAndEnterSymbol() - // If abstract, the tree gets the getter's symbol. Otherwise, create a field. - if (mods.isDeferred) getter setPos tree.pos + // If the getter's abstract the tree gets the getter's symbol, + // otherwise, create a field (assume the getter requires storage). 
+ // NOTE: we cannot look at symbol info, since we're in the process of deriving them + // (luckily, they only matter for lazy vals, which we've ruled out in this else branch, + // and `doNotDeriveField` will skip them if `!mods.isLazy`) + if (Field.noFieldFor(tree)) getterSym setPos tree.pos else enterStrictVal(tree) } - ) + enterBeans(tree) } @@ -177,11 +184,11 @@ trait MethodSynthesis { } def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { - case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) => + case vd @ ValDef(mods, name, tpt, rhs) if deriveAccessorTrees(vd) => // If we don't save the annotations, they seem to wander off. val annotations = stat.symbol.initialize.annotations val trees = ( - allValDefDerived(vd) + (field(vd) ::: standardAccessors(vd) ::: beanAccessors(vd)) map (acc => atPos(vd.pos.focus)(acc derive annotations)) filterNot (_ eq EmptyTree) ) @@ -221,11 +228,14 @@ trait MethodSynthesis { stat :: Nil } - def standardAccessors(vd: ValDef): List[DerivedFromValDef] = ( - if (vd.mods.isMutable && !vd.mods.isLazy) List(Getter(vd), Setter(vd)) - else if (vd.mods.isLazy) List(LazyValGetter(vd)) - else List(Getter(vd)) - ) + def standardAccessors(vd: ValDef): List[DerivedFromValDef] = + if (vd.mods.isLazy) List(LazyValGetter(vd)) + else { + val getter = Getter(vd) + if (getter.needsSetter) List(getter, Setter(vd)) + else List(getter) + } + def beanAccessors(vd: ValDef): List[DerivedFromValDef] = { val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil if (vd.symbol hasAnnotation BeanPropertyAttr) @@ -234,15 +244,8 @@ trait MethodSynthesis { BooleanBeanGetter(vd) :: setter else Nil } - def allValDefDerived(vd: ValDef) = { - val field = if (vd.mods.isDeferred || (vd.mods.isLazy && hasUnitType(vd.symbol))) Nil - else List(Field(vd)) - field ::: standardAccessors(vd) ::: beanAccessors(vd) - } - // Take into account annotations so that we keep annotated unit lazy val - // to get better error message already from the cps plugin itself - def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty + def field(vd: ValDef): List[Field] = if (Field.noFieldFor(vd)) Nil else List(Field(vd)) /** This trait assembles what's needed for synthesizing derived methods. * Important: Typically, instances of this trait are created TWICE for each derived @@ -260,7 +263,6 @@ trait MethodSynthesis { def name: TermName /** The flags that are retained from the original symbol */ - def flagsMask: Long /** The flags that the derived symbol has in addition to those retained from @@ -284,8 +286,9 @@ trait MethodSynthesis { def enclClass: Symbol // Final methods to make the rest easier to reason about. 
- final def mods = tree.mods - final def basisSym = tree.symbol + final def mods = tree.mods + final def basisSym = tree.symbol + final def derivedMods = mods & flagsMask | flagsExtra } sealed trait DerivedFromClassDef extends DerivedFromMemberDef { @@ -305,7 +308,6 @@ trait MethodSynthesis { /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */ final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter) final def fieldSelection = Select(This(enclClass), basisSym) - final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil) def derivedSym: Symbol = tree.symbol def derivedTree: Tree = EmptyTree @@ -314,8 +316,8 @@ trait MethodSynthesis { def isDeferred = mods.isDeferred def keepClean = false // whether annotations whose definitions are not meta-annotated should be kept. def validate() { } - def createAndEnterSymbol(): Symbol = { - val sym = owner.newMethod(name, tree.pos.focus, (tree.mods.flags & flagsMask) | flagsExtra) + def createAndEnterSymbol(): MethodSymbol = { + val sym = owner.newMethod(name, tree.pos.focus, derivedMods.flags) setPrivateWithin(tree, sym) enterInScope(sym) sym setInfo completer(sym) @@ -333,7 +335,8 @@ trait MethodSynthesis { } } sealed trait DerivedGetter extends DerivedFromValDef { - // TODO + // A getter must be accompanied by a setter if the ValDef is mutable. + def needsSetter = mods.isMutable } sealed trait DerivedSetter extends DerivedFromValDef { override def isSetter = true @@ -341,10 +344,12 @@ trait MethodSynthesis { case (p :: Nil) :: _ => p case _ => NoSymbol } - private def setterRhs = ( - if (mods.isDeferred || derivedSym.isOverloaded) EmptyTree + + // TODO: when is `derivedSym.isOverloaded`??? is it always an error? + private def setterRhs = + if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree else Assign(fieldSelection, Ident(setterParam)) - ) + private def setterDef = DefDef(derivedSym, setterRhs) override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef } @@ -363,8 +368,7 @@ trait MethodSynthesis { context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}") result } - def derivedTree: DefDef = - factoryMeth(mods & flagsMask | flagsExtra, name, tree) + def derivedTree: DefDef = factoryMeth(derivedMods, name, tree) def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC def flagsMask: Long = AccessFlags def name: TermName = tree.name.toTermName @@ -385,8 +389,9 @@ trait MethodSynthesis { } } case class Getter(tree: ValDef) extends BaseGetter(tree) { - override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getterIn(enclClass) - private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection + override def derivedSym = if (Field.noFieldFor(tree)) basisSym else basisSym.getterIn(enclClass) + private def derivedRhs = if (Field.noFieldFor(tree)) tree.rhs else fieldSelection + private def derivedTpt = { // For existentials, don't specify a type for the getter, even one derived // from the symbol! This leads to incompatible existentials for the field and @@ -400,19 +405,21 @@ trait MethodSynthesis { // Range position errors ensue if we don't duplicate this in some // circumstances (at least: concrete vals with existential types.) 
case ExistentialType(_, _) => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus) - case _ if mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field + case _ if isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field case tp => TypeTree(tp) } tpt setPos tree.tpt.pos.focus } override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt) } + /** Implements lazy value accessors: - * - for lazy values of type Unit and all lazy fields inside traits, - * the rhs is the initializer itself - * - for all other lazy values z the accessor is a block of this form: - * { z = ; z } where z can be an identifier or a field. - */ + * - for lazy values of type Unit and all lazy fields inside traits, + * the rhs is the initializer itself, because we'll just "compute" the result on every access + * ("computing" unit / constant type is free -- the side-effect is still only run once, using the init bitmap) + * - for all other lazy values z the accessor is a block of this form: + * { z = ; z } where z can be an identifier or a field. + */ case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) { class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol) extends ChangeOwnerTraverser(oldowner, newowner) { @@ -432,10 +439,10 @@ trait MethodSynthesis { override def derivedTree: DefDef = { val ValDef(_, _, tpt0, rhs0) = tree val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0) - val body = ( - if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1 + val body = + if (tree.symbol.owner.isTrait || Field.noFieldFor(tree)) rhs1 // TODO move tree.symbol.owner.isTrait into noFieldFor else gen.mkAssignAndReturn(basisSym, rhs1) - ) + derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)) // ValDef will have its position focused whereas DefDef will have original correct rangepos @@ -454,6 +461,24 @@ trait MethodSynthesis { override def derivedSym = basisSym.setterIn(enclClass) } + + object Field { + // No field for these vals (either never emitted or eliminated later on): + // - abstract vals have no value we could store (until they become concrete, potentially) + // - lazy vals of type Unit + // - [Emitted, later removed during AddInterfaces/Mixins] concrete vals in traits can't have a field + // - [Emitted, later removed during Constructors] a concrete val with a statically known value (Unit / ConstantType) + // performs its side effect according to lazy/strict semantics, but doesn't need to store its value + // each access will "evaluate" the RHS (a literal) again + // We would like to avoid emitting unnecessary fields, but the required knowledge isn't available until after typer. + // The only way to avoid emitting & suppressing, is to not emit at all until we are sure to need the field, as dotty does. + // NOTE: do not look at `vd.symbol` when called from `enterGetterSetter` (luckily, that call-site implies `!mods.isLazy`), + // as the symbol info is in the process of being created then. 
+ // TODO: harmonize tree & symbol creation + // TODO: the `def field` call-site does not tollerate including `|| vd.symbol.owner.isTrait` --> tests break + def noFieldFor(vd: ValDef) = vd.mods.isDeferred || (vd.mods.isLazy && isUnitType(vd.symbol.info)) // || vd.symbol.owner.isTrait)) + } + case class Field(tree: ValDef) extends DerivedFromValDef { def name = tree.localName def category = FieldTargetClass @@ -462,11 +487,13 @@ trait MethodSynthesis { // By default annotations go to the field, except if the field is // generated for a class parameter (PARAMACCESSOR). override def keepClean = !mods.isParamAccessor - override def derivedTree = ( - if (mods.isDeferred) EmptyTree - else if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus) + + // handle lazy val first for now (we emit a Field even though we probably shouldn't...) + override def derivedTree = + if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus) + else if (Field.noFieldFor(tree)) EmptyTree else copyValDef(tree)(mods = mods | flagsExtra, name = this.name) - ) + } case class Param(tree: ValDef) extends DerivedFromValDef { def name = tree.name @@ -501,12 +528,12 @@ trait MethodSynthesis { // Derives a tree without attempting to use the original tree's symbol. override def derivedTree = { atPos(tree.pos.focus) { - DefDef(derivedMods, name, Nil, ListOfNil, tree.tpt.duplicate, + DefDef(derivedMods mapAnnotations (_ => Nil), name, Nil, ListOfNil, tree.tpt.duplicate, if (isDeferred) EmptyTree else Select(This(owner), tree.name) ) } } - override def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree) + override def createAndEnterSymbol(): MethodSymbol = enterSyntheticSym(derivedTree).asInstanceOf[MethodSymbol] } case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { } case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 4ad81b60ae..f54b330284 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -118,7 +118,7 @@ trait Namers extends MethodSynthesis { // PRIVATE | LOCAL are fields generated for primary constructor arguments // @PP: ...or fields declared as private[this]. PARAMACCESSOR marks constructor arguments. // Neither gets accessors so the code is as far as I know still correct. 
- def noEnterGetterSetter(vd: ValDef) = !vd.mods.isLazy && ( + def deriveAccessors(vd: ValDef) = vd.mods.isLazy || !( !owner.isClass || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor) || (vd.name startsWith nme.OUTER) @@ -126,7 +126,7 @@ trait Namers extends MethodSynthesis { || isEnumConstant(vd) ) - def noFinishGetterSetter(vd: ValDef) = ( + def deriveAccessorTrees(vd: ValDef) = !( (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this] || vd.symbol.isModuleVar || isEnumConstant(vd)) @@ -656,10 +656,8 @@ trait Namers extends MethodSynthesis { } def enterValDef(tree: ValDef) { - if (noEnterGetterSetter(tree)) - assignAndEnterFinishedSymbol(tree) - else - enterGetterSetter(tree) + if (deriveAccessors(tree)) enterGetterSetter(tree) + else assignAndEnterFinishedSymbol(tree) if (isEnumConstant(tree)) tree.symbol setInfo ConstantType(Constant(tree.symbol)) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 02fa3c882b..06fc453ed5 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -233,6 +233,8 @@ trait Definitions extends api.StandardDefinitions { || tp =:= AnyRefTpe ) + def isUnitType(tp: Type) = tp.typeSymbol == UnitClass && tp.annotations.isEmpty + def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info) def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match { case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe) -- cgit v1.2.3 From cc70cfb5024729935b5dd9be385847bd43c90b04 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 24 Aug 2015 18:47:31 -0700 Subject: Simplify decision whether to derive accessors Originally (modulo renaming & reduction of double negation in previous commit): ``` def deriveAccessors(vd: ValDef) = vd.mods.isLazy || !( !owner.isClass || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor) // this is an error -- now checking first || (vd.name startsWith nme.OUTER) || (context.unit.isJava) // pulled out to caller || isEnumConstant(vd) ) def deriveAccessorTrees(vd: ValDef) = !( (vd.mods.isPrivateLocal && !vd.mods.isLazy) // lazy was pulled out to outer disjunction || vd.symbol.isModuleVar // pulled out to caller || isEnumConstant(vd)) ``` With changes in comments above, these conditions are now captured by one method. 
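For comparison, the merged condition as it lands in Namers.scala (taken verbatim from the hunk below):

```
    // All lazy vals need accessors, including those owned by terms (e.g., in method) or private[this] in a class
    def deriveAccessors(vd: ValDef) = vd.mods.isLazy || (owner.isClass && deriveAccessorsInClass(vd))

    private def deriveAccessorsInClass(vd: ValDef) =
      !vd.mods.isPrivateLocal &&          // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors
      !(vd.name startsWith nme.OUTER)  && // outer accessors are added later, in explicitouter
      !isEnumConstant(vd)                 // enums can only occur in classes, so only check here
```

The module-var exclusion that used to live in `deriveAccessorTrees` moves to its single call site in MethodSynthesis, which now guards on `deriveAccessors(vd) && !vd.symbol.isModuleVar`.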
--- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 13 +++------ .../scala/tools/nsc/typechecker/Namers.scala | 31 +++++++++++----------- 3 files changed, 19 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b0bd9977a8..1b2e7f628e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1105,7 +1105,7 @@ trait ContextErrors { def GetterDefinedTwiceError(getter: Symbol) = issueSymbolTypeError(getter, getter+" is defined twice") - def ValOrValWithSetterSuffixError(tree: Tree) = + def ValOrVarWithSetterSuffixError(tree: Tree) = issueNormalTypeError(tree, "Names of vals or vars may not end in `_='") def PrivateThisCaseClassParameterError(tree: Tree) = diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 5f5e13951d..5fdddf0641 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -135,18 +135,12 @@ trait MethodSynthesis { } // TODO: see if we can link symbol creation & tree derivation by sharing the Field/Getter/Setter factories - def enterGetterSetter(tree: ValDef) { - val ValDef(mods, name, _, _) = tree - if (nme.isSetterName(name)) - ValOrValWithSetterSuffixError(tree) - + def enterGetterSetter(tree: ValDef): Unit = { tree.symbol = - if (mods.isLazy) { + if (tree.mods.isLazy) { val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol() enterLazyVal(tree, lazyValGetter) } else { - if (mods.isPrivateLocal) - PrivateThisCaseClassParameterError(tree) val getter = Getter(tree) val getterSym = getter.createAndEnterSymbol() @@ -163,7 +157,6 @@ trait MethodSynthesis { else enterStrictVal(tree) } - enterBeans(tree) } @@ -184,7 +177,7 @@ trait MethodSynthesis { } def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { - case vd @ ValDef(mods, name, tpt, rhs) if deriveAccessorTrees(vd) => + case vd @ ValDef(mods, name, tpt, rhs) if deriveAccessors(vd) && !vd.symbol.isModuleVar => // If we don't save the annotations, they seem to wander off. val annotations = stat.symbol.initialize.annotations val trees = ( diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index f54b330284..254cb7111c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -115,21 +115,14 @@ trait Namers extends MethodSynthesis { TypeSigError(tree, ex) alt } - // PRIVATE | LOCAL are fields generated for primary constructor arguments - // @PP: ...or fields declared as private[this]. PARAMACCESSOR marks constructor arguments. - // Neither gets accessors so the code is as far as I know still correct. 
- def deriveAccessors(vd: ValDef) = vd.mods.isLazy || !( - !owner.isClass - || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor) - || (vd.name startsWith nme.OUTER) - || (context.unit.isJava) - || isEnumConstant(vd) - ) - def deriveAccessorTrees(vd: ValDef) = !( - (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this] - || vd.symbol.isModuleVar - || isEnumConstant(vd)) + // All lazy vals need accessors, including those owned by terms (e.g., in method) or private[this] in a class + def deriveAccessors(vd: ValDef) = vd.mods.isLazy || (owner.isClass && deriveAccessorsInClass(vd)) + + private def deriveAccessorsInClass(vd: ValDef) = + !vd.mods.isPrivateLocal && // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors + !(vd.name startsWith nme.OUTER) && // outer accessors are added later, in explicitouter + !isEnumConstant(vd) // enums can only occur in classes, so only check here /** Determines whether this field holds an enum constant. * To qualify, the following conditions must be met: @@ -655,8 +648,14 @@ trait Namers extends MethodSynthesis { } } - def enterValDef(tree: ValDef) { - if (deriveAccessors(tree)) enterGetterSetter(tree) + def enterValDef(tree: ValDef): Unit = { + val isScala = !context.unit.isJava + if (isScala) { + if (nme.isSetterName(tree.name)) ValOrVarWithSetterSuffixError(tree) + if (tree.mods.isPrivateLocal && tree.mods.isCaseAccessor) PrivateThisCaseClassParameterError(tree) + } + + if (isScala && deriveAccessors(tree)) enterGetterSetter(tree) else assignAndEnterFinishedSymbol(tree) if (isEnumConstant(tree)) -- cgit v1.2.3 From df61ab67d3c32e8e996874206299938f5bd1584d Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 1 Sep 2015 17:06:33 -0700 Subject: Review feedback from retronym --- src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 5fdddf0641..f3632b144d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -338,10 +338,11 @@ trait MethodSynthesis { case _ => NoSymbol } - // TODO: when is `derivedSym.isOverloaded`??? is it always an error? 
- private def setterRhs = + private def setterRhs = { + assert(!derivedSym.isOverloaded, s"Unexpected overloaded setter $derivedSym for $basisSym in $enclClass") if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree else Assign(fieldSelection, Ident(setterParam)) + } private def setterDef = DefDef(derivedSym, setterRhs) override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef -- cgit v1.2.3 From b6f070669534d66b2b01d98d91104f74fe8950e1 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sat, 5 Sep 2015 00:25:42 +0200 Subject: SI-7155 Remove deprecated private s.c.m.AVLTree --- src/library/scala/collection/mutable/AVLTree.scala | 250 --------------------- test/files/scalacheck/avl.scala | 112 --------- 2 files changed, 362 deletions(-) delete mode 100644 src/library/scala/collection/mutable/AVLTree.scala delete mode 100644 test/files/scalacheck/avl.scala diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala deleted file mode 100644 index b63d0aae33..0000000000 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ /dev/null @@ -1,250 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala -package collection -package mutable - -/** - * An immutable AVL Tree implementation formerly used by mutable.TreeSet - * - * @author Lucien Pereira - */ -@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2") -private[mutable] sealed trait AVLTree[+A] extends Serializable { - def balance: Int - - def depth: Int - - def iterator[B >: A]: Iterator[B] = Iterator.empty - - def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false - - /** - * Returns a new tree containing the given element. - * Throws an IllegalArgumentException if element is already present. - * - */ - def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf) - - /** - * Return a new tree which not contains given element. - * - */ - def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = - throw new NoSuchElementException(String.valueOf(value)) - - /** - * Return a tuple containing the smallest element of the provided tree - * and a new tree from which this element has been extracted. - * - */ - def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.") - - /** - * Return a tuple containing the biggest element of the provided tree - * and a new tree from which this element has been extracted. 
- * - */ - def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.") - - def rebalance[B >: A]: AVLTree[B] = this - - def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.") - - def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.") - - def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.") - - def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.") -} - -/** - * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0") - */ -private case object Leaf extends AVLTree[Nothing] { - override val balance: Int = 0 - - override val depth: Int = -1 -} - -/** - * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0") - */ -private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] { - override val balance: Int = right.depth - left.depth - - override val depth: Int = math.max(left.depth, right.depth) + 1 - - override def iterator[B >: A]: Iterator[B] = new AVLIterator(this) - - override def contains[B >: A](value: B, ordering: Ordering[B]) = { - val ord = ordering.compare(value, data) - if (0 == ord) - true - else if (ord < 0) - left.contains(value, ordering) - else - right.contains(value, ordering) - } - - /** - * Returns a new tree containing the given element. - * Throws an IllegalArgumentException if element is already present. - * - */ - override def insert[B >: A](value: B, ordering: Ordering[B]) = { - val ord = ordering.compare(value, data) - if (0 == ord) - throw new IllegalArgumentException() - else if (ord < 0) - Node(data, left.insert(value, ordering), right).rebalance - else - Node(data, left, right.insert(value, ordering)).rebalance - } - - /** - * Return a new tree which not contains given element. - * - */ - override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = { - val ord = ordering.compare(value, data) - if(ord == 0) { - if (Leaf == left) { - if (Leaf == right) { - Leaf - } else { - val (min, newRight) = right.removeMin - Node(min, left, newRight).rebalance - } - } else { - val (max, newLeft) = left.removeMax - Node(max, newLeft, right).rebalance - } - } else if (ord < 0) { - Node(data, left.remove(value, ordering), right).rebalance - } else { - Node(data, left, right.remove(value, ordering)).rebalance - } - } - - /** - * Return a tuple containing the smallest element of the provided tree - * and a new tree from which this element has been extracted. - * - */ - override def removeMin[B >: A]: (B, AVLTree[B]) = { - if (Leaf == left) - (data, right) - else { - val (min, newLeft) = left.removeMin - (min, Node(data, newLeft, right).rebalance) - } - } - - /** - * Return a tuple containing the biggest element of the provided tree - * and a new tree from which this element has been extracted. 
- * - */ - override def removeMax[B >: A]: (B, AVLTree[B]) = { - if (Leaf == right) - (data, left) - else { - val (max, newRight) = right.removeMax - (max, Node(data, left, newRight).rebalance) - } - } - - override def rebalance[B >: A] = { - if (-2 == balance) { - if (1 == left.balance) - doubleRightRotation - else - rightRotation - } else if (2 == balance) { - if (-1 == right.balance) - doubleLeftRotation - else - leftRotation - } else { - this - } - } - - override def leftRotation[B >: A] = { - if (Leaf != right) { - val r: Node[A] = right.asInstanceOf[Node[A]] - Node(r.data, Node(data, left, r.left), r.right) - } else sys.error("Should not happen.") - } - - override def rightRotation[B >: A] = { - if (Leaf != left) { - val l: Node[A] = left.asInstanceOf[Node[A]] - Node(l.data, l.left, Node(data, l.right, right)) - } else sys.error("Should not happen.") - } - - override def doubleLeftRotation[B >: A] = { - if (Leaf != right) { - val r: Node[A] = right.asInstanceOf[Node[A]] - // Let's save an instanceOf by 'inlining' the left rotation - val rightRotated = r.rightRotation - Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right) - } else sys.error("Should not happen.") - } - - override def doubleRightRotation[B >: A] = { - if (Leaf != left) { - val l: Node[A] = left.asInstanceOf[Node[A]] - // Let's save an instanceOf by 'inlining' the right rotation - val leftRotated = l.leftRotation - Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right)) - } else sys.error("Should not happen.") - } -} - -/** - * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0") - */ -private class AVLIterator[A](root: Node[A]) extends Iterator[A] { - val stack = mutable.ArrayStack[Node[A]](root) - diveLeft() - - private def diveLeft(): Unit = { - if (Leaf != stack.head.left) { - val left: Node[A] = stack.head.left.asInstanceOf[Node[A]] - stack.push(left) - diveLeft() - } - } - - private def engageRight(): Unit = { - if (Leaf != stack.head.right) { - val right: Node[A] = stack.head.right.asInstanceOf[Node[A]] - stack.pop() - stack.push(right) - diveLeft() - } else - stack.pop() - } - - override def hasNext: Boolean = !stack.isEmpty - - override def next(): A = { - if (stack.isEmpty) - throw new NoSuchElementException() - else { - val result = stack.head.data - // Let's maintain stack for the next invocation - engageRight() - result - } - } -} diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala deleted file mode 100644 index 4cfacaf407..0000000000 --- a/test/files/scalacheck/avl.scala +++ /dev/null @@ -1,112 +0,0 @@ -import org.scalacheck.Gen -import org.scalacheck.Prop.forAll -import org.scalacheck.Properties - -package scala.collection.mutable { - - /** - * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1] - */ - abstract class AVLTreeTest(name: String) extends Properties(name) { - - def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2) - - def capacityMax(depth: Int): Int = `2^`(depth+1) - 1 - - def minDepthForCapacity(x: Int): Int = { - var depth = 0 - while(capacityMax(depth) < x) - depth += 1 - depth - } - - def numberOfElementsInLeftSubTree(n: Int): collection.immutable.IndexedSeq[Int] = { - val mid = n/2 + n%2 - ((1 until mid) - .filter { i => math.abs(minDepthForCapacity(i) - minDepthForCapacity(n-i)) < 2 } - .flatMap { i => Seq(i, n-(i+1)) }).toIndexedSeq.distinct - } - - def 
makeAllBalancedTree[A](elements: List[A]): List[AVLTree[A]] = elements match { - case Nil => Leaf::Nil - case first::Nil => Node(first, Leaf, Leaf)::Nil - case first::second::Nil => Node(second, Node(first, Leaf, Leaf), Leaf)::Node(first, Leaf, Node(second, Leaf, Leaf))::Nil - case first::second::third::Nil => Node(second, Node(first, Leaf, Leaf), Node(third, Leaf, Leaf))::Nil - case _ => { - val combinations = for { - left <- numberOfElementsInLeftSubTree(elements.size) - root = elements(left) - right = elements.size - (left + 1) - } yield (root, left, right) - (combinations.flatMap(triple => for { - l <- makeAllBalancedTree(elements.take(triple._2)) - r <- makeAllBalancedTree(elements.takeRight(triple._3)) - } yield Node(triple._1, l, r))).toList - } - } - - def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for { - size <- org.scalacheck.Gen.choose(20, 25) - elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000)) - selected <- org.scalacheck.Gen.choose(0, 1000) - } yield { - // selected mustn't be in elements already - val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2)) - (selected*2+1, list) - } - - def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for { - size <- org.scalacheck.Gen.choose(20, 25) - elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000)) - e = elements.sorted.distinct - selected <- org.scalacheck.Gen.choose(0, e.size-1) - } yield { - // selected must be in elements already - val list = makeAllBalancedTree(e) - (e(selected), list) - } - } - - trait AVLInvariants { - self: AVLTreeTest => - - def isBalanced[A](t: AVLTree[A]): Boolean = t match { - case node: Node[A] => math.abs(node.balance) < 2 && (List(node.left, node.right) forall isBalanced) - case Leaf => true - } - - def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) { - case (selected: Int, trees: List[AVLTree[Int]]) => - trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b) - } - - property("Every tree is initially balanced.") = setup(isBalanced) - } - - object TestInsert extends AVLTreeTest("Insert") with AVLInvariants { - import math.Ordering.Int - property("`insert` creates a new tree containing the given element. The tree remains balanced.") = forAll(genInput) { - case (selected: Int, trees: List[AVLTree[Int]]) => - trees.map(tree => { - val modifiedTree = tree.insert(selected, Int) - modifiedTree.contains(selected, Int) && isBalanced(modifiedTree) - }).fold(true)((a, b) => a && b) - } - } - - object TestRemove extends AVLTreeTest("Remove") with AVLInvariants { - import math.Ordering.Int - property("`remove` creates a new tree without the given element. The tree remains balanced.") = forAll(genInputDelete) { - case (selected: Int, trees: List[AVLTree[Int]]) => - trees.map(tree => { - val modifiedTree = tree.remove(selected, Int) - tree.contains(selected, Int) && !modifiedTree.contains(selected, Int) && isBalanced(modifiedTree) - }).fold(true)((a, b) => a && b) - } - } -} - -object Test extends Properties("AVL") { - include(scala.collection.mutable.TestInsert) - include(scala.collection.mutable.TestRemove) -} -- cgit v1.2.3 From ad17b552b8f7c3a435ad8de71cd7268f905a1b8d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 8 Sep 2015 13:23:22 -0400 Subject: Merge remote-tracking branch 'origin/2.11.x' into 2.12.x only trivial merge conflicts here. 
not dealing with PR #4333 in this merge because there is a substantial conflict there -- so that's why I stopped at 63daba33ae99471175e9d7b20792324615f5999b for now --- CONTRIBUTING.md | 4 +- README.md | 76 ++++++++++---------- docs/TODO | 2 +- scripts/jobs/integrate/bootstrap | 6 +- spec/01-lexical-syntax.md | 7 +- spec/05-classes-and-objects.md | 2 +- spec/06-expressions.md | 2 +- spec/07-implicits.md | 2 +- spec/12-the-scala-standard-library.md | 6 +- spec/15-changelog.md | 4 +- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 10 +-- .../scala/tools/nsc/backend/icode/GenICode.scala | 4 +- .../tools/nsc/backend/icode/ICodeCheckers.scala | 2 +- .../tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 4 +- .../scala/tools/nsc/backend/jvm/BTypes.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 2 +- .../backend/jvm/analysis/NullnessAnalyzer.scala | 2 +- .../nsc/backend/jvm/opt/ClosureOptimizer.scala | 2 +- .../scala/tools/nsc/backend/jvm/opt/Inliner.scala | 6 +- src/compiler/scala/tools/nsc/io/Jar.scala | 2 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- .../scala/tools/nsc/transform/Constructors.scala | 2 +- .../scala/tools/nsc/transform/Delambdafy.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 41 ++++++++++- .../scala/tools/nsc/typechecker/Macros.scala | 9 ++- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 9 ++- .../scala/tools/nsc/typechecker/Typers.scala | 6 +- src/compiler/scala/tools/nsc/util/ClassPath.scala | 2 +- .../scala/tools/reflect/ReflectGlobal.scala | 16 +++++ src/library-aux/scala/Any.scala | 2 +- src/library/scala/AnyVal.scala | 2 +- src/library/scala/Equals.scala | 5 +- src/library/scala/Option.scala | 2 +- .../scala/collection/immutable/HashSet.scala | 1 + .../scala/collection/immutable/ListSet.scala | 2 + .../scala/collection/immutable/MapLike.scala | 5 ++ src/library/scala/collection/immutable/Set.scala | 34 +++++++-- .../scala/collection/immutable/SortedMap.scala | 6 ++ .../scala/collection/mutable/PriorityQueue.scala | 15 +++- src/library/scala/concurrent/SyncVar.scala | 2 +- src/library/scala/io/Codec.scala | 2 +- .../reflect/ClassManifestDeprecatedApis.scala | 4 +- src/library/scala/runtime/ScalaRunTime.scala | 2 +- src/library/scala/util/Sorting.scala | 2 +- src/reflect/scala/reflect/api/Types.scala | 2 +- .../reflect/internal/ClassfileConstants.scala | 2 +- src/reflect/scala/reflect/internal/Kinds.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 4 +- .../scala/reflect/internal/transform/UnCurry.scala | 22 ++++-- .../scala/reflect/runtime/JavaUniverseForce.scala | 2 +- src/repl/scala/tools/nsc/interpreter/IMain.scala | 6 +- .../scala/tools/nsc/doc/html/page/Template.scala | 2 +- .../nsc/doc/html/page/diagram/DotRunner.scala | 2 +- test/files/neg/name-lookup-stable.check | 2 +- test/files/neg/name-lookup-stable.scala | 2 +- test/files/neg/t5376.scala | 2 +- test/files/neg/t8597b.scala | 2 +- test/files/neg/t8675b.scala | 2 +- test/files/neg/virtpatmat_exhaust_compound.scala | 2 +- test/files/pos/t2405.scala | 4 +- test/files/pos/t8002-nested-scope.scala | 2 +- test/files/pos/t9442.scala | 14 ++++ test/files/run/dead-code-elimination.scala | 2 +- test/files/run/names-defaults.scala | 2 +- test/files/run/nothingTypeNoOpt.scala | 2 +- test/files/run/repl-power.check | 3 + test/files/run/repl-power.scala | 5 +- test/files/run/t8047.scala | 2 +- test/files/run/toolbox_expand_macro.check | 1 + 
test/files/run/toolbox_expand_macro.scala | 23 ++++++ .../scala/collection/immutable/SetTests.scala | 81 ++++++++++++++++++++++ .../jvm/analysis/ProdConsAnalyzerTest.scala | 2 +- 74 files changed, 392 insertions(+), 133 deletions(-) create mode 100644 test/files/pos/t9442.scala create mode 100644 test/files/run/toolbox_expand_macro.check create mode 100644 test/files/run/toolbox_expand_macro.scala create mode 100644 test/junit/scala/collection/immutable/SetTests.scala diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e6557d78dd..d01a71b9bd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -86,7 +86,7 @@ A pull request should consist of commits with messages that clearly state what p Commit logs should be stated in the active, present tense. -A commit's subject should be 60 characters or less. Overall, think of +A commit's subject should be 72 characters or less. Overall, think of the first line of the commit as a description of the action performed by the commit on the code base, so use the active voice and the present tense. That also makes the commit subjects easy to reuse in @@ -113,7 +113,7 @@ Our pull request bot, Scabot, automatically builds all the commits in a PR indiv Click on the little x next to a commit sha to go to the overview of the PR validation job. To diagnose a failure, consult the console output of the job that failed. -See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) and [Scabot repo](https://github.com/) for full details on PR validation. One tip you should know is that commenting `/rebuild` on a PR asks validation to be run again on the same commits. This is only necessary when a spurious failure occurred. +See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) and [Scabot repo](https://github.com/scala/scabot) for full details on PR validation. One tip you should know is that commenting `/rebuild` on a PR asks validation to be run again on the same commits. This is only necessary when a spurious failure occurred. ### Pass code review diff --git a/README.md b/README.md index bdb7fcbc28..7ee3cac42b 100644 --- a/README.md +++ b/README.md @@ -25,13 +25,14 @@ If you need some help with your PR at any time, please feel free to @-mention an | [`@dickwall`](https://github.com/dickwall) | process & community, documentation | | [`@dragos`](https://github.com/dragos) | specialization, back end | | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | + | [`@janekdb`](https://github.com/janekdb) | documentation | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! # Handy Links - [A wealth of documentation](http://docs.scala-lang.org) - [Scala CI](https://scala-ci.typesafe.com/) - - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/); + - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/) - [(Deprecated) Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/) - Scala mailing lists: - [Compiler and standard library development](https://groups.google.com/group/scala-internals) @@ -78,7 +79,7 @@ To pinpoint bugs, we often use git bisect, which is only effective when we can c This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one). 
-Please do not @mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)). +Please do not @-mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @-mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)). ## Reviews @@ -91,13 +92,18 @@ To help you plan your contributions, we communicate our plans on a regular basis ## Reviewing -Once you've gained some experience with the code base and the process, the logical next step is to offers reviews for others's contributions. The main goal of this whole process, in the end, is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part! +Once you've gained some experience with the code base and the process, the next step is to review the contributions of others. + +The main goal of this whole process is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part! ## [Labels](https://github.com/scala/scala/labels) - - `reviewed` automatically added by scabot when a comment prefixed with LGTM is posted - - `welcome` reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes) - - `release-notes` reviewer / queue curator adds to make sure this PR is highlighted in the release notes - - `on-hold` added when this PR should not yet be merged, even though CI is green + +Label | Description +--------------- | ----------- +`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted +`welcome` | reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes) +`release-notes` | reviewer / queue curator adds to make sure this PR is highlighted in the release notes +`on-hold` | added when this PR should not yet be merged, even though CI is green ### Tips & Tricks Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows: @@ -126,14 +132,23 @@ Use the latest IntelliJ IDEA release and install the Scala plugin from within th The following steps are required to use IntelliJ IDEA on Scala trunk - Run `ant init`. This will download some JARs to `./build/deps`, which are included in IntelliJ's classpath. - - Run src/intellij/setup.sh - - Open ./src/intellij/scala.ipr in IntelliJ - - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the Java 1.6 SDK. + - Run `./src/intellij/setup.sh`. + - Open `./src/intellij/scala.ipr` in IntelliJ. + - `File` → `Project Structure` → `Project` → `Project SDK`. Create an SDK entry named "1.6" containing the Java 1.6 SDK. (You may use a later SDK for local development, but the CI will verify against Java 6.) -Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built -directly using the STARR compiler (which is downloaded from maven, according to `starr.version` in `versions.properties`). 
+Compilation within IDEA is performed in `-Dlocker.skip=1` mode: the sources are built +directly using the STARR compiler (which is downloaded from [the Central Repository](http://central.sonatype.org/), according to `starr.version` in `versions.properties`). + +## Building with sbt (EXPERIMENTAL) +The experimental sbt-based build definition has arrived! Run `sbt package` +to build the compiler. You can run `sbt test` to run unit (JUnit) tests. +Use `sbt test/it:test` to run integration (partest) tests. + +We would like to migrate to sbt build as quickly as possible. If you would +like to help please use the scala-internals mailing list to discuss your +ideas and coordinate your effort with others. ## Building with Ant @@ -144,25 +159,19 @@ Verify your build using `ant test-opt`. The Scala build system is based on Apache Ant. Most required pre-compiled libraries are part of the repository (in 'lib/'). The following however is -assumed to be installed on the build machine: +assumed to be installed on the build machine: TODO -## Building with Sbt (EXPERIMENTAL) - -The experimental sbt-based build definition has arrived! Run `sbt package` -to build the compiler. You can run `sbt test` to run unit (JUnit) tests. -Use `sbt test/it:test` to run integration (partest) tests. - -We would like to migrate to sbt build as quickly as possible. If you would -like to help please use the scala-internals mailing list to discuss your -ideas and coordinate your effort with others. - -### Tips and tricks +### Ant Tips and tricks Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant). - - `./pull-binary-libs.sh` downloads all binary artifacts associated with this commit. - - `ant -p` prints out information about the commonly used ant targets. - - `ant` or `ant build`: A quick compilation (to build/quick) of your changes using the locker compiler. +Command | Description +----------------------- | ----------- +`./pull-binary-libs.sh` | downloads all binary artifacts associated with this commit. +`ant -p` | prints out information about the commonly used ant targets. +`ant` or `ant build` | A quick compilation (to `build/quick`) of your changes using the locker compiler. +`ant dist` | builds a distribution in 'dists/latest'. +`ant all.clean` | removes all build files and all distributions. A typical debug cycle incrementally builds quick, then uses it to compile and run the file `sandbox/test.scala` as follows: @@ -174,17 +183,14 @@ We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick `ant test-opt` tests that your code is working and fit to be committed: - Runs the test suite and bootstrapping test on quick. - - You can run the suite only (skipping strap) with 'ant test.suite'. + - You can run the suite only (skipping strap) with `ant test.suite`. `ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick. -Note: on most machines this requires more heap than is allocate by default. You can adjust the parameters with ANT_OPTS. Example command line: +Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line: +```sh +ANT_OPTS="-Xms512M -Xmx2048M -Xss1M" ant docs ``` -ANT_OPTS = "-Xms512M -Xmx2048M -Xss1M" ant docs -``` - - - `ant dist` builds a distribution in 'dists/latest'. - - `ant all.clean` Removes all build files and all distributions. 
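The debug cycle above compiles and runs `sandbox/test.scala` with the freshly built quick compiler. That file is not part of the repository; a minimal, hypothetical scratch file one might use for that cycle could look like this:

```scala
// sandbox/test.scala -- hypothetical scratch file for the quick-compiler debug cycle
object Test {
  def main(args: Array[String]): Unit = {
    // keep it tiny: just enough code to exercise the compiler path under test
    val xs = List(1, 2, 3).map(_ * 2)
    println(xs.sum) // prints 12
  }
}
```

Compile it with the `qsc` alias mentioned above and run the resulting `Test` class with the quick build's `scala` runner.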
### Bootstrapping concepts NOTE: This is somewhat outdated, but the ideas still hold. @@ -194,7 +200,7 @@ compiles Scala in layers. Each layer is a complete compiled Scala compiler and l A superior layer is always compiled by the layer just below it. Here is a short description of the four layers that the build uses, from bottom to top: - - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from maven central. + - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from the Central Repository. - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`). - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code. - `strap`: a test layer used to check stability of the build. diff --git a/docs/TODO b/docs/TODO index 094202f53e..558aa87205 100644 --- a/docs/TODO +++ b/docs/TODO @@ -53,7 +53,7 @@ The process is about the same for symbols in PolyTypes. The main difference is that type parameters may be referenced and thus we - need something like De Bruijn indicies to represent these + need something like De Bruijn indices to represent these references. diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 706a20daf4..b2ae195dc4 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -33,7 +33,7 @@ # - Set _VER to override the default, e.g. XML_VER="1.0.4". # - The git revision is set to _REF="v$_VER". Make sure the tag exists (you can't override _REF). # -# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version nubmers for modules. +# - Otherwise (moduleVersioning has some other value): in this mode we use nightly version numbers for modules. # - By default the script sets all _REF to "HEAD", override to build a specific revision. # - The _VER is set to a nightly version, for example "1.0.3-7-g14888a2-nightly" (you can't override _VER) @@ -311,7 +311,7 @@ scalaVerToBinary() { # - the suffix starts with "-bin": 2.12.0-bin-M1 # - the patch version is > 0 : 2.12.1-M1, 1.12.3-RC2, 2.12.1-sha-nightly, 2.12.2-SNAPSHOT # - # Othwersise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT + # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT # # Adapted from sbt: https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39 # @@ -513,7 +513,7 @@ bootstrap() { echo "### Bootstrapping Scala using locker" - # # TODO: close all open staging repos so that we can be reaonably sure the only open one we see after publishing below is ours + # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours # # the ant call will create a new one # # Rebuild Scala with these modules so that all binary versions are consistent. 
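The comment in `scalaVerToBinary` above describes how a full Scala version maps to its binary version. A rough Scala rendering of just the two bullets visible in this hunk (the real implementation is a bash function adapted from sbt's `CrossVersionUtil`; `BinaryVersion` is a made-up name here, and cases the hunk does not show, such as an empty suffix, are ignored):

```scala
object BinaryVersion {
  // major.minor.patch with an optional -suffix, e.g. 2.12.0-bin-M1 or 2.12.1-M1
  private val VersionPattern = """(\d+)\.(\d+)\.(\d+)(-.*)?""".r

  def binaryOf(version: String): String = version match {
    case VersionPattern(maj, min, patch, suffix) =>
      val suf = Option(suffix).getOrElse("")
      if (suf.startsWith("-bin") || patch.toInt > 0) s"$maj.$min" // e.g. 2.12.0-bin-M1, 2.12.1-M1 -> 2.12
      else version                                                // e.g. 2.12.0-M1, 2.12.0-SNAPSHOT stay as-is
    case _ => version
  }
}
```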
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 06e3a458a4..72ae6e8794 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -450,9 +450,10 @@ multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} A multi-line string literal is a sequence of characters enclosed in triple quotes `""" ... """`. The sequence of characters is arbitrary, except that it may contain three or more consecutive quote characters -only at the very end. In particular, embedded newlines -are permitted. Unicode escapes work as everywhere else, but none -of the [escape sequences](#escape-sequences) are interpreted. +only at the very end. Characters +must not necessarily be printable; newlines or other +control characters are also permitted. Unicode escapes work as everywhere else, but none +of the escape sequences [here](#escape-sequences) are interpreted. > ```scala > """the present string diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index 3a70f2a137..69828ec7fe 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -631,7 +631,7 @@ class Outer { ``` Here, accesses to the method `f` can appear anywhere within -`OuterClass`, but not outside it. Accesses to method +`Outer`, but not outside it. Accesses to method `g` can appear anywhere within the package `outerpkg.innerpkg`, as would be the case for package-private methods in Java. Finally, accesses to method diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 85e288bf5f..9cd58ea346 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1736,7 +1736,7 @@ so `scala.Any` is the type inferred for `a`. _Eta-expansion_ converts an expression of method type to an equivalent expression of function type. It proceeds in two steps. -First, one identifes the maximal sub-expressions of $e$; let's +First, one identifies the maximal sub-expressions of $e$; let's say these are $e_1 , \ldots , e_m$. For each of these, one creates a fresh name $x_i$. Let $e'$ be the expression resulting from replacing every maximal subexpression $e_i$ in $e$ by the diff --git a/spec/07-implicits.md b/spec/07-implicits.md index 5e10373959..726320ed33 100644 --- a/spec/07-implicits.md +++ b/spec/07-implicits.md @@ -84,7 +84,7 @@ The _parts_ of a type $T$ are: - if $T$ is an abstract type, the parts of its upper bound; - if $T$ denotes an implicit conversion to a type with a method with argument types $T_1 , \ldots , T_n$ and result type $U$, the union of the parts of $T_1 , \ldots , T_n$ and $U$; -- the parts of quantified (existential or univeral) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`); +- the parts of quantified (existential or universal) and annotated types are defined as the parts of the underlying types (e.g., the parts of `T forSome { ... }` are the parts of `T`); - in all other cases, just $T$ itself. Note that packages are internally represented as classes with companion modules to hold the package members. diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md index e76035f458..8f65191312 100644 --- a/spec/12-the-scala-standard-library.md +++ b/spec/12-the-scala-standard-library.md @@ -171,7 +171,7 @@ Any numeric value type $T$ supports the following methods. evaluated by converting the receiver and its argument to their operation type and performing the given arithmetic operation of that type. 
- * Parameterless arithmethic methods identity (`+`) and negation + * Parameterless arithmetic methods identity (`+`) and negation (`-`), with result type $T$. The first of these returns the receiver unchanged, whereas the second returns its negation. * Conversion methods `toByte`, `toShort`, `toChar`, @@ -194,7 +194,7 @@ Integer numeric value types support in addition the following operations: operation of that type. * A parameterless bit-negation method (`~`). Its result type is - the reciver type $T$ or `Int`, whichever is larger. + the receiver type $T$ or `Int`, whichever is larger. The operation is evaluated by converting the receiver to the result type and negating every bit in its value. * Bit-shift methods left-shift (`<<`), arithmetic right-shift @@ -745,7 +745,7 @@ object Predef { def readf2(format: String) = Console.readf2(format) def readf3(format: String) = Console.readf3(format) - // Implict conversions ------------------------------------------------ + // Implicit conversions ------------------------------------------------ ... } diff --git a/spec/15-changelog.md b/spec/15-changelog.md index 3c8739359a..751a571ecc 100644 --- a/spec/15-changelog.md +++ b/spec/15-changelog.md @@ -31,7 +31,7 @@ formal parameter types. Added section on [numeric widening](06-expressions.html#numeric-widening) to support weak conformance. -Tightened rules to avoid accidential [overrides](05-classes-and-objects.html#overriding). +Tightened rules to avoid accidental [overrides](05-classes-and-objects.html#overriding). Removed class literals. @@ -53,7 +53,7 @@ has been brought in line with. From now on `+=`, has the same precedence as `=`. #### Wildcards as function parameters -A formal parameter to an anonymous fucntion may now be a +A formal parameter to an anonymous function may now be a [wildcard represented by an underscore](06-expressions.html#placeholder-syntax-for-anonymous-functions). > _ => 7 // The function that ignores its argument diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index bf53c47e9a..332acf4a26 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -261,7 +261,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { * Create a method based on a Function * * Used both to under `-Ydelambdafy:method` create a lifted function and - * under `-Ydelamdafy:inline` to create the apply method on the anonymous + * under `-Ydelambdafy:inline` to create the apply method on the anonymous * class. * * It creates a method definition with value params cloned from the diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index b76fb3d823..f9e6a12241 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2043,11 +2043,11 @@ self => /** Drop `private` modifier when followed by a qualifier. 
* Contract `abstract` and `override` to ABSOVERRIDE */ - private def normalizeModifers(mods: Modifiers): Modifiers = + private def normalizeModifiers(mods: Modifiers): Modifiers = if (mods.isPrivate && mods.hasAccessBoundary) - normalizeModifers(mods &~ Flags.PRIVATE) + normalizeModifiers(mods &~ Flags.PRIVATE) else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE)) - normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE) + normalizeModifiers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE) else mods @@ -2092,7 +2092,7 @@ self => * AccessModifier ::= (private | protected) [AccessQualifier] * }}} */ - def accessModifierOpt(): Modifiers = normalizeModifers { + def accessModifierOpt(): Modifiers = normalizeModifiers { in.token match { case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m))) case _ => NoMods @@ -2106,7 +2106,7 @@ self => * | override * }}} */ - def modifiers(): Modifiers = normalizeModifers { + def modifiers(): Modifiers = normalizeModifiers { def loop(mods: Modifiers): Modifiers = in.token match { case PRIVATE | PROTECTED => loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in)))) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 01eff71057..a927097b62 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -2016,7 +2016,7 @@ abstract class GenICode extends SubComponent { * * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally. - * Later reachability analysis will remove unreacahble code. + * Later reachability analysis will remove unreachable code. */ def Try(body: Context => Context, handlers: List[(Symbol, TypeKind, Context => Context)], @@ -2060,7 +2060,7 @@ abstract class GenICode extends SubComponent { if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) { if (finalizer != EmptyTree) { val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers - this.addActiveHandler(exh) // .. and body aswell + this.addActiveHandler(exh) // .. and body as well val exhStartCtx = finalizerCtx.enterExceptionHandler(exh) exhStartCtx.bb killIf outerCtx.bb.ignore val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc") diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 843648282b..0f17b5d694 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -20,7 +20,7 @@ abstract class ICodeCheckers { *

 *    <li>
- *      for primitive operations: the type and numer of operands match
+ *      for primitive operations: the type and number of operands match
 *      the type of the operation
 *    </li>
  • diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 22ac8f84d4..e5eb0b79d5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -92,8 +92,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genThrow(expr: Tree): BType = { val thrownKind = tpeTK(expr) - // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable. - // Similarly for scala.Nothing (again, as defined in src/libray-aux). + // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable. + // Similarly for scala.Nothing (again, as defined in src/library-aux). assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference).get) genLoad(expr, thrownKind) lineNumber(expr) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 8720da84e8..0c26e01322 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1126,7 +1126,7 @@ object BTypes { * The map is indexed by the string s"$name$descriptor" (to * disambiguate overloads). * - * @param warning Contains an warning message if an error occured when building this + * @param warning Contains an warning message if an error occurred when building this * InlineInfo, for example if some classfile could not be found on * the classpath. This warning can be reported later by the inliner. */ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 7153c09377..7883ce7ffa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2697,7 +2697,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => case CMPG => (kind: @unchecked) match { case FLOAT => emit(Opcodes.FCMPG) - case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html + case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se6/html/Instructions2.doc3.html } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 31710dcbee..31b62f747e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -12,7 +12,7 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import BytecodeUtils._ /** - * Some notes on the ASM ananlyzer framework. + * Some notes on the ASM analyzer framework. * * Value * - Abstract, needs to be implemented for each analysis. 
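The `genThrow` comment in the `BCodeBodyBuilder` hunk above points out that `throw null` is accepted even though `scala.Null`, as written in `src/library-aux`, is not declared a subtype of `Throwable`. A small self-contained illustration of the surface behaviour (the `NullPointerException` comes from the JVM's handling of `athrow` on a null reference, not from anything this patch changes):

```scala
object ThrowNullDemo {
  def main(args: Array[String]): Unit = {
    try throw null // compiles: the thrown expression's type, Null, conforms to Throwable
    catch {
      case _: NullPointerException => println("athrow on a null reference raises NPE")
    }
  }
}
```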
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index 92b9b34006..b0dc6ead1b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -359,7 +359,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) { } /** - * Stores a local varaible index the opcode offset required for operating on that variable. + * Stores a local variable index the opcode offset required for operating on that variable. * * The xLOAD / xSTORE opcodes are in the following sequence: I, L, F, D, A, so the offset for * a local variable holding a reference (`A`) is 4. See also method `getOpcode` in [[scala.tools.asm.Type]]. diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 8477f5461a..6b2786c1a3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -366,7 +366,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { clonedInstructions.insert(argStores) - // label for the exit of the inlined functions. xRETURNs are rplaced by GOTOs to this label. + // label for the exit of the inlined functions. xRETURNs are replaced by GOTOs to this label. val postCallLabel = newLabelNode clonedInstructions.add(postCallLabel) @@ -705,9 +705,9 @@ class Inliner[BT <: BTypes](val btypes: BT) { // - a method name+type // // execution [3] - // - resolve the CSP, yielding the boostrap method handle, the static args and the name+type + // - resolve the CSP, yielding the bootstrap method handle, the static args and the name+type // - resolution entails accessibility checking [4] - // - execute the `invoke` method of the boostrap method handle (which is signature polymorphic, check its javadoc) + // - execute the `invoke` method of the bootstrap method handle (which is signature polymorphic, check its javadoc) // - the descriptor for the call is made up from the actual arguments on the stack: // - the first parameters are "MethodHandles.Lookup, String, MethodType", then the types of the constant arguments, // - the return type is CallSite diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 2967f67e9c..efb026cdff 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -154,7 +154,7 @@ object Jar { def update(key: Attributes.Name, value: String) = attrs.put(key, value) } - // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html + // See http://docs.oracle.com/javase/7/docs/api/java/nio/file/Path.html // for some ideas. 
private val ZipMagicNumber = List[Byte](80, 75, 3, 4) private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber) diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 79776485de..82e7c76409 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -111,7 +111,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => impl setInfo new LazyImplClassType(iface) } - /** Return the implementation class of a trait; create a new one of one does not yet exist */ + /** Return the implementation class of a trait; create a new one if one does not yet exist */ def implClass(iface: Symbol): Symbol = { iface.info diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 6ecdd2b195..48eb878e2a 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -137,7 +137,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * and thus may only be accessed from value or method definitions owned by the current class * (ie there's no point drilling down into nested classes). * - * (d) regarding candidates in (b), they are accesible from all places listed in (c) and in addition + * (d) regarding candidates in (b), they are accessible from all places listed in (c) and in addition * from nested classes (nested at any number of levels). * * In all cases, we're done with traversing as soon as all candidates have been ruled out. diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index ddf003bb98..33fd5d0461 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -281,7 +281,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val parents = addSerializable(abstractFunctionErasedType) val funOwner = originalFunction.symbol.owner - // TODO harmonize the naming of delamdafy anon-fun classes with those spun up by Uncurry + // TODO harmonize the naming of delambdafy anon-fun classes with those spun up by Uncurry // - make `anonClass.isAnonymousClass` true. // - use `newAnonymousClassSymbol` or push the required variations into a similar factory method // - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash` diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 79a77d7a0c..57639a94c7 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -703,9 +703,46 @@ abstract class UnCurry extends InfoTransform // declared type and assign this to a synthetic val. Later, we'll patch // the method body to refer to this, rather than the parameter. val tempVal: ValDef = { + // SI-9442: using the "uncurry-erased" type (the one after the uncurry phase) can lead to incorrect + // tree transformations. 
For example, compiling: + // ``` + // def foo(c: Ctx)(l: c.Tree): Unit = { + // val l2: c.Tree = l + // } + // ``` + // Results in the following AST: + // ``` + // def foo(c: Ctx, l: Ctx#Tree): Unit = { + // val l$1: Ctx#Tree = l.asInstanceOf[Ctx#Tree] + // val l2: c.Tree = l$1 // no, not really, it's not. + // } + // ``` + // Of course, this is incorrect, since `l$1` has type `Ctx#Tree`, which is not a subtype of `c.Tree`. + // + // So what we need to do is to use the pre-uncurry type when creating `l$1`, which is `c.Tree` and is + // correct. Now, there are two additional problems: + // 1. when varargs and byname params are involved, the uncurry transformation desugares these special + // cases to actual typerefs, eg: + // ``` + // T* ~> Seq[T] (Scala-defined varargs) + // T* ~> Array[T] (Java-defined varargs) + // =>T ~> Function0[T] (by name params) + // ``` + // we use the DesugaredParameterType object (defined in scala.reflect.internal.transform.UnCurry) + // to redo this desugaring manually here + // 2. the type needs to be normalized, since `gen.mkCast` checks this (no HK here, just aliases have + // to be expanded before handing the type to `gen.mkAttributedCast`, which calls `gen.mkCast`) + val info0 = + enteringUncurry(p.symbol.info) match { + case DesugaredParameterType(desugaredTpe) => + desugaredTpe + case tpe => + tpe + } + val info = info0.normalize val tempValName = unit freshTermName (p.name + "$") - val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info) - atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info))) + val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(info) + atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), info))) } Packed(newParam, tempVal) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 99dd81c7e2..3ed128cbc5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -55,6 +55,13 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. + */ protected def findMacroClassLoader(): ClassLoader = { val classpath = global.classPath.asURLs macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) @@ -658,7 +665,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { // // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer // the undetermined type params. Therefore we need to do something ourselves or otherwise this - // expandee will forever remaing not expanded (see SI-5692). A traditional way out of this conundrum + // expandee will forever remain not expanded (see SI-5692). A traditional way out of this conundrum // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases, // but sometimes, if the inferencer lacks information, it will be forced to approximate. 
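The long comment added to `UnCurry` above lists how special parameter types are desugared between the pre-uncurry and post-uncurry views of a method. A compact source-level sketch of the shapes it refers to (illustrative only: `Ctx`, `DesugaringShapes` and the method names are invented here, and the real rewriting happens on symbol infos inside the uncurry phase):

```scala
trait Ctx { type Tree }

class DesugaringShapes {
  // T*  ~> Seq[T]  (Scala-defined varargs): the parameter's info becomes Seq[Int]
  def scalaVarargs(xs: Int*): Int = xs.sum

  // =>T ~> Function0[T] (by-name parameter): the argument is passed as a thunk
  def byName(x: => Int): Int = x + x

  // The SI-9442 shape: a dependent method type where the second parameter list
  // refers to the first; the fix casts to c.Tree rather than to Ctx#Tree.
  def dependent(c: Ctx)(l: c.Tree): c.Tree = l
}
```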
// diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index a7d48ceb89..e8db8309f1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -262,7 +262,14 @@ abstract class TreeCheckers extends Analyzer { checkedTyped(tree, mode, pt) ) private def checkedTyped(tree: Tree, mode: Mode, pt: Type): Tree = { - val typed = wrap(tree)(super.typed(tree, mode, pt)) + val typed = wrap(tree)(super.typed(tree.clearType(), mode, pt)) + + // Vlad: super.typed returns null for package defs, why is that? + if (typed eq null) + return tree + + if (typed.tpe ne null) + assert(!typed.tpe.isErroneous, "Tree has erroneous type: " + typed) if (tree ne typed) treesDiffer(tree, typed) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 8228adc20e..f26baf7d93 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3294,7 +3294,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 // // One can think of these methods as being infinitely overloaded. We create - // a ficticious new cloned method symbol for each call site that takes on a signature + // a fictitious new cloned method symbol for each call site that takes on a signature // governed by a) the argument types and b) the expected type val args1 = typedArgs(args, forArgMode(fun, mode)) val pts = args1.map(_.tpe.deconst) @@ -4095,7 +4095,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def resultingTypeTree(tpe: Type) = { // we need symbol-ful originals for reification - // hence we go the extra mile to hand-craft tis guy + // hence we go the extra mile to hand-craft this guy val original = arg1 match { case tt @ TypeTree() if tt.original != null => Annotated(ann, tt.original) // this clause is needed to correctly compile stuff like "new C @D" or "@(inline @getter)" @@ -4247,7 +4247,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // in the special (though common) case where the types are equal, it pays to pack before comparing // especially virtpatmat needs more aggressive unification of skolemized types // this breaks src/library/scala/collection/immutable/TrieIterator.scala - // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this) + // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this) def samePackedTypes = ( !isPastTyper && thenp1.tpe.annotations.isEmpty diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 8d4d07759f..2811520b67 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -22,7 +22,7 @@ import Jar.isJarOrZip /**

 *   This module provides star expansion of '-classpath' option arguments, behaves the same as
- *   java, see [http://java.sun.com/javase/6/docs/technotes/tools/windows/classpath.html]
+ *   java, see [[http://docs.oracle.com/javase/6/docs/technotes/tools/windows/classpath.html]]
 *
    * * @author Stepan Koltsov diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index ac63232967..e30d1ed7cd 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -1,9 +1,11 @@ package scala.tools package reflect +import scala.reflect.internal.util.ScalaClassLoader import scala.tools.nsc.Global import scala.tools.nsc.reporters.Reporter import scala.tools.nsc.Settings +import scala.tools.nsc.typechecker.Analyzer /** A version of Global that uses reflection to get class * infos, instead of reading class or source files. @@ -11,6 +13,20 @@ import scala.tools.nsc.Settings class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { + override lazy val analyzer = new { + val global: ReflectGlobal.this.type = ReflectGlobal.this + } with Analyzer { + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. + * The [[rootClassLoader]] is used to obtain runtime defined macros. + */ + override protected def findMacroClassLoader(): ClassLoader = { + val classpath = global.classPath.asURLs + ScalaClassLoader.fromURLs(classpath, rootClassLoader) + } + } + override def transformedType(sym: Symbol) = postErasure.transformInfo(sym, erasure.transformInfo(sym, diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index 1be186d114..8caf0c5c0e 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -27,7 +27,7 @@ package scala * w.print() * }}} * - * See the [[http://docs.scala-lang.org/sips/pending/value-classes.html value classes guide]] for more + * See the [[http://docs.scala-lang.org/sips/completed/value-classes.html value classes guide]] for more * details on the interplay of universal traits and value classes. */ abstract class Any { diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala index ff62948413..fb3d213e19 100644 --- a/src/library/scala/AnyVal.scala +++ b/src/library/scala/AnyVal.scala @@ -49,7 +49,7 @@ package scala * It's important to note that user-defined value classes are limited, and in some circumstances, * still must allocate a value class instance at runtime. These limitations and circumstances are * explained in greater detail in the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes Guide]] - * as well as in [[http://docs.scala-lang.org/sips/pending/value-classes.html SIP-15: Value Classes]], + * as well as in [[http://docs.scala-lang.org/sips/completed/value-classes.html SIP-15: Value Classes]], * the Scala Improvement Proposal. */ abstract class AnyVal extends Any { diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala index f2f9ead44c..e06557ccdd 100644 --- a/src/library/scala/Equals.scala +++ b/src/library/scala/Equals.scala @@ -13,8 +13,9 @@ package scala */ trait Equals extends Any { /** A method that should be called from every well-designed equals method - * that is open to be overridden in a subclass. See Programming in Scala, - * Chapter 28 for discussion and design. + * that is open to be overridden in a subclass. 
See + * [[http://www.artima.com/pins1ed/object-equality.html Programming in Scala, + * Chapter 28]] for discussion and design. * * @param that the value being probed for possible equality * @return true if this instance can possibly equal `that`, otherwise false diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index f134f5ce3d..7282feebb6 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -270,7 +270,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * // Returns None because the partial function doesn't cover the case. * Some("ftp") collect {case "http" => "HTTP"} * - * // Returns None because None is passed to the collect method. + * // Returns None because the option is empty. There is no value to pass to the partial function. * None collect {case value => value} * }}} * diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 6b71c0fa66..603d97c3ad 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -194,6 +194,7 @@ class HashSet[A] extends AbstractSet[A] protected def writeReplace(): AnyRef = new HashSet.SerializationProxy(this) + override def toSet[B >: A]: Set[B] = this.asInstanceOf[HashSet[B]] } /** $factoryInfo diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index 2e17677359..adc975479a 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -179,4 +179,6 @@ class ListSet[A] extends AbstractSet[A] override def tail: ListSet[A] = self } + + override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] } diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 94a5b7929a..bd5b9c9faf 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -113,6 +113,11 @@ self => override def - (elem: A): immutable.Set[A] = if (this(elem)) immutable.Set[A]() ++ this - elem else this + + // ImmutableDefaultKeySet is only protected, so we won't warn on override. + // Someone could override in a way that makes widening not okay + // (e.g. by overriding +, though the version in this class is fine) + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] } /** This function transforms all the values of mappings contained diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index 0fbf7942d4..a115469b83 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -35,12 +35,22 @@ trait Set[A] extends Iterable[A] override def companion: GenericCompanion[Set] = Set - /** Returns this $coll as an immutable map. - * - * A new map will not be built; lazy collections will stay lazy. + /** Returns this $coll as an immutable set, perhaps accepting a + * wider range of elements. Since it already is an + * immutable set, it will only be rebuilt if the underlying structure + * cannot be expanded to include arbitrary element types. + * For instance, `BitSet` and `SortedSet` will be rebuilt, as + * they require `Int` and sortable elements respectively. + * + * When in doubt, the set will be rebuilt. Rebuilt sets never + * need to be rebuilt again. 
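The new `toSet` documentation above distinguishes immutable sets that can simply be viewed at a wider element type from those that have to be rebuilt. A hedged illustration of that difference as described in the hunk (`SortedSet` is one of the rebuilt cases, since `Any` carries no `Ordering`):

```scala
import scala.collection.immutable.{ Set, SortedSet }

object ToSetWidening {
  def main(args: Array[String]): Unit = {
    val ints: Set[Int]    = Set(1, 2, 3)
    val anys: Set[Any]    = ints.toSet[Any]    // hash/small sets: same elements, wider element type
    val sorted            = SortedSet(1, 2, 3)
    val widened: Set[Any] = sorted.toSet[Any]  // rebuilt into an unsorted set
    println(anys == widened)                   // true: contents are equal either way
  }
}
```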
*/ - @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0") - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] + override def toSet[B >: A]: Set[B] = { + // This way of building sets typically has the best benchmarks, surprisingly! + val sb = Set.newBuilder[B] + foreach(sb += _) + sb.result() + } override def seq: Set[A] = this protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there! @@ -62,6 +72,7 @@ object Set extends ImmutableSetFactory[Set] { def - (elem: Any): Set[Any] = this def iterator: Iterator[Any] = Iterator.empty override def foreach[U](f: Any => U): Unit = {} + override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]] } private[collection] def emptyInstance: Set[Any] = EmptySet @@ -92,6 +103,10 @@ object Set extends ImmutableSetFactory[Set] { if (f(elem1)) Some(elem1) else None } + // Why is Set1 non-final? Need to fix that! + @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set1[B]] + } /** An optimized representation for immutable sets of size 2 */ @@ -123,6 +138,9 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem2)) Some(elem2) else None } + // Why is Set2 non-final? Need to fix that! + @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set2[B]] } /** An optimized representation for immutable sets of size 3 */ @@ -156,6 +174,9 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem3)) Some(elem3) else None } + // Why is Set3 non-final? Need to fix that! + @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set3[B]] } /** An optimized representation for immutable sets of size 4 */ @@ -191,6 +212,9 @@ object Set extends ImmutableSetFactory[Set] { else if (f(elem4)) Some(elem4) else None } + // Why is Set4 non-final? Need to fix that! + @deprecatedOverriding("This immutable set should do nothing on toSet but cast itself to a Set with a wider element type.", "2.11.8") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set4[B]] } } diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index f1493551ab..682788e18e 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -53,6 +53,12 @@ self => val map = self.rangeImpl(from, until) new map.DefaultKeySortedSet } + override def toSet[C >: A]: Set[C] = { + // This way of building sets typically has the best benchmarks, surprisingly! + val sb = Set.newBuilder[C] + foreach(sb += _) + sb.result() + } } /** Add a key/value pair to this map. diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index 619beeb1d6..e6889da3b5 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -18,8 +18,19 @@ import generic._ * * Only the `dequeue` and `dequeueAll` methods will return elements in priority * order (while removing elements from the heap). 
Standard collection methods - * including `drop` and `iterator` will remove or traverse the heap in whichever - * order seems most convenient. + * including `drop`, `iterator`, and `toString` will remove or traverse the heap + * in whichever order seems most convenient. + * + * Therefore, printing a `PriorityQueue` will not reveal the priority order of + * the elements, though the highest-priority element will be printed first. To + * print the elements in order, one must duplicate the `PriorityQueue` (by using + * `clone`, for instance) and then dequeue them: + * + * @example {{{ + * val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * println(pq) // elements probably not in order + * println(pq.clone.dequeueAll) // prints Vector(7, 5, 3, 2, 1) + * }}} * * @tparam A type of the elements in this priority queue. * @param ord implicit ordering used to compare the elements of type `A`. diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 9634f6d900..1ee27b0f36 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -40,7 +40,7 @@ class SyncVar[A] { wait(timeout) val elapsed = System.nanoTime() - start // nanoTime should be monotonic, but it's not possible to rely on that. - // See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6458294. + // See http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6458294. if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed) } diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index 60f99199cb..2a41e25b01 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -22,7 +22,7 @@ import scala.language.implicitConversions // // // MacRoman vs. UTF-8: see http://jira.codehaus.org/browse/JRUBY-3576 -// -Dfile.encoding: see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4375816 +// -Dfile.encoding: see http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4375816 /** A class for character encoding/decoding preferences. * diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala index ca7a3cddb8..82ec872806 100644 --- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala +++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala @@ -218,7 +218,7 @@ object ClassManifestFactory { /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not * strictly necessary as it could be obtained by reflection. It was * added so that erasure can be calculated without reflection. - * todo: remove after next boostrap + * todo: remove after next bootstrap */ def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] = new ClassManifest[T] { @@ -239,4 +239,4 @@ private class ClassTypeManifest[T]( (if (prefix.isEmpty) "" else prefix.get.toString+"#") + (if (runtimeClass.isArray) "Array" else runtimeClass.getName) + argString -} \ No newline at end of file +} diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index ce27a0ce63..026d5edd29 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -155,7 +155,7 @@ object ScalaRunTime { arr } - // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957 + // Java bug: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4071957 // More background at ticket #2318. 
def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index ee2bdbc4a7..b4f965f69b 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -88,7 +88,7 @@ object Sorting { a(pL - 1) = current pL -= 1 case x if x < 0 => - // Already in place. Just update indicies. + // Already in place. Just update indices. iA += 1 case _ if iB > pR => // Wrong side. There's room on the other side, so swap diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index cd7648a44a..f9b49f1730 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -588,7 +588,7 @@ trait Types { /** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)` * Here, `pre` is the prefix of the type reference, `sym` is the symbol * referred to by the type reference, and `args` is a possible empty list of - * type argumenrts. + * type arguments. * @group Extractors */ abstract class TypeRefExtractor { diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index e5d97e8959..a4223c1cb5 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -14,7 +14,7 @@ object ClassfileConstants { final val JAVA_MAJOR_VERSION = 45 final val JAVA_MINOR_VERSION = 3 - /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html) + /** (see http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.1) * * If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also * be set (ch. 2.13.1). diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 8ae201f045..902ba9fa80 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -237,7 +237,7 @@ trait Kinds { * * Proper types are represented using ProperTypeKind. * - * Type constructors are reprented using TypeConKind. + * Type constructors are represented using TypeConKind. */ abstract class Kind { import Kind.StringState diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 1113da2eff..05fdfb51ed 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2124,7 +2124,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** The package class containing this symbol, or NoSymbol if there * is not one. * TODO: formulate as enclosingSuchThat, after making sure - * we can start with current symbol rather than onwner. + * we can start with current symbol rather than owner. * TODO: Also harmonize with enclClass, enclMethod etc. */ def enclosingPackageClass: Symbol = { diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index e3f95f9fd8..bbd9df05d2 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1418,7 +1418,7 @@ trait Trees extends api.Trees { transformTypeDefs(tparams), transform(rhs)) } case LabelDef(name, params, rhs) => - treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) 
works, also change `LamdaLifter.proxy' + treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LambdaLifter.proxy' case PackageDef(pid, stats) => treeCopy.PackageDef( tree, transform(pid).asInstanceOf[RefTree], @@ -1601,7 +1601,7 @@ trait Trees extends api.Trees { case _ => // no special handling is required for Function or Import nodes here. // as they don't have interesting infos attached to their symbols. - // Subsitution of the referenced symbol of Return nodes is handled + // Substitution of the referenced symbol of Return nodes is handled // in .ChangeOwnerTraverser } tree match { diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index abea8bed9f..85e3ac60e8 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -40,19 +40,27 @@ trait UnCurry { apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe)) case NullaryMethodType(restpe) => apply(MethodType(List(), restpe)) - case TypeRef(pre, ByNameParamClass, arg :: Nil) => - apply(functionType(List(), arg)) - case TypeRef(pre, RepeatedParamClass, arg :: Nil) => - apply(seqType(arg)) - case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) => - apply(arrayType( - if (isUnboundedGeneric(arg)) ObjectTpe else arg)) + case DesugaredParameterType(desugaredTpe) => + apply(desugaredTpe) case _ => expandAlias(mapOver(tp)) } } } + object DesugaredParameterType { + def unapply(tpe: Type): Option[Type] = tpe match { + case TypeRef(pre, ByNameParamClass, arg :: Nil) => + Some(functionType(List(), arg)) + case TypeRef(pre, RepeatedParamClass, arg :: Nil) => + Some(seqType(arg)) + case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) => + Some(arrayType(if (isUnboundedGeneric(arg)) ObjectTpe else arg)) + case _ => + None + } + } + private val uncurryType = new TypeMap { def apply(tp0: Type): Type = { val tp = expandAlias(tp0) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index a2232d1963..278d081249 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -445,7 +445,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.ScalaValueClassesNoUnit definitions.ScalaValueClasses - + uncurry.DesugaredParameterType erasure.GenericArray erasure.scalaErasure erasure.specialScalaErasure diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 06ae179da9..3b54f5274e 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -133,7 +133,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } catch AbstractOrMissingHandler() } - private def tquoted(s: String) = "\"\"\"" + s + "\"\"\"" private val logScope = scala.sys.props contains "scala.repl.scope" private def scopelog(msg: String) = if (logScope) Console.err.println(msg) @@ -905,7 +904,10 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def path = originalPath("$intp") def envLines = { if (!isReplPower) Nil // power mode only for now - else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees")) + else { + val escapedLine = Constant(originalLine).escapedStringValue + 
List(s"""def $$line = $escapedLine """, """def $trees = _root_.scala.Nil""") + } } def preamble = s""" |$headerPreamble diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala index 743c2a401c..0353e58e67 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala @@ -787,7 +787,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp if (isReduced) NodeSeq.Empty else { def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = { def param0(vl: ValueParam): NodeSeq = - // notice the }{ in the next lines, they are necessary to avoid an undesired withspace in output + // notice the }{ in the next lines, they are necessary to avoid an undesired whitespace in output { Text(vl.name) }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala index 4bed106f43..9381cf3a35 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala @@ -145,7 +145,7 @@ class DotProcess(settings: doc.Settings) { // we shouldn't just sit there for 50s not reporting anything, no? settings.printMsg("Graphviz dot encountered an error when generating the diagram for:") settings.printMsg(templateName) - settings.printMsg("These are usually spurious errors, but if you notice a persistant error on") + settings.printMsg("These are usually spurious errors, but if you notice a persistent error on") settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.") } } diff --git a/test/files/neg/name-lookup-stable.check b/test/files/neg/name-lookup-stable.check index 751df9505e..68d98c4162 100644 --- a/test/files/neg/name-lookup-stable.check +++ b/test/files/neg/name-lookup-stable.check @@ -6,6 +6,6 @@ import ColumnOption._ name-lookup-stable.scala:17: error: reference to PrimaryKey is ambiguous; it is both defined in class A and imported subsequently by import ColumnOption._ - PrimaryKey // was already ambigious in 2.10.3 + PrimaryKey // was already ambiguous in 2.10.3 ^ two errors found diff --git a/test/files/neg/name-lookup-stable.scala b/test/files/neg/name-lookup-stable.scala index 0d862f06e1..2941e05875 100644 --- a/test/files/neg/name-lookup-stable.scala +++ b/test/files/neg/name-lookup-stable.scala @@ -14,7 +14,7 @@ class A { (null: Any) match { case PrimaryKey => } - PrimaryKey // was already ambigious in 2.10.3 + PrimaryKey // was already ambiguous in 2.10.3 } } diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala index 8da3868566..b1ba41bd54 100644 --- a/test/files/neg/t5376.scala +++ b/test/files/neg/t5376.scala @@ -12,7 +12,7 @@ object Test { "a": Int } - // Import one implict and one non-implicit method with the + // Import one implicit and one non-implicit method with the // same name in the same scope. def m2 = { import O1._ diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala index b29d591cb1..cbf0bf1c5a 100644 --- a/test/files/neg/t8597b.scala +++ b/test/files/neg/t8597b.scala @@ -4,7 +4,7 @@ object Unchecked { // t is a fresh pattern type variable, despite our attempts to // backtick our way to the enclosing `t`. Under this interpretation, - // the absense of an unchecked warning is expected. 
+ // the absence of an unchecked warning is expected. (null: Any) match { case _: Some[t] => // no warn } diff --git a/test/files/neg/t8675b.scala b/test/files/neg/t8675b.scala index bffed2141c..b2212fa234 100644 --- a/test/files/neg/t8675b.scala +++ b/test/files/neg/t8675b.scala @@ -9,7 +9,7 @@ object Test { } trait Reportable1[Params, R] - // "missing paramater type" error was swallowed in 2.11.0 leading to a crash + // "missing parameter type" error was swallowed in 2.11.0 leading to a crash // in the backend. // // This error is itself a regression (or at least a change) in 2.11.0-M7, diff --git a/test/files/neg/virtpatmat_exhaust_compound.scala b/test/files/neg/virtpatmat_exhaust_compound.scala index 386c7af98d..4ff04dd06a 100644 --- a/test/files/neg/virtpatmat_exhaust_compound.scala +++ b/test/files/neg/virtpatmat_exhaust_compound.scala @@ -10,7 +10,7 @@ case object O3 extends Base2 case object O4 extends Base with Base2 object Test { - val a /*: Product with Serialiable with Base */ = if (true) O1 else O2 + val a /*: Product with Serializable with Base */ = if (true) O1 else O2 a match { case null => } diff --git a/test/files/pos/t2405.scala b/test/files/pos/t2405.scala index 224b2ce83b..0bc7a771b2 100644 --- a/test/files/pos/t2405.scala +++ b/test/files/pos/t2405.scala @@ -6,14 +6,14 @@ object Test1 { implicitly[Int] } -// Testing for the absense of shadowing #1. +// Testing for the absence of shadowing #1. object Test2 { import A.{x => y} val x = 2 implicitly[Int] } -// Testing for the absense of shadowing #2. +// Testing for the absence of shadowing #2. object Test3 { { import A.{x => y} diff --git a/test/files/pos/t8002-nested-scope.scala b/test/files/pos/t8002-nested-scope.scala index a2088bce7a..8ce809e556 100644 --- a/test/files/pos/t8002-nested-scope.scala +++ b/test/files/pos/t8002-nested-scope.scala @@ -1,5 +1,5 @@ // This test serves to capture the status quo, but should really -// emit an accessibiltiy error. +// emit an accessibility error. // `Namers#companionSymbolOf` seems too lenient, and currently doesn't // implement the same-scope checks mentioned: diff --git a/test/files/pos/t9442.scala b/test/files/pos/t9442.scala new file mode 100644 index 0000000000..2ea81e79cb --- /dev/null +++ b/test/files/pos/t9442.scala @@ -0,0 +1,14 @@ +trait Ctx { + trait Tree +} +trait Lst[+A] { + def zip[A1 >: A, B](that: Lst[B]): Nothing +} +class C[@specialized(Int) T] { + def moo(t: T) = { + def foo1(c: Ctx)(l: Lst[c.Tree]) = l zip l + def foo2(c: Ctx)(l: Lst[c.Tree]*) = l(0) zip l(1) + def foo3(c: Ctx)(l: => Lst[c.Tree]) = l zip l + ??? + } +} diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala index fd3f2a996a..2291b22f7a 100644 --- a/test/files/run/dead-code-elimination.scala +++ b/test/files/run/dead-code-elimination.scala @@ -10,7 +10,7 @@ // the stack after code elimination. // // Originally, this did not compile, but I included it in the run -// tests because this was ASM-dependand and did not happen for GenJVM. +// tests because this was ASM-dependent and did not happen for GenJVM. // // Thus, we run the code and force the loading of class B -- if the // bytecode is incorrect, it will fail the test. 
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala index 7fb4a04546..4159dbdf91 100644 --- a/test/files/run/names-defaults.scala +++ b/test/files/run/names-defaults.scala @@ -192,7 +192,7 @@ object Test extends App { println(argName) // should be 4 test5 { argName = 5 } println(argName) // should be 5 - val a: Unit = test1(a = 10, b = "2") // local values a and b exist, but not ambiuous since they're val's + val a: Unit = test1(a = 10, b = "2") // local values a and b exist, but it's not ambiguous since they're vals // dependent types and copy method diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala index 5c5a20fa3b..454539a4b1 100644 --- a/test/files/run/nothingTypeNoOpt.scala +++ b/test/files/run/nothingTypeNoOpt.scala @@ -26,7 +26,7 @@ class C { } def f5(x: Boolean) = { - // stack heights need to be the smae. ??? looks to the jvm like returning a value of + // stack heights need to be the same. ??? looks to the jvm like returning a value of // type Nothing$, need to drop or throw it. println( if (x) { ???; 10 } diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check index 2a7b7783d9..4e030bd9fa 100644 --- a/test/files/run/repl-power.check +++ b/test/files/run/repl-power.check @@ -25,4 +25,7 @@ m: $r.treedsl.global.Literal = 10 scala> typed(m).tpe // typed is in scope res2: $r.treedsl.global.Type = Int(10) +scala> """escaping is hard, m'kah""" +res3: String = escaping is hard, m'kah + scala> :quit diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala index 4dfeb37885..5ecaad8723 100644 --- a/test/files/run/repl-power.scala +++ b/test/files/run/repl-power.scala @@ -1,7 +1,9 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { - def code = """ + def tripleQuote(s: String) = "\"\"\"" + s + "\"\"\"" + + def code = s""" :power // guarding against "error: reference to global is ambiguous" global.emptyValDef // "it is imported twice in the same scope by ..." @@ -9,5 +11,6 @@ val tp = ArrayClass[scala.util.Random] // magic with tags tp.memberType(Array_apply) // evidence val m = LIT(10) // treedsl typed(m).tpe // typed is in scope +${tripleQuote("escaping is hard, m'kah")} """.trim } diff --git a/test/files/run/t8047.scala b/test/files/run/t8047.scala index f5660541e8..9ec8c1dc56 100644 --- a/test/files/run/t8047.scala +++ b/test/files/run/t8047.scala @@ -1,7 +1,7 @@ object Test extends App { import scala.reflect.runtime.universe._ // - // x's owner is outer Test scope. Previosly the quasiquote expansion + // x's owner is outer Test scope. 
Previously the quasiquote expansion // looked like: // // object Test { diff --git a/test/files/run/toolbox_expand_macro.check b/test/files/run/toolbox_expand_macro.check new file mode 100644 index 0000000000..d81cc0710e --- /dev/null +++ b/test/files/run/toolbox_expand_macro.check @@ -0,0 +1 @@ +42 diff --git a/test/files/run/toolbox_expand_macro.scala b/test/files/run/toolbox_expand_macro.scala new file mode 100644 index 0000000000..a52e449168 --- /dev/null +++ b/test/files/run/toolbox_expand_macro.scala @@ -0,0 +1,23 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{universe => ru} +import scala.reflect.runtime.{currentMirror => cm} +import scala.tools.reflect.{ToolBox} + +object Test extends App { + val toolBox = cm.mkToolBox() + val x = 21 + val runtimeMacro = + q"""object RuntimeMacro { + import scala.reflect.macros.whitebox.Context + import scala.language.experimental.macros + + def add(y: Int): Int = macro addImpl + def addImpl(c: Context)(y: c.Expr[Int]): c.Expr[Int] = { + import c.universe._ + val x = $x + c.Expr[Int](q"$$x + $$y") + } + }""" + val s = toolBox.define(runtimeMacro) + println(toolBox.eval(q"$s.add(21)")) +} diff --git a/test/junit/scala/collection/immutable/SetTests.scala b/test/junit/scala/collection/immutable/SetTests.scala new file mode 100644 index 0000000000..28c7864359 --- /dev/null +++ b/test/junit/scala/collection/immutable/SetTests.scala @@ -0,0 +1,81 @@ +package scala.collection.immutable + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class SetTests { + @Test + def test_SI8346_toSet_soundness(): Unit = { + val any2stringadd = "Disabled string conversions so as not to get confused!" + + def any[A](set: Set[A]): Set[Any] = { + val anyset = set.toSet[Any] + assert((anyset + "fish") contains "fish") + anyset + } + + // Make sure default immutable Set does not rebuild itself on widening with toSet + // Need to cover 0, 1, 2, 3, 4 elements as special cases + var si = Set.empty[Int] + assert(si eq si.toSet[Any]) + for (i <- 1 to 5) { + val s1 = Set(Array.range(1, i+1): _*) + val s2 = si + i + val s1a = any(s1) + val s2a = any(s2) + assert(s1 eq s1a) + assert(s2 eq s2a) + si = s2 + } + + // Make sure BitSet correctly rebuilds itself on widening with toSet + // Need to cover empty, values 0-63, values 0-127 as special cases + val bitsets = Seq(BitSet.empty, BitSet(23), BitSet(23, 99), BitSet(23, 99, 141)) + bitsets.foreach{ b => + val ba = any(b) + assert(b ne ba) + assertEquals(b, ba) + } + + // Make sure HashSet (and by extension, its implementing class HashTrieSet) + // does not rebuild itself on widening by toSet + val hashset = HashSet(1, 3, 5, 7) + val hashseta = any(hashset) + assert(hashset eq hashseta) + + // Make sure ListSet does not rebuild itself on widening by toSet + // (Covers Node also, since it subclasses ListSet) + val listset = ListSet(1, 3, 5, 7) + val listseta = any(listset) + assert(listset eq listseta) + + // Make sure SortedSets correctly rebuild themselves on widening with toSet + // Covers TreeSet and keySet of SortedMap also + val sortedsets = Seq( + SortedSet.empty[Int], SortedSet(5), SortedSet(1,2,3,5,4), + SortedMap(1 -> "cod", 2 -> "herring").keySet + ) + sortedsets.foreach{ set => + val seta = any(set) + assert(set ne seta) + assertEquals(set, seta) + } + + // Make sure ValueSets correctly rebuild themselves on widening with toSet + object WeekDay extends Enumeration { + type WeekDay = Value + val Mon, Tue, 
Wed, Thu, Fri, Sat, Sun = Value + } + val valuesa = any(WeekDay.values) + assert(WeekDay.values ne valuesa) + assertEquals(WeekDay.values, valuesa) + + // Make sure regular Map keySets do not rebuild themselves on widening with toSet + val mapset = Map(1 -> "cod", 2 -> "herring").keySet + val mapseta = any(mapset) + assert(mapset eq mapseta) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index a5b3faced8..941a167114 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -204,7 +204,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { def iincProdCons(): Unit = { import Opcodes._ val m = genMethod(descriptor = "(I)I")( - Incr(IINC, 1, 1), // producer and cosumer of local variable 1 + Incr(IINC, 1, 1), // producer and consumer of local variable 1 VarOp(ILOAD, 1), Op(IRETURN) ) -- cgit v1.2.3
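
Editor's note (not part of the patch above): the new `toSet` documentation and the `SetTests` file in this series describe a widening contract that is easy to miss when reading the hunks in isolation. The following standalone sketch summarizes it; the object name `ToSetWideningDemo` is invented for illustration, and the `eq`/`ne` expectations assume a 2.11.x standard library with these changes applied, exactly as the JUnit test asserts.

{{{
import scala.collection.immutable.{BitSet, HashSet, SortedSet}

object ToSetWideningDemo extends App {
  // Default immutable sets widen by casting: toSet[Any] returns the same
  // instance, so `eq` holds and no rebuilding work is done.
  val ints: Set[Int] = HashSet(1, 3, 5, 7)
  val anys: Set[Any] = ints.toSet[Any]
  println(ints eq anys)                    // expected: true (same instance, element type widened)
  println((anys + "fish") contains "fish") // widening is sound: other element types can be added

  // BitSet can only hold Ints, so widening must rebuild into a
  // general-purpose Set; the elements are preserved, the instance is not.
  val bits: SortedSet[Int] = BitSet(2, 4, 8)
  val wide: Set[Any] = bits.toSet[Any]
  println(bits eq wide)                    // expected: false (rebuilt)
  println(bits == wide)                    // expected: true (same elements)
}
}}}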