author     Antonio Cunei <antonio.cunei@epfl.ch>  2011-07-22 16:34:35 +0000
committer  Antonio Cunei <antonio.cunei@epfl.ch>  2011-07-22 16:34:35 +0000
commit     3307c3771777cf17d5b4d059726a1b484923d042 (patch)
tree       90dd3a955d06bd439f69c34a464939a5470897c2
parent     f346e54d24847e0529ce1e5ee43e47a038a4961e (diff)
download   scala-3307c3771777cf17d5b4d059726a1b484923d042.tar.gz
           scala-3307c3771777cf17d5b4d059726a1b484923d042.tar.bz2
           scala-3307c3771777cf17d5b4d059726a1b484923d042.zip
Backport from trunk of a large batch of revisions:
24909,24919,24941,24961,24963,24965,24981,24984,24986,24987,24999,
25000,25001,25002,25003,25004,25005,25006,25007,25008,25009,25010,
25015,25028,25029,25030,25031,25033,25037,25038,25039,25040,25041,
25044,25045,25046,25050,25052,25053,25054,25055,25057,25058,25059,
25061,25062,25063,25065,25066,25069,25070,25071,25072,25074,25075,
25076,25080,25081,25082,25083,25085,25087,25088,25090,25091,25092,
25093,25094,25095,25096,25097,25098,25099,25100,25101,25110,25111,
25112,25113,25114,25117,25119,25122,25124,25125,25126,25127,25128,
25130,25132,25133,25134,25135,25136,25137,25138,25138,25139,25140,
25141,25142,25144,25145,25146,25148,25149,25152,25153,25158,25160,
25161,25162,25164,25167,25169,25170,25171,25172,25202,25204,25208,
25209,25252
-rw-r--r--  META-INF/MANIFEST.MF | 9
-rw-r--r--  lib/jline.jar.desired.sha1 | 2
-rw-r--r--  project/build.properties | 2
-rw-r--r--  src/actors/scala/actors/Actor.scala | 27
-rw-r--r--  src/compiler/scala/tools/ant/Scalac.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 4
-rw-r--r--  src/compiler/scala/tools/cmd/gen/AnyVals.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Uncompilable.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 71
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/Page.scala | 93
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 70
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala | 68
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/io/AbstractFile.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/io/File.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/io/Path.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/io/PlainFile.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/io/SourceReader.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/io/VirtualFile.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/io/ZipArchive.scala | 424
-rw-r--r--  src/compiler/scala/tools/nsc/io/package.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Matrix.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala | 52
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/matching/PatternBindings.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala | 70
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/Reporter.scala | 115
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/StoreReporter.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/settings/AestheticSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/settings/Warnings.scala | 62
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Definitions.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Names.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/StdNames.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolTable.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Symbols.scala | 68
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala | 515
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 94
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 1022
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 70
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 233
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 428
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 160
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 178
-rw-r--r--  src/compiler/scala/tools/nsc/util/CharArrayReader.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 283
-rw-r--r--  src/compiler/scala/tools/nsc/util/MsilClassPath.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/util/SourceFile.scala | 2
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 22
-rw-r--r--  src/compiler/scala/tools/util/StringOps.scala | 35
-rw-r--r--  src/jline/TEST-NOTE.txt | 4
-rw-r--r--  src/jline/project/build.properties | 6
-rw-r--r--  src/jline/project/plugins/project/build.properties | 2
-rw-r--r--  src/jline/src/main/java/scala/tools/jline/UnixTerminal.java | 3
-rw-r--r--  src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java | 30
-rw-r--r--  src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java | 15
-rw-r--r--  src/jline/src/main/java/scala/tools/jline/console/Key.java | 2
-rw-r--r--  src/jline/src/main/java/scala/tools/jline/console/Operation.java | 6
-rw-r--r--  src/jline/src/main/resources/scala/tools/jline/keybindings.properties | 5
-rw-r--r--  src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java | 48
-rw-r--r--  src/library-aux/scala/Any.scala | 6
-rwxr-xr-x  src/library/scala/Boolean.scala | 2
-rw-r--r--  src/library/scala/Byte.scala | 1
-rw-r--r--  src/library/scala/Char.scala | 1
-rw-r--r--  src/library/scala/Double.scala | 1
-rw-r--r--  src/library/scala/Enumeration.scala | 6
-rw-r--r--  src/library/scala/Float.scala | 1
-rw-r--r--  src/library/scala/Int.scala | 1
-rw-r--r--  src/library/scala/Long.scala | 1
-rw-r--r--  src/library/scala/Option.scala | 29
-rw-r--r--  src/library/scala/Predef.scala | 41
-rw-r--r--  src/library/scala/Short.scala | 1
-rwxr-xr-x  src/library/scala/Unit.scala | 5
-rw-r--r--  src/library/scala/collection/GenIterableLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenIterableViewLike.scala | 6
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqViewLike.scala | 6
-rw-r--r--  src/library/scala/collection/GenSetLike.scala | 10
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenTraversableViewLike.scala | 6
-rw-r--r--  src/library/scala/collection/SetLike.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 23
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 18
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 11
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/StringOps.scala | 10
-rw-r--r--  src/library/scala/collection/immutable/WrappedString.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 9
-rw-r--r--  src/library/scala/collection/mutable/StringBuilder.scala | 11
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashSet.scala | 4
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 6
-rw-r--r--  src/library/scala/io/BufferedSource.scala | 3
-rw-r--r--  src/library/scala/math/BigInt.scala | 7
-rwxr-xr-x  src/library/scala/reflect/generic/AnnotationInfos.scala | 2
-rw-r--r--  src/library/scala/reflect/generic/ByteCodecs.scala | 8
-rwxr-xr-x  src/library/scala/reflect/generic/Constants.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Flags.scala | 43
-rw-r--r--  src/library/scala/reflect/generic/HasFlags.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Names.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/PickleBuffer.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/PickleFormat.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Scopes.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/StandardDefinitions.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/StdNames.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Symbols.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Trees.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/Types.scala | 2
-rwxr-xr-x  src/library/scala/reflect/generic/UnPickler.scala | 44
-rwxr-xr-x  src/library/scala/reflect/generic/Universe.scala | 1
-rw-r--r--  src/library/scala/runtime/NonLocalReturnControl.scala | 4
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 16
-rw-r--r--  src/library/scala/util/control/NoStackTrace.scala | 6
-rwxr-xr-x  src/library/scala/xml/pull/XMLEventReader.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 12
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 13
-rw-r--r--  src/swing/scala/swing/RichWindow.scala | 6
-rw-r--r--  test/disabled/pos/spec-List.scala (renamed from test/files/pos/spec-List.scala) | 0
-rw-r--r--  test/files/jvm/actor-sync-send-timeout.scala | 47
-rw-r--r--  test/files/jvm/manifests.check | 2
-rw-r--r--  test/files/neg/abstract-inaccessible.check | 13
-rw-r--r--  test/files/neg/abstract-inaccessible.flags | 1
-rw-r--r--  test/files/neg/abstract-inaccessible.scala | 9
-rw-r--r--  test/files/neg/bug278.check | 2
-rw-r--r--  test/files/neg/bug4419.check | 4
-rw-r--r--  test/files/neg/bug4419.scala | 3
-rw-r--r--  test/files/neg/bug4533.check | 4
-rw-r--r--  test/files/neg/bug4533.scala | 8
-rw-r--r--  test/files/neg/bug4584.check | 4
-rw-r--r--  test/files/neg/bug4584.scala | 1
-rw-r--r--  test/files/neg/bug4727.check | 11
-rw-r--r--  test/files/neg/bug4727.scala | 7
-rw-r--r--  test/files/neg/bug588.check | 4
-rw-r--r--  test/files/neg/bug752.check | 4
-rw-r--r--  test/files/neg/divergent-implicit.check | 6
-rw-r--r--  test/files/neg/nullary-override.check | 4
-rw-r--r--  test/files/neg/nullary-override.flags | 1
-rw-r--r--  test/files/neg/nullary-override.scala | 3
-rw-r--r--  test/files/neg/t0003.check | 4
-rw-r--r--  test/files/neg/t0015.check | 2
-rw-r--r--  test/files/neg/t2179.check | 9
-rw-r--r--  test/files/neg/t2641.check | 12
-rw-r--r--  test/files/neg/t3507.check | 2
-rw-r--r--  test/files/neg/t3774.check | 7
-rw-r--r--  test/files/neg/unit2anyref.check | 4
-rw-r--r--  test/files/neg/variances.check | 2
-rw-r--r--  test/files/pos/bug1439.flags | 1
-rw-r--r--  test/files/pos/bug1439.scala (renamed from test/files/pos/t1439.scala) | 3
-rw-r--r--  test/files/pos/bug2094.scala | 31
-rw-r--r--  test/files/pos/bug3048.scala | 8
-rw-r--r--  test/files/pos/bug3343.scala | 15
-rw-r--r--  test/files/pos/bug3528.scala | 8
-rw-r--r--  test/files/pos/bug4018.scala | 15
-rw-r--r--  test/files/pos/bug4237.scala | 6
-rw-r--r--  test/files/pos/bug4501.scala | 14
-rwxr-xr-x  test/files/pos/bug4553.scala | 11
-rw-r--r--  test/files/pos/bug4731.scala | 14
-rw-r--r--  test/files/pos/hk-infer.scala | 37
-rwxr-xr-x  test/files/pos/t2179.scala (renamed from test/files/neg/t2179.scala) | 0
-rw-r--r--  test/files/pos/t3774.scala (renamed from test/files/neg/t3774.scala) | 0
-rw-r--r--  test/files/pos/t4547.scala | 4
-rw-r--r--  test/files/pos/t4593.scala | 20
-rw-r--r--  test/files/pos/t4692.scala | 27
-rw-r--r--  test/files/pos/widen-existential.scala | 7
-rw-r--r--  test/files/run/TestFlatMap.scala | 29
-rw-r--r--  test/files/run/bug2308a.check | 1
-rw-r--r--  test/files/run/bug2308a.scala | 7
-rw-r--r--  test/files/run/bug4110.check | 2
-rw-r--r--  test/files/run/bug4110.scala | 11
-rw-r--r--  test/files/run/bug4570.check | 1
-rw-r--r--  test/files/run/bug4570.scala | 8
-rw-r--r--  test/files/run/bug4656.check | 1
-rw-r--r--  test/files/run/bug4656.scala | 13
-rw-r--r--  test/files/run/bug4660.scala | 11
-rw-r--r--  test/files/run/bug4697.check | 1
-rw-r--r--  test/files/run/bug4697.scala | 8
-rw-r--r--  test/files/run/constrained-types.check | 2
-rw-r--r--  test/files/run/getClassTest.check | 18
-rw-r--r--  test/files/run/getClassTest.scala | 66
-rw-r--r--  test/files/run/null-and-intersect.check | 9
-rw-r--r--  test/files/run/null-and-intersect.scala | 34
-rw-r--r--  test/files/run/view-headoption.check | 28
-rw-r--r--  test/files/run/view-headoption.scala | 18
-rw-r--r--  test/files/scalacheck/range.scala | 7
-rw-r--r--  test/files/specialized/fft.check | 2
-rw-r--r--  test/pending/pos/no-widen-locals.scala (renamed from test/files/pos/no-widen-locals.scala) | 2
-rw-r--r--  test/pending/pos/those-kinds-are-high.scala | 37
-rw-r--r--  test/scaladoc/scala/IndexScriptTest.scala | 52
-rw-r--r--  test/scaladoc/scala/IndexTest.scala | 20
218 files changed, 3904 insertions, 2400 deletions
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 4685e0d0fd..ea7f77e45a 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -9,8 +9,7 @@ Bundle-ClassPath:
bin,
lib/fjbg.jar,
lib/jline.jar,
- lib/msil.jar,
- lib/scala-library.jar
+ lib/msil.jar
Export-Package:
scala.tools.nsc,
scala.tools.nsc.ast,
@@ -22,6 +21,10 @@ Export-Package:
scala.tools.nsc.backend.opt,
scala.tools.nsc.dependencies,
scala.tools.nsc.doc,
+ scala.tools.nsc.doc.html,
+ scala.tools.nsc.doc.html.page,
+ scala.tools.nsc.doc.model,
+ scala.tools.nsc.doc.model.comment,
scala.tools.nsc.interactive,
scala.tools.nsc.interpreter,
scala.tools.nsc.io,
@@ -36,6 +39,8 @@ Export-Package:
scala.tools.nsc.typechecker,
scala.tools.nsc.util,
scala.tools.util,
+ scala.reflect.internal,
+ scala.reflect.internal.settings,
ch.epfl.lamp.compiler.msil,
ch.epfl.lamp.compiler.msil.emit,
ch.epfl.lamp.compiler.msil.util,
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
index 74b0d9d91e..ea3729c24c 100644
--- a/lib/jline.jar.desired.sha1
+++ b/lib/jline.jar.desired.sha1
@@ -1 +1 @@
-5eade2bafa228aab1f5abc63e6062ce8052e4f6d ?jline.jar
+545b37930819a1196705e582a232abfeb252cc8d ?jline.jar
diff --git a/project/build.properties b/project/build.properties
index 39dc158e14..4775404a76 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -2,7 +2,7 @@
#Sun Apr 11 14:24:47 CEST 2010
project.name=scala
def.scala.version=2.7.7
-sbt.version=0.7.5.RC0
+sbt.version=0.7.7
copyright=Copyright 2002-2011, LAMP/EPFL
build.scala.versions=2.7.7
project.initialize=false
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 25cfbf7865..57e107538c 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -590,20 +590,27 @@ trait Actor extends AbstractActor with ReplyReactor with ActorCanReply with Inpu
receiveTimeout
} else {
if (onTimeout.isEmpty) {
- waitingFor = f
- received = None
- isSuspended = true
+ if (!f.isDefinedAt(TIMEOUT))
+ sys.error("unhandled timeout")
+
val thisActor = this
onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
+ def run() {
+ thisActor.send(TIMEOUT, thisActor)
+ }
})
Actor.timer.schedule(onTimeout.get, msec)
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else
- sys.error("unhandled timeout")
+ }
+
+ // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
+ // See SI-4759
+ waitingFor = f
+ received = None
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
}
}
todo()
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 6e05234892..f8d5f74f50 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -615,7 +615,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
file
}
- val res = execWithArgFiles(java, List(writeSettings.getCanonicalPath))
+ val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath))
if (failonerror && res != 0)
buildError("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 21651831df..e88a9730e8 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -21,7 +21,7 @@ set _JAVACMD=%JAVACMD%
if "%_JAVACMD%"=="" (
if not "%JAVA_HOME%"=="" (
- if exist "%JAVA_HOME%\bin\java.exe" set _JAVACMD=%JAVA_HOME%\bin\java.exe
+ if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
)
)
@@ -37,7 +37,7 @@ if "%_TOOL_CLASSPATH%"=="" (
for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
)
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Denv.emacs="%EMACS%" @properties@
+set _PROPS=-Dscala.home="%_SCALA_HOME%" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
"%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 9f8e488c43..31b44744da 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -12,12 +12,12 @@ trait AnyValReps {
self: AnyVals =>
sealed abstract class AnyValNum(name: String) extends AnyValRep(name) {
- def isCardinal: Boolean = isIntegerType(this)
- def unaryOps = if (isCardinal) List("+", "-", "~") else List("+", "-")
- def bitwiseOps = if (isCardinal) List("|", "&", "^") else Nil
- def shiftOps = if (isCardinal) List("<<", ">>>", ">>") else Nil
- def comparisonOps = List("==", "!=", "<", "<=", ">", ">=")
- def otherOps = List("+", "-" ,"*", "/", "%")
+ def isCardinal: Boolean = isIntegerType(this)
+ def unaryOps = if (isCardinal) List("+", "-", "~") else List("+", "-")
+ def bitwiseOps = if (isCardinal) List("|", "&", "^") else Nil
+ def shiftOps = if (isCardinal) List("<<", ">>>", ">>") else Nil
+ def comparisonOps = List("==", "!=", "<", "<=", ">", ">=")
+ def otherOps = List("+", "-" ,"*", "/", "%")
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -49,7 +49,7 @@ trait AnyValReps {
)
xs1 ++ xs2
}
- def classLines = clumps.foldLeft(List[String]()) {
+ def classLines = (clumps :+ commonClassLines).foldLeft(List[String]()) {
case (res, Nil) => res
case (res, lines) =>
val xs = lines map {
@@ -80,6 +80,9 @@ trait AnyValReps {
sealed abstract class AnyValRep(val name: String) {
def classLines: List[String]
def objectLines: List[String]
+ def commonClassLines = List(
+ "def getClass(): Class[@name@]"
+ )
def lcname = name.toLowerCase
def boxedName = this match {
@@ -243,6 +246,8 @@ def &&(x: Boolean): Boolean = sys.error("stub")
def |(x: Boolean): Boolean = sys.error("stub")
def &(x: Boolean): Boolean = sys.error("stub")
def ^(x: Boolean): Boolean = sys.error("stub")
+
+def getClass(): Class[Boolean] = sys.error("stub")
""".trim.lines.toList
def objectLines = interpolate(allCompanions).lines.toList
@@ -254,7 +259,9 @@ def ^(x: Boolean): Boolean = sys.error("stub")
* only one value of type Unit: `()`.
*/
"""
- def classLines = Nil
+ def classLines = List(
+ """def getClass(): Class[Unit] = sys.error("stub")"""
+ )
def objectLines = interpolate(allCompanions).lines.toList
override def boxUnboxImpls = Map(
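The getClass stubs generated above give every AnyVal a precise getClass signature (Class[Boolean], Class[Unit], and so on) instead of the inherited Class[_]. A minimal sketch, not part of the patch, of the resulting static types:

    object GetClassSketch {
      def main(args: Array[String]): Unit = {
        // With `def getClass(): Class[Int]` on Int, the precise type is known statically.
        val ci: Class[Int]     = 5.getClass
        val cb: Class[Boolean] = true.getClass
        println(ci)   // expected: int
        println(cb)   // expected: boolean
      }
    }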
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 6af7fe991f..6682cab8a0 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -157,7 +157,10 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
- def log(msg: => AnyRef): Unit = if (opt.logPhase) inform("[log " + phase + "] " + msg)
+ // Over 200 closure objects are eliminated by inlining this.
+ @inline final def log(msg: => AnyRef): Unit =
+ if (settings.log containsPhase globalPhase)
+ inform("[log " + phase + "] " + msg)
def logThrowable(t: Throwable): Unit = globalError(throwableAsString(t))
def throwableAsString(t: Throwable): String =
@@ -232,7 +235,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// debugging
def checkPhase = wasActive(settings.check)
def logPhase = isActive(settings.log)
- def typerDebug = settings.Ytyperdebug.value
def writeICode = settings.writeICode.value
// showing/printing things
@@ -255,9 +257,10 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def profileClass = settings.YprofileClass.value
def profileMem = settings.YprofileMem.value
- // XXX: short term, but I can't bear to add another option.
- // scalac -Dscala.timings will make this true.
+ // shortish-term property based options
def timings = sys.props contains "scala.timings"
+ def inferDebug = (sys.props contains "scalac.debug.infer") || settings.Yinferdebug.value
+ def typerDebug = (sys.props contains "scalac.debug.typer") || settings.Ytyperdebug.value
}
// True if -Xscript has been set, indicating a script run.
@@ -323,7 +326,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
currentRun.currentUnit = unit
if (!cancelled(unit)) {
currentRun.informUnitStarting(this, unit)
- reporter.withSource(unit.source) { apply(unit) }
+ apply(unit)
}
currentRun.advanceUnit
} finally {
@@ -335,6 +338,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Switch to turn on detailed type logs */
var printTypings = opt.typerDebug
+ var printInfers = opt.inferDebug
// phaseName = "parser"
object syntaxAnalyzer extends {
@@ -1051,9 +1055,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def loop(ph: Phase) {
if (stop(ph)) refreshProgress
else {
- reporter.withSource(unit.source) {
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
- }
+ atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
loop(ph.next match {
case `ph` => null // ph == ph.next implies terminal, and null ends processing
case x => x
@@ -1091,7 +1093,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/**
* Re-orders the source files to
* 1. ScalaObject
- * 2. LowPriorityImplicits / StandardEmbeddings (i.e. parents of Predef)
+ * 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
* 3. the rest
*
* 1 is to avoid cyclic reference errors.
@@ -1121,6 +1123,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
case "ScalaObject.scala" => 1
case "LowPriorityImplicits.scala" => 2
case "StandardEmbeddings.scala" => 2
+ case "EmbeddedControls.scala" => 2
case "Predef.scala" => 3 /* Predef.scala before Any.scala, etc. */
case _ => goLast
}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 96b7bce885..3ec5b2f044 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -5,18 +5,9 @@
package scala.tools.nsc
-import java.io.{
- InputStream, OutputStream,
- BufferedReader, FileInputStream, FileOutputStream,
- FileReader, InputStreamReader, PrintWriter, FileWriter,
- IOException
-}
-import io.{ Directory, File, Path, PlainFile }
+import io.{ Directory, File, Path }
+import java.io.IOException
import java.net.URL
-import java.util.jar.{ JarEntry, JarOutputStream }
-
-import util.{ waitingForThreads }
-import scala.tools.util.PathResolver
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
import util.Exceptional.unwrap
@@ -136,7 +127,7 @@ class ScriptRunner extends HasCompileSocket {
/** The script runner calls sys.exit to communicate a return value, but this must
* not take place until there are no non-daemon threads running. Tickets #1955, #2006.
*/
- waitingForThreads {
+ util.waitingForThreads {
if (settings.save.value) {
val jarFile = jarFileFor(scriptFile)
def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 404c9156fe..6c35514110 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -253,12 +253,6 @@ trait TreeDSL {
class SymbolMethods(target: Symbol) {
def BIND(body: Tree) = Bind(target, body)
-
- // Option
- def IS_DEFINED() =
- if (target.tpe.typeSymbol == SomeClass) TRUE // is Some[_]
- else NOT(ID(target) DOT nme.isEmpty) // is Option[_]
-
def IS_NULL() = REF(target) OBJ_EQ NULL
def NOT_NULL() = REF(target) OBJ_NE NULL
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index f27a765f78..268e104309 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -211,7 +211,7 @@ abstract class TreeGen {
if (tree.tpe != null || !tree.hasSymbol) tree.tpe
else tree.symbol.tpe
- if (!global.phase.erasedTypes && settings.Xchecknull.value &&
+ if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
tpe <:< NotNullClass.tpe && !tpe.isNotNull)
mkRuntimeCall(nme.checkInitialized, List(tree))
else
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 8a6f26c9b1..730d00521e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -96,10 +96,11 @@ abstract class TreeInfo {
false
}
- def mayBeVarGetter(sym: Symbol) = sym.info match {
- case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
- case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
- case _ => false
+ def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
+ case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
+ case PolyType(_, NullaryMethodType(_)) => sym.owner.isClass && !sym.isStable
+ case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
+ case _ => false
}
def isVariableOrGetter(tree: Tree) = {
@@ -361,8 +362,9 @@ abstract class TreeInfo {
case _ => false
}
- /** Some handy extractors for spotting true and false expressions
- * through the haze of braces.
+ /** Some handy extractors for spotting trees through the
+ * haze of irrelevant braces: i.e. Block(Nil, SomeTree)
+ * should not keep us from seeing SomeTree.
*/
abstract class SeeThroughBlocks[T] {
protected def unapplyImpl(x: Tree): T
@@ -372,9 +374,21 @@ abstract class TreeInfo {
}
}
object IsTrue extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(true))
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(true)) => true
+ case _ => false
+ }
}
object IsFalse extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(false))
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(false)) => true
+ case _ => false
+ }
+ }
+ object IsIf extends SeeThroughBlocks[Option[(Tree, Tree, Tree)]] {
+ protected def unapplyImpl(x: Tree) = x match {
+ case If(cond, thenp, elsep) => Some(cond, thenp, elsep)
+ case _ => None
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 56a7ee6ec8..6d7b4304ec 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -949,6 +949,10 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
if (tree eq orig) super.transform(tree)
else tree
}
+ // Create a readable string describing a substitution.
+ private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
+ "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
+ }
class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
@@ -961,11 +965,13 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
case _ =>
super.transform(tree)
}
+ override def toString = substituterString("Symbol", "Tree", from, to)
}
class TreeTypeSubstituter(val from: List[Symbol], val to: List[Type]) extends Traverser {
val typeSubst = new SubstTypeMap(from, to)
def fromContains = typeSubst.fromContains
+ def isEmpty = from.isEmpty && to.isEmpty
override def traverse(tree: Tree) {
if (tree.tpe ne null) tree.tpe = typeSubst(tree.tpe)
@@ -994,7 +1000,7 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
super.traverse(tree)
}
override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
- override def toString() = "TreeSymSubstTraverser("+from+","+to+")"
+ override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to)
}
/** Substitute symbols in 'from' with symbols in 'to'. Returns a new
@@ -1025,7 +1031,7 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
super.transform(tree)
}
def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- override def toString() = "TreeSymSubstituter("+from+","+to+")"
+ override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
}
class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 88b998f1ac..b5ec0ceffb 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -21,12 +21,15 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
override val keepsTypeParams = false
def apply(unit: global.CompilationUnit) {
- global.informProgress("parsing " + unit)
+ import global._
+ informProgress("parsing " + unit)
unit.body =
if (unit.source.file.name.endsWith(".java")) new JavaUnitParser(unit).parse()
- else if (!global.reporter.incompleteHandled) new UnitParser(unit).smartParse()
- else new UnitParser(unit).parse()
- if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body)
+ else if (reporter.incompleteHandled) new UnitParser(unit).parse()
+ else new UnitParser(unit).smartParse()
+
+ if (settings.Yrangepos.value && !reporter.hasErrors)
+ validatePositions(unit.body)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 1891c8f13d..522b1ddd39 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -13,10 +13,10 @@ import scala.tools.util.PathResolver
trait JavaPlatform extends Platform[AbstractFile] {
import global._
- import definitions.{ BoxesRunTimeClass, getMember }
+ import definitions._
lazy val classPath = new PathResolver(settings).result
- def rootLoader = new loaders.JavaPackageLoader(classPath)
+ def rootLoader = new loaders.JavaPackageLoader(classPath)
private def depAnalysisPhase =
if (settings.make.isDefault) Nil
@@ -26,20 +26,19 @@ trait JavaPlatform extends Platform[AbstractFile] {
flatten, // get rid of inner classes
liftcode, // generate reified trees
genJVM // generate .class files
- ) ::: depAnalysisPhase
+ ) ++ depAnalysisPhase
- lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
- def externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
- def externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
- def externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+ lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
+ lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
+ lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
+ lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
/** We could get away with excluding BoxedBooleanClass for the
* purpose of equality testing since it need not compare equal
* to anything but other booleans, but it should be present in
* case this is put to other uses.
*/
- def isMaybeBoxed(sym: Symbol): Boolean = {
- import definitions._
+ def isMaybeBoxed(sym: Symbol) = {
(sym == ObjectClass) ||
(sym == JavaSerializableClass) ||
(sym == ComparableClass) ||
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 76e0162593..0725578ecc 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -249,7 +249,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
parents = parents.distinct
if (parents.tail.nonEmpty)
- ifaces = parents drop 1 map (x => javaName(x.typeSymbol)) toArray;
+ ifaces = mkArray(parents drop 1 map (x => javaName(x.typeSymbol)))
jclass = fjbgContext.JClass(javaFlags(c.symbol),
name,
@@ -280,7 +280,17 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
!(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
}
- val lmoc = c.symbol.companionModule
+ // At some point this started throwing lots of exceptions as a compile was finishing.
+ // error: java.lang.AssertionError:
+ // assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable)
+ // ...is the one I've seen repeatedly. Suppressing.
+ val lmoc = (
+ try c.symbol.companionModule
+ catch { case x: AssertionError =>
+ Console.println("Suppressing failed assert: " + x)
+ NoSymbol
+ }
+ )
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol && !c.symbol.isInterface) {
if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
@@ -567,7 +577,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* in which case we treat every signature as valid. Medium term we
* should certainly write independent signature validation.
*/
- if (SigParser.isParserAvailable && !isValidSignature(sym, sig)) {
+ if (settings.Xverify.value && SigParser.isParserAvailable && !isValidSignature(sym, sig)) {
clasz.cunit.warning(sym.pos,
"""|compiler bug: created invalid generic signature for %s in %s
|signature: %s
@@ -744,8 +754,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
jmethod = jclass.addNewMethod(flags,
javaName(m.symbol),
resTpe,
- m.params map (p => javaType(p.kind)) toArray,
- m.params map (p => javaName(p.sym)) toArray)
+ mkArray(m.params map (p => javaType(p.kind))),
+ mkArray(m.params map (p => javaName(p.sym))))
addRemoteException(jmethod, m.symbol)
@@ -939,8 +949,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
accessFlags,
javaName(m),
javaType(methodInfo.resultType),
- paramJavaTypes.toArray,
- paramNames.toArray)
+ mkArray(paramJavaTypes),
+ mkArray(paramNames))
val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode]
mirrorCode.emitGETSTATIC(moduleName,
nme.MODULE_INSTANCE_FIELD.toString,
@@ -1483,8 +1493,9 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
// val crtLine = instr.pos.line.get(lastLineNr);
+
val crtLine = try {
- (instr.pos).line
+ if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
} catch {
case _: UnsupportedOperationException =>
log("Warning: wrong position in: " + method)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
index 6ff5d42e55..418dbea9e1 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -68,6 +68,14 @@ trait GenJVMUtil {
GE -> LT
)
+ /** Specialized array conversion to prevent calling
+ * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
+ */
+
+ def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
+ def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
+
+
/** Return the a name of this symbol that can be used on the Java
* platform. It removes spaces from names.
*
@@ -109,7 +117,7 @@ trait GenJVMUtil {
if (s.isMethod)
new JMethodType(
if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
- s.tpe.paramTypes map javaType toArray
+ mkArray(s.tpe.paramTypes map javaType)
)
else
javaType(s.tpe)
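The mkArray helpers above sidestep TraversableOnce.toArray, whose generic path needs a ClassManifest and allocates through java.lang.reflect.Array.newInstance. A sketch of the two paths under an assumed Traversable[String] input:

    object MkArraySketch {
      // Generic path: requires a ClassManifest and allocates the array reflectively.
      def viaToArray(xs: Traversable[String]): Array[String] = xs.toArray

      // Specialized path, as in the patch: size known up front, concrete element type,
      // no reflective array instantiation.
      def viaCopy(xs: Traversable[String]): Array[String] = {
        val a = new Array[String](xs.size)
        xs.copyToArray(a)
        a
      }

      def main(args: Array[String]): Unit =
        println(viaCopy(List("a", "b")).toSeq == viaToArray(List("a", "b")).toSeq)  // true
    }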
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 1f0c85361e..9b29ebd745 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -13,7 +13,7 @@ trait Uncompilable {
val global: Global
val settings: Settings
- import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment }
+ import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
import global.definitions.RootClass
private implicit def translateName(name: Global#Name) =
@@ -43,6 +43,6 @@ trait Uncompilable {
pairs
}
override def toString = pairs.size + " uncompilable symbols:\n" + (
- symbols map (x => " " + x.owner.fullName + " " + x.defString) mkString "\n"
+ symbols filterNot (_ == NoSymbol) map (x => " " + x.owner.fullName + " " + x.defString) mkString "\n"
)
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 8fc044c195..88cd589797 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -97,6 +97,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
copyResource("lib/unselected.png")
new page.Index(universe, index) writeFor this
+ new page.IndexScript(universe, index) writeFor this
writeTemplates(page => page.writeFor(this))
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index d99403e5ba..cfa846b097 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -15,18 +15,11 @@ import xml.dtd.{DocType, PublicID}
import scala.collection._
import scala.reflect.NameTransformer
import java.nio.channels.Channels
-import java.io.{FileOutputStream, File}
/** An html page that is part of a Scaladoc site.
* @author David Bernard
* @author Gilles Dubochet */
-abstract class HtmlPage { thisPage =>
-
- /** The path of this page, relative to the API site. `path.tail` is a list of folder names leading to this page (from
- * closest package to one-above-root package), `path.head` is the file name of this page. Note that `path` has a
- * length of at least one. */
- def path: List[String]
-
+abstract class HtmlPage extends Page { thisPage =>
/** The title of this page. */
protected def title: String
@@ -36,9 +29,6 @@ abstract class HtmlPage { thisPage =>
/** The body of this page. */
def body: NodeSeq
- /** Writes this page as a file. The file's location is relative to the generator's site root, and the encoding is
- * also defined by the generator.
- * @param generator The generator that is writing this page. */
def writeFor(site: HtmlFactory): Unit = {
val doctype =
DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
@@ -51,10 +41,7 @@ abstract class HtmlPage { thisPage =>
</head>
{ body }
</html>
- val pageFile = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
- val pageFolder = pageFile.getParentFile
- if (!pageFolder.exists) pageFolder.mkdirs()
- val fos = new FileOutputStream(pageFile.getPath)
+ val fos = createFileOutputStream(site)
val w = Channels.newWriter(fos.getChannel, site.encoding)
try {
w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
@@ -68,52 +55,6 @@ abstract class HtmlPage { thisPage =>
//XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
}
- def templateToPath(tpl: TemplateEntity): List[String] = {
- def doName(tpl: TemplateEntity): String =
- NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
- def downPacks(pack: Package): List[String] =
- if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
- def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
- tpl.inTemplate match {
- case inPkg: Package => (nme + ".html", inPkg)
- case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
- }
- }
- val (file, pack) =
- tpl match {
- case p: Package => ("package.html", p)
- case _ => downInner(doName(tpl), tpl)
- }
- file :: downPacks(pack)
- }
-
- /** A relative link from this page to some destination class entity.
- * @param destEntity The class or object entity that the link will point to. */
- def relativeLinkTo(destClass: TemplateEntity): String =
- relativeLinkTo(templateToPath(destClass))
-
- /** A relative link from this page to some destination page in the Scaladoc site.
- * @param destPage The page that the link will point to. */
- def relativeLinkTo(destPage: HtmlPage): String = {
- relativeLinkTo(destPage.path)
- }
-
- /** A relative link from this page to some destination path.
- * @param destPath The path that the link will point to. */
- def relativeLinkTo(destPath: List[String]): String = {
- def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
- case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
- relativize(fs, ts)
- case (fss, tss) =>
- List.fill(fss.length - 1)("..") ::: tss
- }
- relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
- }
-
- def absoluteLinkTo(destPath: List[String]): String = {
- destPath.reverse.mkString("/")
- }
-
/** Transforms an optional comment into an styled HTML tree representing its body if it is defined, or into an empty
* node sequence if it is not. */
def commentToHtml(comment: Option[Comment]): NodeSeq =
@@ -229,14 +170,6 @@ abstract class HtmlPage { thisPage =>
case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
}
- def docEntityKindToString(ety: DocTemplateEntity) =
- if (ety.isTrait) "trait"
- else if (ety.isCaseClass) "case class"
- else if (ety.isClass) "class"
- else if (ety.isObject) "object"
- else if (ety.isPackage) "package"
- else "class" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
-
/** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */
def docEntityKindToBigImage(ety: DocTemplateEntity) =
if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
new file mode 100644
index 0000000000..f72b0a49eb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala
@@ -0,0 +1,93 @@
+package scala.tools.nsc.doc.html
+import scala.tools.nsc.doc.model._
+import java.io.{FileOutputStream, File}
+import scala.reflect.NameTransformer
+
+abstract class Page {
+ thisPage =>
+
+ /** The path of this page, relative to the API site. `path.tail` is a list of folder names leading to this page (from
+ * closest package to one-above-root package), `path.head` is the file name of this page. Note that `path` has a
+ * length of at least one. */
+ def path: List[String]
+
+ def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
+
+ def createFileOutputStream(site: HtmlFactory) = {
+ val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
+ val folder = file.getParentFile
+ if (! folder.exists) {
+ folder.mkdirs
+ }
+ new FileOutputStream(file.getPath)
+ }
+
+ /** Writes this page as a file. The file's location is relative to the generator's site root, and the encoding is
+ * also defined by the generator.
+ * @param generator The generator that is writing this page. */
+ def writeFor(site: HtmlFactory): Unit
+
+ def docEntityKindToString(ety: DocTemplateEntity) =
+ if (ety.isTrait) "trait"
+ else if (ety.isCaseClass) "case class"
+ else if (ety.isClass) "class"
+ else if (ety.isObject) "object"
+ else if (ety.isPackage) "package"
+ else "class" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+
+ def templateToPath(tpl: TemplateEntity): List[String] = {
+ def doName(tpl: TemplateEntity): String =
+ NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
+ def downPacks(pack: Package): List[String] =
+ if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
+ def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
+ tpl.inTemplate match {
+ case inPkg: Package => (nme + ".html", inPkg)
+ case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl)
+ }
+ }
+ val (file, pack) =
+ tpl match {
+ case p: Package => ("package.html", p)
+ case _ => downInner(doName(tpl), tpl)
+ }
+ file :: downPacks(pack)
+ }
+
+ /** A relative link from this page to some destination class entity.
+ * @param destEntity The class or object entity that the link will point to. */
+ def relativeLinkTo(destClass: TemplateEntity): String =
+ relativeLinkTo(templateToPath(destClass))
+
+ /** A relative link from this page to some destination page in the Scaladoc site.
+ * @param destPage The page that the link will point to. */
+ def relativeLinkTo(destPage: HtmlPage): String = {
+ relativeLinkTo(destPage.path)
+ }
+
+ /** A relative link from this page to some destination path.
+ * @param destPath The path that the link will point to. */
+ def relativeLinkTo(destPath: List[String]): String = {
+ def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
+ case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point
+ relativize(fs, ts)
+ case (fss, tss) =>
+ List.fill(fss.length - 1)("..") ::: tss
+ }
+ relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
+ }
+
+ def isExcluded(dtpl: DocTemplateEntity) = {
+ val qname = dtpl.qualifiedName
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
+}
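The relativize helper inside Page.relativeLinkTo above walks off the common prefix of the two reversed paths and pads the remainder with "..". A worked example with hypothetical Scaladoc pages:

    object RelativeLinkSketch {
      def relativize(from: List[String], to: List[String]): List[String] = (from, to) match {
        case (f :: fs, t :: ts) if f == t => relativize(fs, ts)   // shared prefix: keep walking
        case (fss, tss)                   => List.fill(fss.length - 1)("..") ::: tss
      }

      def main(args: Array[String]): Unit = {
        // path = file name first, then enclosing packages from innermost to outermost
        val from = List("Map.html", "immutable", "collection", "scala")   // this page
        val to   = List("Seq.html", "collection", "scala")                // destination
        println(relativize(from.reverse, to.reverse).mkString("/"))       // prints ../Seq.html
      }
    }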
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 4fc10b2ec2..7203230084 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -48,23 +48,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</div>
</body>
-
- def isExcluded(dtpl: DocTemplateEntity) = {
- val qname = dtpl.qualifiedName
- ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
- qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
- ) && !(
- qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
- qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
- qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
- qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
- qname == "scala.runtime.AbstractFunction2"
- )
- )
- }
-
def browser =
- <xml:group>
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
<div id="filter"></div>
@@ -121,60 +105,8 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</xml:group>
}
packageElem(universe.rootPackage)
- }</div></div>{ scriptElement }
+ }</div></div><script src="index.js"></script>
</div>
- </xml:group>
-
- def mergeByQualifiedName(source: List[DocTemplateEntity]): Map[String, List[DocTemplateEntity]]= {
- var result = Map[String, List[DocTemplateEntity]]()
-
- for (t <- source) {
- val k = t.qualifiedName
- result += k -> (result.getOrElse(k, List()) :+ t)
- }
-
- result
- }
-
- def scriptElement = {
- val packages = allPackagesWithTemplates.toIterable.map(_ match {
- case (pack, templates) => {
- val merged = mergeByQualifiedName(templates)
-
- val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
- val pairs = merged(key).map(
- t => docEntityKindToString(t) -> relativeLinkTo(t)
- ) :+ ("name" -> key)
-
- JSONObject(scala.collection.immutable.Map(pairs : _*))
- })
-
- pack.qualifiedName -> JSONArray(ary)
- }
- }).toSeq
-
- val obj =
- JSONObject(scala.collection.immutable.Map(packages : _*)).toString()
-
- <script type="text/javascript">
- Index.PACKAGES = {scala.xml.Unparsed(obj)};
- </script>
- }
-
- def allPackagesWithTemplates: Map[Package, List[DocTemplateEntity]] = {
- Map(allPackages.map((key) => {
- key -> key.templates.filter(t => !t.isPackage && !isExcluded(t))
- }) : _*)
- }
-
- def allPackages: List[Package] = {
- def f(parent: Package): List[Package] = {
- parent.packages.flatMap(
- p => f(p) :+ p
- )
- }
- f(universe.rootPackage).sortBy(_.toString)
- }
def packageQualifiedName(ety: DocTemplateEntity): String =
if (ety.inTemplate.isPackage) ety.name else (packageQualifiedName(ety.inTemplate) + "." + ety.name)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
new file mode 100644
index 0000000000..2cafe6caa3
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -0,0 +1,68 @@
+package scala.tools.nsc.doc.html.page
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
+import scala.tools.nsc.doc.html.{Page, HtmlFactory}
+import java.nio.channels.Channels
+import scala.util.parsing.json.{JSONObject, JSONArray}
+
+class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
+ def path = List("index.js")
+
+ override def writeFor(site: HtmlFactory): Unit = {
+ val stream = createFileOutputStream(site)
+ val writer = Channels.newWriter(stream.getChannel, site.encoding)
+ try {
+ writer.write("Index.PACKAGES = " + packages.toString() + ";")
+ }
+ finally {
+ writer.close
+ stream.close
+ }
+ }
+
+ val packages = {
+ val pairs = allPackagesWithTemplates.toIterable.map(_ match {
+ case (pack, templates) => {
+ val merged = mergeByQualifiedName(templates)
+
+ val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
+ val pairs = merged(key).map(
+ t => docEntityKindToString(t) -> relativeLinkTo(t)
+ ) :+ ("name" -> key)
+
+ JSONObject(scala.collection.immutable.Map(pairs : _*))
+ })
+
+ pack.qualifiedName -> JSONArray(ary)
+ }
+ }).toSeq
+
+ JSONObject(scala.collection.immutable.Map(pairs : _*))
+ }
+
+ def mergeByQualifiedName(source: List[DocTemplateEntity]) = {
+ var result = Map[String, List[DocTemplateEntity]]()
+
+ for (t <- source) {
+ val k = t.qualifiedName
+ result += k -> (result.getOrElse(k, List()) :+ t)
+ }
+
+ result
+ }
+
+ def allPackages = {
+ def f(parent: Package): List[Package] = {
+ parent.packages.flatMap(
+ p => f(p) :+ p
+ )
+ }
+ f(universe.rootPackage).sortBy(_.toString)
+ }
+
+ def allPackagesWithTemplates = {
+ Map(allPackages.map((key) => {
+ key -> key.templates.filter(t => !t.isPackage && !isExcluded(t))
+ }) : _*)
+ }
+}
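IndexScript above moves the package index out of an inline <script> element into a standalone index.js, serialized with JSONObject and JSONArray from scala.util.parsing.json. A sketch, with hypothetical entries, of the payload shape it writes:

    import scala.util.parsing.json.{ JSONObject, JSONArray }

    object IndexScriptSketch {
      def main(args: Array[String]): Unit = {
        // One entry per qualified name: kind -> relative link, plus the name itself.
        val entry = JSONObject(Map(
          "name"   -> "scala.collection.immutable.List",
          "class"  -> "scala/collection/immutable/List.html",
          "object" -> "scala/collection/immutable/List$.html"))
        val packages = JSONObject(Map(
          "scala.collection.immutable" -> JSONArray(List(entry))))
        // Written verbatim to index.js; the browser-side code reads Index.PACKAGES.
        println("Index.PACKAGES = " + packages.toString + ";")
      }
    }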
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index fe14a1997a..b701e0e2ad 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -31,9 +31,12 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val valueMembers =
tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
- val (absValueMembers, concValueMembers) =
+ val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
+ val (deprValueMembers, concValueMembers) =
+ nonAbsValueMembers partition (_.deprecation.isDefined)
+
val typeMembers =
tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted
@@ -132,6 +135,13 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
</div>
}
+ { if (deprValueMembers.isEmpty) NodeSeq.Empty else
+ <div id="values" class="values members">
+ <h3>Deprecated Value Members</h3>
+ <ol>{ deprValueMembers map (memberToHtml(_)) }</ol>
+ </div>
+ }
+
{
NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
<div class="parent" name={ superTpl.qualifiedName }>
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index da97e10584..f245178ccd 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -942,10 +942,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
* @return true iff typechecked correctly
*/
private def applyPhase(phase: Phase, unit: CompilationUnit) {
- val oldSource = reporter.getSource
- reporter.withSource(unit.source) {
- atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
- }
+ atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
}
}
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 12a632fca6..499af546c1 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -9,8 +9,6 @@ package io
import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
import java.net.URL
-import PartialFunction._
-
import scala.collection.mutable.ArrayBuffer
/**
@@ -19,7 +17,7 @@ import scala.collection.mutable.ArrayBuffer
*/
object AbstractFile {
/** Returns "getFile(new File(path))". */
- def getFile(path: String): AbstractFile = getFile(Path(path))
+ def getFile(path: String): AbstractFile = getFile(File(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
/**
@@ -42,7 +40,7 @@ object AbstractFile {
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
- else if (file.isFile && Path.isJarOrZip(file)) ZipArchive fromFile file
+ else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file
else null
/**
@@ -54,7 +52,7 @@ object AbstractFile {
* @return ...
*/
def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isJarOrZip(url.getPath)) null
+ if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
else ZipArchive fromURL url
}
}
@@ -92,7 +90,8 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
def path: String
/** Checks extension case insensitively. */
- def hasExtension(other: String) = Path(path) hasExtension other
+ def hasExtension(other: String) = extension == other.toLowerCase
+ private lazy val extension: String = Path.extension(name)
/** The absolute file, if this is a relative file. */
def absolute: AbstractFile
@@ -102,7 +101,6 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns the underlying File if any and null otherwise. */
def file: JFile
- def sfile = Option(file) map (x => File(x)) // XXX
/** An underlying source, if known. Mostly, a zip/jar file. */
def underlyingSource: Option[AbstractFile] = None
@@ -111,7 +109,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
def exists: Boolean = (file eq null) || file.exists
/** Does this abstract file represent something which can contain classfiles? */
- def isClassContainer = isDirectory || (sfile exists (Path isJarOrZip _))
+ def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
/** Create a file on disk, if one does not exist already. */
def create(): Unit
@@ -137,6 +135,8 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** size of this file if it is a concrete file. */
def sizeOption: Option[Int] = None
+ def toURL: URL = if (file == null) null else file.toURI.toURL
+
/** Returns contents of file (if applicable) in a Char array.
* warning: use <code>Global.getSourceFile()</code> to use the proper
* encoding when converting to the char array.
@@ -199,7 +199,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile,
path0: String,
directory: Boolean): AbstractFile = {
- val separator = JFile.separatorChar
+ val separator = java.io.File.separatorChar
// trim trailing '/'s
val path: String = if (path0.last == separator) path0 dropRight 1 else path0
val length = path.length()
@@ -217,13 +217,23 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
file
}
+ private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = {
+ val lookup = lookupName(name, isDir)
+ if (lookup != null) lookup
+ else {
+ val jfile = new JFile(file, name)
+ if (isDir) jfile.mkdirs() else jfile.createNewFile()
+ new PlainFile(jfile)
+ }
+ }
+
/**
* Get the file in this directory with the given name,
* creating an empty file if it does not already existing.
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- Option(lookupName(name, false)) getOrElse new PlainFile((sfile.get / name).createFile())
+ fileOrSubdirectoryNamed(name, false)
}
/**
@@ -232,7 +242,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- Option(lookupName(name, true)) getOrElse new PlainFile((sfile.get / name).createDirectory())
+ fileOrSubdirectoryNamed(name, true)
}
protected def unsupported(): Nothing = unsupported(null)
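
fileNamed and subdirectoryNamed above now share fileOrSubdirectoryNamed: reuse an existing entry if one is found, otherwise create a plain file or a directory on disk with java.io.File calls. A self-contained sketch of that lookup-or-create behaviour (LookupOrCreate and the exists-based check are illustrative simplifications, not the compiler's AbstractFile):

import java.io.File

object LookupOrCreate {
  def fileOrSubdirectoryNamed(dir: File, name: String, isDir: Boolean): File = {
    val candidate = new File(dir, name)
    if (candidate.exists) candidate               // reuse an existing entry
    else {
      if (isDir) candidate.mkdirs() else candidate.createNewFile()
      candidate                                   // freshly created file or directory
    }
  }

  def main(args: Array[String]): Unit = {
    val tmp = new File(System.getProperty("java.io.tmpdir"))
    println(fileOrSubdirectoryNamed(tmp, "abstractfile-demo.txt", isDir = false))
  }
}
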
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index b11151ab7e..cc512493d9 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -17,14 +17,14 @@ import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
object File {
- def pathSeparator = JFile.pathSeparator
- def separator = JFile.separator
+ def pathSeparator = java.io.File.pathSeparator
+ def separator = java.io.File.separator
def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
// Create a temporary file, which will be deleted upon jvm exit.
def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) = {
- val jfile = JFile.createTempFile(prefix, suffix, dir)
+ val jfile = java.io.File.createTempFile(prefix, suffix, dir)
jfile.deleteOnExit()
apply(jfile)
}
@@ -43,20 +43,23 @@ object File {
// trigger java.lang.InternalErrors later when using it concurrently. We ignore all
// the exceptions so as not to cause spurious failures when no write access is available,
// e.g. google app engine.
- try {
- import Streamable.closing
- val tmp = JFile.createTempFile("bug6503430", null, null)
- try closing(new FileInputStream(tmp)) { in =>
- val inc = in.getChannel()
- closing(new FileOutputStream(tmp, true)) { out =>
- out.getChannel().transferFrom(inc, 0, 0)
- }
- }
- finally tmp.delete()
- }
- catch {
- case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
- }
+ //
+ // XXX need to put this behind a setting.
+ //
+ // try {
+ // import Streamable.closing
+ // val tmp = java.io.File.createTempFile("bug6503430", null, null)
+ // try closing(new FileInputStream(tmp)) { in =>
+ // val inc = in.getChannel()
+ // closing(new FileOutputStream(tmp, true)) { out =>
+ // out.getChannel().transferFrom(inc, 0, 0)
+ // }
+ // }
+ // finally tmp.delete()
+ // }
+ // catch {
+ // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
+ // }
}
import File._
import Path._
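
The File.scala hunk above comments out the JDK bug 6503430 FileChannel warm-up, with an XXX note to put it behind a setting. One hypothetical way to do that, sketched with a made-up system property rather than any real compiler flag:

import java.io.{ File, FileInputStream, FileOutputStream, IOException }

object TransferWarmup {
  // Run the warm-up only when an opt-in property is set; the property name is invented here.
  def maybeWarmUp(): Unit =
    if (sys.props contains "scala.filechannel.warmup") {
      try {
        val tmp = File.createTempFile("bug6503430", null, null)
        try {
          val in  = new FileInputStream(tmp)
          val out = new FileOutputStream(tmp, true)
          try out.getChannel().transferFrom(in.getChannel(), 0, 0)
          finally { in.close(); out.close() }
        }
        finally tmp.delete()
      }
      catch {
        // Same exception classes the original workaround swallowed.
        case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
      }
    }
}
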
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index 3cfab55aaa..f08edb126f 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -28,6 +28,19 @@ import scala.util.Random.alphanumeric
*/
object Path {
+ def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName)
+ def isExtensionJarOrZip(name: String): Boolean = {
+ val ext = extension(name)
+ ext == "jar" || ext == "zip"
+ }
+ def extension(name: String): String = {
+ var i = name.length - 1
+ while (i >= 0 && name.charAt(i) != '.')
+ i -= 1
+
+ if (i < 0) ""
+ else name.substring(i + 1).toLowerCase
+ }
def isJarOrZip(f: Path, examineFile: Boolean = true) = Jar.isJarOrZip(f, examineFile)
// not certain these won't be problematic, but looks good so far
@@ -52,9 +65,9 @@ object Path {
def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
- def roots: List[Path] = JFile.listRoots().toList map Path.apply
+ def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
- def apply(segments: Seq[String]): Path = apply(segments mkString JFile.separator)
+ def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
def apply(path: String): Path = apply(new JFile(path))
def apply(jfile: JFile): Path =
if (jfile.isFile) new File(jfile)
@@ -71,8 +84,8 @@ import Path._
* semantics regarding how a Path might relate to the world.
*/
class Path private[io] (val jfile: JFile) {
- val separator = JFile.separatorChar
- val separatorStr = JFile.separator
+ val separator = java.io.File.separatorChar
+ val separatorStr = java.io.File.separator
// Validation: this verifies that the type of this object and the
// contents of the filesystem are in agreement. All objects are
@@ -84,6 +97,7 @@ class Path private[io] (val jfile: JFile) {
def toFile: File = new File(jfile)
def toDirectory: Directory = new Directory(jfile)
def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath())
+ def toCanonical: Path = Path(jfile.getCanonicalPath())
def toURI: URI = jfile.toURI()
def toURL: URL = toURI.toURL()
/** If this path is absolute, returns it: otherwise, returns an absolute
@@ -117,7 +131,7 @@ class Path private[io] (val jfile: JFile) {
// identity
def name: String = jfile.getName()
def path: String = jfile.getPath()
- def normalize: Path = Path(jfile.getCanonicalPath())
+ def normalize: Path = Path(jfile.getAbsolutePath())
def isRootPath: Boolean = roots exists (_ isSame this)
def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
@@ -159,14 +173,22 @@ class Path private[io] (val jfile: JFile) {
if (p isSame this) Nil else p :: p.parents
}
// if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise ""
- def extension: String = (name lastIndexOf '.') match {
- case -1 => ""
- case idx => name drop (idx + 1)
+ def extension: String = {
+ var i = name.length - 1
+ while (i >= 0 && name.charAt(i) != '.')
+ i -= 1
+
+ if (i < 0) ""
+ else name.substring(i + 1)
}
+ // def extension: String = (name lastIndexOf '.') match {
+ // case -1 => ""
+ // case idx => name drop (idx + 1)
+ // }
// compares against extensions in a CASE INSENSITIVE way.
def hasExtension(ext: String, exts: String*) = {
- val xs = (ext +: exts) map (_.toLowerCase)
- xs contains extension.toLowerCase
+ val lower = extension.toLowerCase
+ ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower)
}
// returns the filename without the extension.
def stripExtension: String = name stripSuffix ("." + extension)
@@ -191,7 +213,7 @@ class Path private[io] (val jfile: JFile) {
def isHidden = jfile.isHidden()
def isSymlink = {
val x = parent / name
- x.normalize != x.toAbsolute
+ x.toCanonical != x.toAbsolute
}
def isEmpty = path.length == 0
@@ -203,7 +225,7 @@ class Path private[io] (val jfile: JFile) {
// Boolean path comparisons
def endsWith(other: Path) = segments endsWith other.segments
def startsWith(other: Path) = segments startsWith other.segments
- def isSame(other: Path) = normalize == other.normalize
+ def isSame(other: Path) = toCanonical == other.toCanonical
def isFresher(other: Path) = lastModified > other.lastModified
// creations
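
Path gains static extension and isExtensionJarOrZip helpers above, scanning backwards for the last '.' and lower-casing only the result. The same logic as a standalone object with a few sample inputs:

object ExtensionSketch {
  def extension(name: String): String = {
    var i = name.length - 1
    while (i >= 0 && name.charAt(i) != '.') i -= 1
    if (i < 0) "" else name.substring(i + 1).toLowerCase
  }

  def isExtensionJarOrZip(name: String): Boolean = {
    val ext = extension(name)
    ext == "jar" || ext == "zip"
  }

  def main(args: Array[String]): Unit = {
    println(extension("scala-library.JAR"))    // jar
    println(isExtensionJarOrZip("foo.zip"))    // true
    println(isExtensionJarOrZip("foo.tar.gz")) // false: only the last extension counts
  }
}
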
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
index ce4bca490f..83b8cc32c4 100644
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ b/src/compiler/scala/tools/nsc/io/PlainFile.scala
@@ -16,7 +16,15 @@ object PlainFile {
* by it. Otherwise, returns null.
*/
def fromPath(file: Path): PlainFile =
- if (file.exists) new PlainFile(file) else null
+ if (file.isDirectory) new PlainDirectory(file.toDirectory)
+ else if (file.isFile) new PlainFile(file)
+ else null
+}
+
+class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
+ override def isDirectory = true
+ override def iterator = givenPath.list filter (_.exists) map (x => new PlainFile(x))
+ override def delete(): Unit = givenPath.deleteRecursively()
}
/** This class implements an abstract file backed by a File.
@@ -27,7 +35,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
val file = givenPath.jfile
override def underlyingSource = Some(this)
- private val fpath = try givenPath.normalize catch { case _: IOException => givenPath.toAbsolute }
+ private val fpath = givenPath.toAbsolute
/** Returns the name of this abstract file. */
def name = givenPath.name
@@ -36,7 +44,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
def path = givenPath.path
/** The absolute file. */
- def absolute = new PlainFile(givenPath.normalize)
+ def absolute = new PlainFile(givenPath.toAbsolute)
override def container: AbstractFile = new PlainFile(givenPath.parent)
override def input = givenPath.toFile.inputStream()
@@ -44,8 +52,10 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
override def sizeOption = Some(givenPath.length.toInt)
override def hashCode(): Int = fpath.hashCode
- override def equals(that: Any): Boolean =
- cond(that) { case other: PlainFile => fpath == other.fpath }
+ override def equals(that: Any): Boolean = that match {
+ case x: PlainFile => fpath == x.fpath
+ case _ => false
+ }
/** Is this abstract file a directory? */
def isDirectory: Boolean = givenPath.isDirectory
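
PlainFile above (and VirtualFile and the zip archives below) replaces PartialFunction.cond-based equality with a plain pattern match on the stored path. A minimal standalone version of that equals/hashCode idiom:

class PathKeyed(val fpath: String) {
  override def hashCode(): Int = fpath.hashCode
  override def equals(that: Any): Boolean = that match {
    case other: PathKeyed => fpath == other.fpath
    case _                => false
  }
}

object PathKeyedDemo {
  def main(args: Array[String]): Unit =
    println(new PathKeyed("/tmp/a") == new PathKeyed("/tmp/a"))  // true
}
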
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 7c9b776eb3..324c5e4111 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -33,58 +33,38 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
"Please try specifying another one using the -encoding option")
}
- //########################################################################
- // Public Methods
-
/** Reads the file with the specified name. */
def read(filename: String): Array[Char]= read(new JFile(filename))
/** Reads the specified file. */
def read(file: JFile): Array[Char] = {
val c = new FileInputStream(file).getChannel
- try {
- read(c)
- } catch {
- case e:Exception =>
- if (true) e.printStackTrace
- reportEncodingError(file.toString())
- new Array[Char](0)
- } finally {
- c.close()
- }
+
+ try read(c)
+ catch { case e: Exception => reportEncodingError("" + file) ; Array() }
+ finally c.close()
}
/** Reads the specified file.
- *
- * @param file ...
- * @return ...
*/
def read(file: AbstractFile): Array[Char] = {
- file match {
- case p:PlainFile =>
- read(p.file) // bq: (!!!)
- case z:ZipArchive#FileEntry =>
- val c = Channels.newChannel(z.archive.getInputStream(z.entry))
- read(c)
- case _ =>
- val b = ByteBuffer.wrap(file.toByteArray)
- try {
- read(b)
- } catch {
- case e:Exception =>
- if (true) e.printStackTrace
- reportEncodingError(file.toString())
- new Array[Char](0)
- }
+ try file match {
+ case p: PlainFile => read(p.file)
+ case z: ZipArchive#Entry => read(Channels.newChannel(z.input))
+ case _ => read(ByteBuffer.wrap(file.toByteArray))
+ }
+ catch {
+ case e: Exception => reportEncodingError("" + file) ; Array()
}
}
/** Reads the specified byte channel. */
protected def read(input: ReadableByteChannel): Array[Char] = {
val decoder: CharsetDecoder = this.decoder.reset()
- val bytes: ByteBuffer = this.bytes; bytes.clear()
- var chars: CharBuffer = this.chars; chars.clear()
- var endOfInput: Boolean = false
+ val bytes: ByteBuffer = this.bytes; bytes.clear()
+ var chars: CharBuffer = this.chars; chars.clear()
+ var endOfInput = false
+
while (!endOfInput ) {
endOfInput = input.read(bytes) < 0
bytes.flip()
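
SourceReader.read above is reduced to a single try/catch/finally: decode, report and return an empty array on failure, always close the channel. A self-contained sketch of that shape using only JDK charset decoding (the error reporting is simplified to a println and the one-shot decode is illustrative, not the compiler's buffered loop):

import java.io.{ File, FileInputStream }
import java.nio.ByteBuffer
import java.nio.charset.Charset

object ReadSketch {
  def read(file: File, charset: Charset = Charset.forName("UTF-8")): Array[Char] = {
    val c = new FileInputStream(file).getChannel
    try {
      val bytes = ByteBuffer.allocate(c.size.toInt)
      c.read(bytes)
      bytes.flip()
      charset.decode(bytes).toString.toCharArray   // decode everything in one shot
    }
    catch { case _: Exception => println("failed to decode " + file); Array() }
    finally c.close()
  }
}
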
diff --git a/src/compiler/scala/tools/nsc/io/VirtualFile.scala b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
index 4a8aa1a69d..450d86b599 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
@@ -8,20 +8,13 @@ package scala.tools.nsc
package io
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
-import PartialFunction._
/** This class implements an in-memory file.
*
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
-class VirtualFile(val name: String, _path: String) extends AbstractFile
-{
- assert((name ne null) && (path ne null), name + " - " + path)
-
- //########################################################################
- // Public Constructors
-
+class VirtualFile(val name: String, override val path: String) extends AbstractFile {
/**
* Initializes this instance with the specified name and an
* identical path.
@@ -31,8 +24,11 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
*/
def this(name: String) = this(name, name)
- override def hashCode = path.##
- override def equals(that: Any) = cond(that) { case x: VirtualFile => x.path == path }
+ override def hashCode = path.hashCode
+ override def equals(that: Any) = that match {
+ case x: VirtualFile => x.path == path
+ case _ => false
+ }
//########################################################################
// Private data
@@ -40,9 +36,6 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
//########################################################################
// Public Methods
-
- def path = _path
-
def absolute = this
/** Returns null. */
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index 22121cc714..90cb827280 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -1,317 +1,215 @@
/* NSC -- new Scala compiler
* Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
+ * @author Paul Phillips
*/
-
package scala.tools.nsc
package io
import java.net.URL
-import java.util.Enumeration
-import java.io.{ IOException, InputStream, BufferedInputStream, ByteArrayInputStream }
+import java.io.{ IOException, InputStream, ByteArrayInputStream }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
-import PartialFunction._
-
-import scala.collection.mutable.{ Map, HashMap }
+import scala.collection.{ immutable, mutable }
import annotation.tailrec
-/**
- * @author Philippe Altherr
- * @version 1.0, 23/03/2004
+/** An abstraction for zip files and streams. Everything is written the way
+ * it is for performance: we come through here a lot on every run. Be careful
+ * about changing it.
+ *
+ * @author Philippe Altherr (original version)
+ * @author Paul Phillips (this one)
+ * @version 2.0,
*/
object ZipArchive {
- def fromPath(path: Path): ZipArchive = fromFile(path.toFile)
+ def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
+ def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
/**
- * If the specified file <code>file</code> exists and is a readable
- * zip archive, returns an abstract file backed by it. Otherwise,
- * returns <code>null</code>.
- *
- * @param file ...
- * @return ...
+ * @param file a File
+ * @return A ZipArchive if `file` is a readable zip file, otherwise null.
*/
- def fromFile(file: File): ZipArchive =
- try new ZipArchive(file, new ZipFile(file.jfile))
+ def fromFile(file: File): FileZipArchive = fromFile(file.jfile)
+ def fromFile(file: JFile): FileZipArchive =
+ try { new FileZipArchive(file) }
catch { case _: IOException => null }
/**
- * Returns an abstract directory backed by the specified archive.
- */
- def fromArchive(archive: ZipFile): ZipArchive =
- new ZipArchive(File(archive.getName()), archive)
-
- /**
- * Returns an abstract directory backed by the specified archive.
+ * @param url the url of a zip file
+ * @return A ZipArchive backed by the given url.
*/
- def fromURL(url: URL): AbstractFile = new URLZipArchive(url)
-
- private[io] trait ZipTrav extends Traversable[ZipEntry] {
- def zis: () => ZipInputStream
+ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
+ def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
+
+ private def dirName(path: String) = splitPath(path, true)
+ private def baseName(path: String) = splitPath(path, false)
+ private def splitPath(path0: String, front: Boolean): String = {
+ val isDir = path0.charAt(path0.length - 1) == '/'
+ val path = if (isDir) path0.substring(0, path0.length - 1) else path0
+ val idx = path.lastIndexOf('/')
+
+ if (idx < 0)
+ if (front) "/"
+ else path
+ else
+ if (front) path.substring(0, idx + 1)
+ else path.substring(idx + 1)
}
+}
+import ZipArchive._
- private[io] class ZipEntryTraversableClass(in: InputStream) extends ZipTrav {
- val zis = () => new ZipInputStream(in)
-
- def foreach[U](f: ZipEntry => U) = {
- var in: ZipInputStream = null
- @tailrec def loop(): Unit = {
- if (in.available == 0)
- return
+abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals {
+ self =>
- val entry = in.getNextEntry()
- if (entry != null) {
- f(entry)
- in.closeEntry()
- loop()
- }
- }
+ override def underlyingSource = Some(this)
+ def isDirectory = true
+ def lookupName(name: String, directory: Boolean) = unsupported
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
+ def create() = unsupported
+ def delete() = unsupported
+ def output = unsupported
+ def container = unsupported
+ def absolute = unsupported
- try {
- in = zis()
- loop()
- }
- finally in.close()
+ private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
+ its flatMap { f =>
+ if (f.isDirectory) walkIterator(f.iterator)
+ else Iterator(f)
}
}
-}
-import ZipArchive.ZipTrav
-
-/** This abstraction aims to factor out the common code between
- * ZipArchive (backed by a zip file) and URLZipArchive (backed
- * by an InputStream.)
- */
-private[io] trait ZipContainer extends AbstractFile {
- /** Abstract types */
- type SourceType // InputStream or AbstractFile
- type CreationType // InputStream or ZipFile
-
- /** Abstract values */
- protected val creationSource: CreationType
- protected val root: DirEntryInterface
- protected def DirEntryConstructor: (AbstractFile, String, String) => DirEntryInterface
- protected def FileEntryConstructor: (SourceType, String, String, ZipEntry) => FileEntryInterface
- protected def ZipTravConstructor: CreationType => ZipTrav
+ def deepIterator = walkIterator(iterator)
- protected[io] trait EntryInterface extends VirtualFile {
- def name: String
- def path: String
+ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
+ // have to keep this name for compat with sbt's compiler-interface
+ def getArchive: ZipFile = null
+ override def underlyingSource = Some(self)
+ override def toString = self.path + "(" + path + ")"
}
-
- protected[io] trait DirEntryInterface extends EntryInterface {
- def source: SourceType
- val entries: Map[String, EntryInterface] = new HashMap()
- var entry: ZipEntry = _
-
- override def input = throw new Error("cannot read directories")
- override def lastModified: Long =
- if (entry ne null) entry.getTime() else super.lastModified
+ class DirEntry(path: String) extends Entry(path) {
+ val entries = mutable.HashMap[String, Entry]()
override def isDirectory = true
- override def iterator: Iterator[AbstractFile] = entries.valuesIterator
- override def lookupName(name: String, directory: Boolean): AbstractFile = {
- def slashName = if (directory) name + "/" else name
- entries.getOrElse(slashName, null)
+ override def iterator = entries.valuesIterator
+ override def lookupName(name: String, directory: Boolean): Entry = {
+ if (directory) entries(name + "/")
+ else entries(name)
}
}
- protected[io] trait FileEntryInterface extends EntryInterface {
- def entry: ZipEntry
-
- override def lastModified: Long = entry.getTime()
- override def sizeOption = Some(entry.getSize().toInt)
+ private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = {
+ dirs.getOrElseUpdate(path, {
+ val parent = ensureDir(dirs, dirName(path), null)
+ val dir = new DirEntry(path)
+ parent.entries(baseName(path)) = dir
+ dir
+ })
}
+ protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
+ if (entry.isDirectory) ensureDir(dirs, entry.getName, entry)
+ else ensureDir(dirs, dirName(entry.getName), null)
+ }
+}
- class ZipRootCreator(f: ZipRootCreator => SourceType) {
- val root = DirEntryConstructor(ZipContainer.this, "<root>", "/")
-
- // Map from paths to DirEntries
- val dirs = HashMap[String, DirEntryInterface]("/" -> root)
- val traverser = ZipTravConstructor(creationSource)
- private[this] var _parent: DirEntryInterface = _
- def parent = _parent
-
- def addEntry(entry: ZipEntry) {
- val path = entry.getName
- if (entry.isDirectory) {
- val dir: DirEntryInterface = getDir(dirs, path)
- if (dir.entry == null) dir.entry = entry
- }
+final class FileZipArchive(file: JFile) extends ZipArchive(file) {
+ def iterator = {
+ val zipFile = new ZipFile(file)
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val enum = zipFile.entries()
+
+ while (enum.hasMoreElements) {
+ val zipEntry = enum.nextElement
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory) dir
else {
- val (home, name) = splitPath(path)
- _parent = getDir(dirs, home)
- _parent.entries(name) = FileEntryConstructor(f(this), name, path, entry)
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override def getArchive = zipFile
+ override def lastModified = zipEntry.getTime()
+ override def input = getArchive getInputStream zipEntry
+ override def sizeOption = Some(zipEntry.getSize().toInt)
+ }
+ val f = new FileEntry()
+ dir.entries(f.name) = f
}
}
- def apply() = {
- traverser foreach addEntry
- root
- }
+ try root.iterator
+ finally dirs.clear()
}
- protected def splitPath(path: String): (String, String) = {
- (path lastIndexOf '/') match {
- case -1 => ("/", path)
- case idx => path splitAt (idx + 1)
- }
+ def name = file.getName
+ def path = file.getPath
+ def input = File(file).inputStream()
+ def lastModified = file.lastModified
+
+ override def sizeOption = Some(file.length.toInt)
+ override def canEqual(other: Any) = other.isInstanceOf[FileZipArchive]
+ override def hashCode() = file.hashCode
+ override def equals(that: Any) = that match {
+ case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile
+ case _ => false
}
-
- /**
- * Returns the abstract file in this abstract directory with the
- * specified name. If there is no such file, returns null. The
- * argument "directory" tells whether to look for a directory or
- * or a regular file.
- */
- override def lookupName(name: String, directory: Boolean): AbstractFile =
- root.lookupName(name, directory)
-
- /** Returns an abstract file with the given name. It does not
- * check that it exists.
- */
- override def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-
- /** Returns all abstract subfiles of this abstract directory. */
- override def iterator: Iterator[AbstractFile] = root.iterator
-
- /**
- * Looks up the path in the given map and returns if found.
- * If not present, creates a new DirEntry, adds to both given
- * map and parent.entries, and returns it.
- */
- protected def getDir(dirs: Map[String, DirEntryInterface], path: String): DirEntryInterface =
- dirs.getOrElseUpdate(path, {
- val (home, name) = splitPath(path init)
- val parent = getDir(dirs, home)
- val dir = DirEntryConstructor(parent, name, path)
- parent.entries(name + path.last) = dir
- dir
- })
-
- override def isDirectory = true
}
-/**
- * This class implements an abstract directory backed by a zip
- * archive. We let the encoding be <code>null</code>, because we behave like
- * a directory.
- *
- * @author Philippe Altherr
- * @version 1.0, 23/03/2004
- */
-final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file) with ZipContainer {
- self =>
-
- type SourceType = AbstractFile
- type CreationType = ZipFile
-
- protected val creationSource = archive
- protected lazy val root = new ZipRootCreator(_.parent)()
- protected def DirEntryConstructor = new DirEntry(_, _, _)
- protected def FileEntryConstructor = new FileEntry(_, _, _, _)
- protected def ZipTravConstructor = new ZipFileIterable(_)
-
- abstract class Entry(
- override val container: AbstractFile,
- name: String,
- path: String
- ) extends VirtualFile(name, path)
- {
- override def underlyingSource = Some(self)
- final override def path = "%s(%s)".format(self, super.path)
- final def archive = self.archive
-
- override def hashCode = super.hashCode + container.hashCode
- override def equals(that : Any) =
- super.equals(that) && (cond(that) {
- case e: Entry => container == e.container
- })
- }
-
- final class DirEntry(
- container: AbstractFile,
- name: String,
- path: String
- ) extends Entry(container, name, path) with DirEntryInterface
- {
- def source = container
- }
+final class URLZipArchive(val url: URL) extends ZipArchive(null) {
+ def iterator = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input)))
+
+ @tailrec def loop() {
+ val zipEntry = in.getNextEntry()
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override val toByteArray: Array[Byte] = {
+ val len = zipEntry.getSize().toInt
+ val arr = new Array[Byte](len)
+ var offset = 0
+
+ def loop() {
+ if (offset < len) {
+ val read = in.read(arr, offset, len - offset)
+ if (read >= 0) {
+ offset += read
+ loop()
+ }
+ }
+ }
+ loop()
- final class FileEntry(
- container: AbstractFile,
- name: String,
- path: String,
- val entry: ZipEntry
- ) extends Entry(container, name, path) with FileEntryInterface
- {
- override def input = archive getInputStream entry
- }
+ if (offset == arr.length) arr
+ else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len))
+ }
+ override def sizeOption = Some(zipEntry.getSize().toInt)
+ }
- class ZipFileIterable(z: ZipFile) extends Iterable[ZipEntry] with ZipTrav {
- def zis: () => ZipInputStream = null // not valid for this type
- def iterator = new Iterator[ZipEntry] {
- val enum = z.entries()
- def hasNext = enum.hasMoreElements
- def next = enum.nextElement
+ if (zipEntry != null) {
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory)
+ dir
+ else {
+ val f = new FileEntry()
+ dir.entries(f.name) = f
+ }
+ in.closeEntry()
+ loop()
+ }
}
- }
-}
-/**
- * This class implements an abstract directory backed by a specified
- * zip archive.
- *
- * @author Stephane Micheloud
- * @version 1.0, 29/05/2007
- */
-final class URLZipArchive(url: URL) extends AbstractFile with ZipContainer {
- type SourceType = InputStream
- type CreationType = InputStream
-
- protected lazy val creationSource = input
- protected lazy val root = new ZipRootCreator(x => byteInputStream(x.traverser.zis()))()
-
- protected def DirEntryConstructor = (_, name, path) => new DirEntry(name, path)
- protected def FileEntryConstructor = new FileEntry(_, _, _, _)
- protected def ZipTravConstructor = new ZipArchive.ZipEntryTraversableClass(_)
+ loop()
+ try root.iterator
+ finally dirs.clear()
+ }
- def name: String = url.getFile()
- def path: String = url.getPath()
- def input: InputStream = url.openStream()
- def absolute: AbstractFile = this
- def lastModified: Long =
+ def name = url.getFile()
+ def path = url.getPath()
+ def input = url.openStream()
+ def lastModified =
try url.openConnection().getLastModified()
catch { case _: IOException => 0 }
- /** Methods we don't support but have to implement because of the design */
- def file: JFile = null
- def create(): Unit = unsupported
- def delete(): Unit = unsupported
- def output = unsupported
- def container = unsupported
-
- abstract class Entry(name: String, path: String) extends VirtualFile(name, path) {
- final override def path = "%s(%s)".format(URLZipArchive.this, super.path)
- override def container = URLZipArchive.this
- }
- final class DirEntry(name: String, path: String) extends Entry(name, path) with DirEntryInterface {
- def source = input
- }
- final class FileEntry(
- val in: InputStream,
- name: String,
- path: String,
- val entry: ZipEntry
- ) extends Entry(name, path) with FileEntryInterface
- {
- override def input = in
- }
-
- /** Private methods **/
- private def byteInputStream(in: InputStream): InputStream = {
- val minusOne = (-1).toByte
- val buf = new BufferedInputStream(in)
- val bytes = Iterator continually in.read().toByte takeWhile (_ != minusOne)
- new ByteArrayInputStream(bytes.toSeq.toArray)
+ override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive]
+ override def hashCode() = url.hashCode
+ override def equals(that: Any) = that match {
+ case x: URLZipArchive => url == x.url
+ case _ => false
}
}
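
The rewritten ZipArchive builds its directory tree from entry names using the dirName/baseName helpers above: a trailing '/' marks a directory entry and is stripped before splitting on the last '/'. The same splitting logic as a runnable standalone object:

object ZipPathSketch {
  def splitPath(path0: String, front: Boolean): String = {
    val isDir = path0.charAt(path0.length - 1) == '/'
    val path  = if (isDir) path0.substring(0, path0.length - 1) else path0
    val idx   = path.lastIndexOf('/')
    if (idx < 0) { if (front) "/" else path }
    else         { if (front) path.substring(0, idx + 1) else path.substring(idx + 1) }
  }
  def dirName(path: String)  = splitPath(path, front = true)
  def baseName(path: String) = splitPath(path, front = false)

  def main(args: Array[String]): Unit = {
    println(dirName("scala/collection/immutable/"))    // scala/collection/
    println(baseName("scala/collection/immutable/"))   // immutable
    println(dirName("manifest.mf"))                     // /
  }
}
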
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 29174b161f..565a3d4bcb 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -11,16 +11,7 @@ import java.util.jar.{ Attributes }
package object io {
type JManifest = java.util.jar.Manifest
- private[io] type JFile = java.io.File
- // grimly bulldozing through #4338
- private[io] object JFile {
- import java.io.{ File => JJFile } // the irony of JFile being ambiguous is not overlooked
- val createTempFile = JJFile.createTempFile(_: String, _: String, _: JFile)
- def pathSeparator = JJFile.pathSeparator
- def separator = JJFile.separator
- def separatorChar = JJFile.separatorChar
- def listRoots() = JJFile.listRoots()
- }
+ type JFile = java.io.File
private[io] implicit def installManifestOps(m: JManifest) = new ManifestOps(m)
class ManifestOps(manifest: JManifest) {
def attrs = manifest.getMainAttributes()
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index c5673fced7..beaf63106d 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -113,6 +113,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
x
}
+ private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
+ println(fmt.format(xs: _*) + " == " + x)
+ x
+ }
def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
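
The new printing helper above formats a message, prints it next to the value, and returns the value unchanged, so it can be wrapped around any subexpression while debugging. A standalone copy with a tiny usage example:

object PrintingSketch {
  def printing[T](fmt: String, xs: Any*)(x: T): T = {
    println(fmt.format(xs: _*) + " == " + x)
    x
  }

  def main(args: Array[String]): Unit = {
    val n = printing("2 + %d", 3)(2 + 3)   // prints "2 + 3 == 5"
    println(n)                             // 5, the value passes through untouched
  }
}
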
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 6b691826a9..83213c498c 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -198,16 +198,14 @@ trait Matrix extends MatrixAdditions {
*/
class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
def sym = lhs
- def valsym = valDef.symbol
- // XXX how will valsym.tpe differ from sym.tpe ?
- def tpe = valsym.tpe
+ def tpe = lhs.tpe
// See #1427 for an example of a crash which occurs unless we retype:
// in that instance there is an existential in the pattern.
- lazy val ident = typer typed { ID(lhs) setType null }
- lazy val valDef = typer typed { (VAL(lhs) withType ident.tpe) === rhs }
+ lazy val ident = typer typed Ident(lhs)
+ lazy val valDef = typer typedValDef ValDef(lhs, rhs)
- override def toString() = "%s: %s = %s".format(lhs, lhs.info, rhs)
+ override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
}
/** Sets the rhs to EmptyTree, which makes the valDef ignored in Scrutinee.
@@ -257,8 +255,5 @@ trait Matrix extends MatrixAdditions {
// careful: pos has special meaning
recordSyntheticSym(owner.newVariable(pos, n) setInfo tpe setFlag (SYNTHETIC.toLong /: flags)(_|_))
}
-
- def typedValDef(x: Symbol, rhs: Tree) =
- tracing("typedVal")(typer typedValDef (VAL(x) === rhs))
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
index c5c57938ca..7024533d9a 100644
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
@@ -19,7 +19,7 @@ trait MatrixAdditions extends ast.TreeDSL {
import symtab.Flags
import CODE._
import Debug._
- import treeInfo.{ IsTrue, IsFalse }
+ import treeInfo._
import definitions.{ isValueClass }
/** The Squeezer, responsible for all the squeezing.
@@ -53,17 +53,6 @@ trait MatrixAdditions extends ast.TreeDSL {
super.traverse(tree)
}
}
- class Subst(vd: ValDef) extends Transformer {
- private var stop = false
- override def transform(tree: Tree): Tree = tree match {
- case t: Ident if t.symbol == vd.symbol =>
- stop = true
- vd.rhs
- case _ =>
- if (stop) tree
- else super.transform(tree)
- }
- }
/** Compresses multiple Blocks. */
private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
@@ -87,9 +76,12 @@ trait MatrixAdditions extends ast.TreeDSL {
val rt = new RefTraverser(vd)
rt.atOwner(owner)(rt traverse squeezedTail)
- if (rt.canDrop) squeezedTail
- else if (rt.canInline) new Subst(vd) transform squeezedTail
- else default
+ if (rt.canDrop)
+ squeezedTail
+ else if (isConstantType(vd.symbol.tpe) || rt.canInline)
+ new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
+ else
+ default
case _ => default
}
}
@@ -103,26 +95,24 @@ trait MatrixAdditions extends ast.TreeDSL {
import self.context._
final def optimize(tree: Tree): Tree = {
+ // Uses treeInfo extractors rather than looking at trees directly
+ // because the many Blocks obscure our vision.
object lxtt extends Transformer {
override def transform(tree: Tree): Tree = tree match {
- case blck @ Block(vdefs, ld @ LabelDef(name, params, body)) =>
- if (targets exists (_ shouldInline ld.symbol)) squeezedBlock(vdefs, body)
- else blck
-
- case t =>
- super.transform(t match {
- // note - it is too early for any other true/false related optimizations
- case If(cond, IsTrue(), IsFalse()) => cond
-
- case If(cond1, If(cond2, thenp, elsep1), elsep2) if (elsep1 equalsStructure elsep2) =>
- IF (cond1 AND cond2) THEN thenp ELSE elsep1
- case If(cond1, If(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
- IF (cond1 AND cond2) THEN thenp ELSE ld
- case t => t
- })
+ case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
+ squeezedBlock(transformStats(stats, currentOwner), body)
+ case IsIf(cond, IsTrue(), IsFalse()) =>
+ transform(cond)
+ case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
+ transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
+ case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
+ transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
+ case _ =>
+ super.transform(tree)
}
}
- returning(lxtt transform tree)(_ => clearSyntheticSyms())
+ try lxtt transform tree
+ finally clearSyntheticSyms()
}
}
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index e198656eb5..65e570f133 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -25,7 +25,7 @@ trait ParallelMatching extends ast.TreeDSL
self: ExplicitOuter =>
import global.{ typer => _, _ }
- import definitions.{ AnyRefClass, NothingClass, IntClass, BooleanClass, getProductArgs, productProj }
+ import definitions.{ AnyRefClass, NothingClass, IntClass, BooleanClass, SomeClass, getProductArgs, productProj }
import CODE._
import Types._
import Debug._
@@ -50,10 +50,12 @@ trait ParallelMatching extends ast.TreeDSL
shortCuts(key) = theLabel
-key
}
- def createLabelDef(prefix: String, params: List[Symbol] = Nil, tpe: Type = matchResultType) = {
- val labelSym = owner.newLabel(owner.pos, cunit.freshTermName(prefix)) setInfo MethodType(params, tpe)
+ def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
+ val labelName = cunit.freshTermName(namePrefix)
+ val labelSym = owner.newLabel(owner.pos, labelName)
+ val labelInfo = MethodType(params, restpe)
- (body: Tree) => LabelDef(labelSym, params, body setType tpe)
+ LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
}
/** This is the recursively focal point for translating the current
@@ -297,7 +299,7 @@ trait ParallelMatching extends ast.TreeDSL
literals.zipWithIndex map {
case (lit, index) =>
val tag = lit.intValue
- (tag -> index, tag -> lit.deepBoundVariables)
+ (tag -> index, tag -> lit.boundVariables)
} unzip
)
def literalMap = litPairs groupBy (_._1) map {
@@ -355,8 +357,9 @@ trait ParallelMatching extends ast.TreeDSL
scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
lazy val cond: Tree =
- if (unapplyResult.tpe.isBoolean) ID(unapplyResult.valsym)
- else unapplyResult.valsym IS_DEFINED
+ if (unapplyResult.tpe.isBoolean) unapplyResult.ident
+ else if (unapplyResult.tpe.typeSymbol == SomeClass) TRUE
+ else NOT(unapplyResult.ident DOT nme.isEmpty)
lazy val failure =
mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
@@ -510,7 +513,7 @@ trait ParallelMatching extends ast.TreeDSL
case PseudoType(o) => o
}
private lazy val labelDef =
- createLabelDef("fail%")(remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
+ createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
lazy val cond = handleOuter(rhs MEMBER_== scrut.id)
lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
@@ -665,16 +668,20 @@ trait ParallelMatching extends ast.TreeDSL
def unreached = referenceCount == 0
def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
- protected def maybeCast(lhs: Symbol, rhs: Symbol)(tree: Tree) = {
- if (rhs.tpe <:< lhs.tpe) tree
- else tree AS lhs.tpe
- }
+ // Creates a simple Ident if the symbol's type conforms to
+ // the val definition's type, or a casted Ident if not.
+ private def newValIdent(lhs: Symbol, rhs: Symbol) =
+ if (rhs.tpe <:< lhs.tpe) Ident(rhs)
+ else Ident(rhs) AS lhs.tpe
protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
- VAL(lhs) === maybeCast(lhs, rhs)(Ident(rhs))
+ typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
protected def newValReference(lhs: Symbol, rhs: Symbol) =
- maybeCast(lhs, rhs)(Ident(rhs))
+ typer typed newValIdent(lhs, rhs)
+
+ protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
+ protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference)
protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
params flatMap { lhs =>
@@ -686,12 +693,6 @@ trait ParallelMatching extends ast.TreeDSL
}
}
- protected def valDefsFor(subst: Map[Symbol, Symbol]) =
- mapSubst(subst)(typer typedValDef newValDefinition(_, _))
-
- protected def identsFor(subst: Map[Symbol, Symbol]) =
- mapSubst(subst)(typer typed newValReference(_, _))
-
// typer is not able to digest a body of type Nothing being assigned result type Unit
protected def caseResultType =
if (body.tpe.isNothing) body.tpe else matchResultType
@@ -708,7 +709,7 @@ trait ParallelMatching extends ast.TreeDSL
traceCategory("Final State", "(%s) => %s", paramsString, body)
def label = Some(labelDef)
- private lazy val labelDef = createLabelDef("body%" + bx, params, caseResultType)(body)
+ private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
val tree =
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index 8bba8b559c..bfca609ca7 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -31,6 +31,12 @@ trait PatternBindings extends ast.TreeDSL
// For spotting duplicate unapplies
def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
+ // Reproduce the Bind trees wrapping oldTree around newTree
+ def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
+ case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree))
+ case _ => newTree
+ }
+
// used as argument to `EqualsPatternClass'
case class PseudoType(o: Tree) extends SimpleTypeProxy {
override def underlying: Type = o.tpe
@@ -58,42 +64,20 @@ trait PatternBindings extends ast.TreeDSL
// bound variables beneath them return a list of said patterns for flatMapping.
def subpatternsForVars: List[Pattern] = Nil
- private def shallowBoundVariables = strip(boundTree)
- private def otherBoundVariables = subpatternsForVars flatMap (_.deepBoundVariables)
-
- def deepBoundVariables: List[Symbol] = shallowBoundVariables ::: otherBoundVariables
- // An indiscriminate deep search would be:
- //
- // def deepBoundVariables = deepstrip(boundTree)
-
- lazy val boundVariables = {
- val res = shallowBoundVariables
- val deep = deepBoundVariables
-
- if (res.size != deep.size)
- TRACE("deep variable list %s is larger than bound %s", deep, res)
-
- res
- }
-
- // XXX only a var for short-term experimentation.
- private var _boundTree: Bind = null
- def boundTree = if (_boundTree == null) tree else _boundTree
- def withBoundTree(x: Bind): this.type = {
+ // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
+ private var _boundTree: Tree = tree
+ def boundTree = _boundTree
+ def setBound(x: Bind): Pattern = {
_boundTree = x
- tracing[this.type]("Bound")(this)
+ this
}
+ def boundVariables = strip(boundTree)
// If a tree has bindings, boundTree looks something like
// Bind(v3, Bind(v2, Bind(v1, tree)))
// This takes the given tree and creates a new pattern
// using the same bindings.
- def rebindTo(t: Tree): Pattern = {
- if (boundVariables.size < deepBoundVariables.size)
- TRACE("ALERT: rebinding %s is losing %s", this, otherBoundVariables)
-
- Pattern(wrapBindings(boundVariables, t))
- }
+ def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
// Wrap this pattern's bindings around (_: Type)
def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
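
moveBindings above re-wraps whatever chain of Bind nodes surrounded the old tree around a new tree, which lets rebindTo become a one-liner. A toy version over simplified stand-in trees (Tree, Bind and Leaf here are not the compiler's classes):

sealed trait Tree
case class Bind(name: String, body: Tree) extends Tree
case class Leaf(value: String) extends Tree

object MoveBindingsSketch {
  // Copy the Bind wrappers from oldTree onto newTree, innermost last.
  def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
    case Bind(x, body) => Bind(x, moveBindings(body, newTree))
    case _             => newTree
  }

  def main(args: Array[String]): Unit = {
    val bound = Bind("v2", Bind("v1", Leaf("old")))
    println(moveBindings(bound, Leaf("new")))   // Bind(v2,Bind(v1,Leaf(new)))
  }
}
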
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 3ea1b1cdb2..3c629e5504 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -52,7 +52,7 @@ trait Patterns extends ast.TreeDSL {
// 8.1.1
case class VariablePattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
+ lazy val Ident(name) = tree
require(isVarPattern(tree) && name != nme.WILDCARD)
override def description = "%s".format(name)
@@ -60,27 +60,26 @@ trait Patterns extends ast.TreeDSL {
// 8.1.1 (b)
case class WildcardPattern() extends Pattern {
- val tree = EmptyTree
+ def tree = EmptyTree
override def isDefault = true
override def description = "_"
}
// 8.1.2
case class TypedPattern(tree: Typed) extends Pattern {
- private val Typed(expr, tpt) = tree
- private lazy val exprPat = Pattern(expr)
+ lazy val Typed(expr, tpt) = tree
override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
override def simplify(pv: PatternVar) = Pattern(expr) match {
case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
case _ => this
}
- override def description = "%s: %s".format(exprPat.boundNameString, tpt)
+ override def description = "%s: %s".format(Pattern(expr), tpt)
}
// 8.1.3
case class LiteralPattern(tree: Literal) extends Pattern {
- val Literal(const @ Constant(value)) = tree
+ lazy val Literal(const @ Constant(value)) = tree
def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
def intValue = const.intValue
@@ -94,7 +93,7 @@ trait Patterns extends ast.TreeDSL {
case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
// XXX - see bug 3411 for code which violates this assumption
// require (!isVarPattern(fn) && args.isEmpty)
- val ident @ Ident(name) = fn
+ lazy val ident @ Ident(name) = fn
override def sufficientType = Pattern(ident).equalsCheck
override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
@@ -103,7 +102,7 @@ trait Patterns extends ast.TreeDSL {
// 8.1.4 (b)
case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
require (args.isEmpty)
- val Apply(select: Select, _) = tree
+ lazy val Apply(select: Select, _) = tree
override lazy val sufficientType = qualifier.tpe match {
case t: ThisType => singleType(t, sym) // this.X
@@ -138,7 +137,7 @@ trait Patterns extends ast.TreeDSL {
}
// 8.1.4 (e)
case class SimpleIdPattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
+ lazy val Ident(name) = tree
override def description = "Id(%s)".format(name)
}
@@ -190,35 +189,38 @@ trait Patterns extends ast.TreeDSL {
// Special List handling. It was like that when I got here.
case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
- private val cons = ConsClass.primaryConstructor.tpe.resultType
- private val consRef = typeRef(cons.prefix, ConsClass, List(tpt.tpe))
- private val listRef = typeRef(cons.prefix, ListClass, List(tpt.tpe))
- private val seqRef = typeRef(cons.prefix, SeqClass, List(tpt.tpe))
+ // As yet I can't testify this is doing any good relative to using
+ // tpt.tpe, but it doesn't seem to hurt either.
+ private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
+ private lazy val consRef = typeRef(NoPrefix, ConsClass, List(packedType))
+ private lazy val listRef = typeRef(NoPrefix, ListClass, List(packedType))
+ private lazy val seqRef = typeRef(NoPrefix, SeqClass, List(packedType))
+
private def thisSeqRef = {
val tc = (tree.tpe baseType SeqClass).typeConstructor
- if (tc.typeParams.size == 1) appliedType(tc, List(tpt.tpe))
+ if (tc.typeParams.size == 1) appliedType(tc, List(packedType))
else seqRef
}
// Fold a list into a well-typed x :: y :: etc :: tree.
- private def listFolder(x: Pattern, xs: Pattern): Pattern = x match {
- case Pattern(Star(_)) => x rebindTo WILD(x.tpe)
- case _ =>
+ private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
+ case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
+ case _ =>
val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "matching$dummy")
- val consType = MethodType(dummyMethod newSyntheticValueParams List(tpt.tpe, listRef), consRef)
+ val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
- Pattern(Apply(TypeTree(consType), List(x.boundTree, xs.boundTree)) setType consRef)
+ Apply(TypeTree(consType), List(hd, tl)) setType consRef
}
- private def foldedPatterns = elems.foldRight(NilPattern)((x, y) => listFolder(Pattern(x), y))
+ private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
override def simplify(pv: PatternVar) = {
if (pv.tpe <:< necessaryType)
- foldedPatterns
+ Pattern(foldedPatterns)
else
this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
}
- override def description = "List(%s => %s)".format(tpt.tpe, resTypesString)
+ override def description = "List(%s => %s)".format(packedType, resTypesString)
}
trait SequenceLikePattern extends Pattern {
@@ -241,12 +243,12 @@ trait Patterns extends ast.TreeDSL {
// 8.1.8 (c)
case class StarPattern(tree: Star) extends Pattern {
- val Star(elem) = tree
+ lazy val Star(elem) = tree
override def description = "_*"
}
// XXX temporary?
case class ThisPattern(tree: This) extends NamePattern {
- val This(name) = tree
+ lazy val This(name) = tree
override def description = "this"
}
@@ -277,7 +279,7 @@ trait Patterns extends ast.TreeDSL {
return cache(tree)
val p = tree match {
- case x: Bind => apply(unbind(tree)) withBoundTree x
+ case x: Bind => apply(unbind(tree)) setBound x
case EmptyTree => WildcardPattern()
case Ident(nme.WILDCARD) => WildcardPattern()
case x @ Alternative(ps) => AlternativePattern(x)
@@ -409,7 +411,7 @@ trait Patterns extends ast.TreeDSL {
}
sealed trait ApplyPattern extends Pattern {
- protected lazy val Apply(fn, args) = tree
+ lazy val Apply(fn, args) = tree
override def subpatternsForVars: List[Pattern] = toPats(args)
override def dummies =
@@ -420,7 +422,7 @@ trait Patterns extends ast.TreeDSL {
}
sealed abstract class Pattern extends PatternBindingLogic {
- val tree: Tree
+ def tree: Tree
// returns either a simplification of this pattern or identity.
def simplify(pv: PatternVar): Pattern = this
@@ -455,11 +457,6 @@ trait Patterns extends ast.TreeDSL {
tree setType tpe
this
}
- def boundName: Option[Name] = boundTree match {
- case Bind(name, _) => Some(name)
- case _ => None
- }
- def boundNameString = "" + (boundName getOrElse "_")
def equalsCheck =
tracing("equalsCheck")(
@@ -474,14 +471,9 @@ trait Patterns extends ast.TreeDSL {
}
override def hashCode() = boundTree.hashCode()
def description = super.toString()
- def bindingsDescription =
- if (boundTree.isEmpty) ""
- else (boundVariables map (_.name)).mkString("", ", ", " @ ")
- final override def toString() = {
- if (boundVariables.isEmpty) description
- else "%s%s".format(bindingsDescription, description)
- }
+ final override def toString() = description
+
def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
}
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index 7d2c0fe424..5127c2eb19 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -14,6 +14,10 @@ import scala.tools.nsc.util.Position
* This reporter implements filtering.
*/
abstract class AbstractReporter extends Reporter {
+ val settings: Settings
+ def display(pos: Position, msg: String, severity: Severity): Unit
+ def displayPrompt(): Unit
+
private val positions = new HashMap[Position, Severity]
override def reset() {
@@ -21,17 +25,13 @@ abstract class AbstractReporter extends Reporter {
positions.clear
}
- val settings: Settings
private def isVerbose = settings.verbose.value
private def noWarnings = settings.nowarnings.value
private def isPromptSet = settings.prompt.value
- def display(pos: Position, msg: String, severity: Severity): Unit
- def displayPrompt(): Unit
-
protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
val severity =
- if (settings.Xwarnfatal.value && _severity == WARNING) ERROR
+ if (settings.fatalWarnings.value && _severity == WARNING) ERROR
else _severity
if (severity == INFO) {
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 75c25afa5b..a6e661c4b4 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -6,14 +6,17 @@
package scala.tools.nsc
package reporters
-import java.io.{BufferedReader, InputStreamReader, IOException, PrintWriter}
+import java.io.{ BufferedReader, IOException, PrintWriter }
import util._
+import scala.tools.util.StringOps.countElementsAsString
/**
* This class implements a Reporter that displays messages on a text
* console.
*/
class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter) extends AbstractReporter {
+ def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true))
+
/** Whether a short file name should be displayed before errors */
var shortname: Boolean = false
@@ -31,9 +34,6 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
if (label0 eq null) "" else label0 + ": "
}
- def this(settings: Settings) =
- this(settings, Console.in, new PrintWriter(Console.err, true))
-
/** Returns the number of errors issued totally as a string.
*
* @param severity ...
@@ -43,7 +43,6 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
countElementsAsString((severity).count, label(severity))
/** Prints the message. */
- //def printMessage(msg: String) { writer.println(msg) } // platform-dependent!
def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
/** Prints the message with the given position indication. */
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index e253a8f5f0..063181b061 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -7,48 +7,22 @@ package scala.tools.nsc
package reporters
import scala.tools.nsc.util._
+import scala.tools.util.StringOps._
/**
* This interface provides methods to issue information, warning and
* error messages.
*/
abstract class Reporter {
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit
+
object severity extends Enumeration
class Severity(val id: Int) extends severity.Value {
var count: Int = 0
}
- val INFO = new Severity(0)
+ val INFO = new Severity(0)
val WARNING = new Severity(1)
- val ERROR = new Severity(2)
-
- def reset() {
- INFO.count = 0
- ERROR.count = 0
- WARNING.count = 0
- cancelled = false
- }
-
- var cancelled: Boolean = false
- def hasErrors: Boolean = ERROR.count > 0 || cancelled
- def hasWarnings: Boolean = WARNING.count > 0
-
- /** Flush all output */
- def flush() { }
-
- protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit
-
- private var source: SourceFile = _
- def setSource(source: SourceFile) { this.source = source }
- def getSource: SourceFile = source
- def withSource[A](src: SourceFile)(op: => A) = {
- val oldSource = source
- try {
- source = src
- op
- } finally {
- source = oldSource
- }
- }
+ val ERROR = new Severity(2)
/** Whether very long lines can be truncated. This exists so important
* debugging information (like printing the classpath) is not rendered
@@ -63,64 +37,33 @@ abstract class Reporter {
finally _truncationOK = saved
}
+ private var incompleteHandler: (Position, String) => Unit = null
+ def incompleteHandled = incompleteHandler != null
+ def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = {
+ val saved = incompleteHandler
+ incompleteHandler = handler
+ try thunk
+ finally incompleteHandler = saved
+ }
+
+ var cancelled = false
+ def hasErrors = ERROR.count > 0 || cancelled
+ def hasWarnings = WARNING.count > 0
+
def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
def warning(pos: Position, msg: String ) { info0(pos, msg, WARNING, false) }
def error(pos: Position, msg: String ) { info0(pos, msg, ERROR, false) }
-
- def comment(pos: Position, msg: String) {}
-
- /** An error that could possibly be fixed if the unit were longer.
- * This is used only when the interpreter tries
- * to distinguish fatal errors from those that are due to
- * needing more lines of input from the user.
- *
- * Should be re-factored into a subclass.
- */
- var incompleteInputError: (Position, String) => Unit = error
- var incompleteHandled: Boolean = false
-
- def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = {
- val savedHandler = incompleteInputError
- val savedHandled = incompleteHandled
- try {
- incompleteInputError = handler
- incompleteHandled = true
- thunk
- } finally {
- incompleteInputError = savedHandler
- incompleteHandled = savedHandled
- }
+ def incompleteInputError(pos: Position, msg: String ) {
+ if (incompleteHandled) incompleteHandler(pos, msg)
+ else error(pos, msg)
}
- // @M: moved here from ConsoleReporter and made public -- also useful in e.g. Typers
- /** Returns a string meaning "n elements".
- *
- * @param n ...
- * @param elements ...
- * @return ...
- */
- def countElementsAsString(n: Int, elements: String): String =
- n match {
- case 0 => "no " + elements + "s"
- case 1 => "one " + elements
- case 2 => "two " + elements + "s"
- case 3 => "three " + elements + "s"
- case 4 => "four " + elements + "s"
- case _ => "" + n + " " + elements + "s"
- }
-
- /** Turns a count into a friendly English description if n<=4.
- *
- * @param n ...
- * @return ...
- */
- def countAsString(n: Int): String =
- n match {
- case 0 => "none"
- case 1 => "one"
- case 2 => "two"
- case 3 => "three"
- case 4 => "four"
- case _ => "" + n
- }
+ def comment(pos: Position, msg: String) { }
+ def flush() { }
+ def reset() {
+ INFO.count = 0
+ ERROR.count = 0
+ WARNING.count = 0
+ cancelled = false
+ }
}
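The Reporter rework above collapses the old incompleteInputError/incompleteHandled variable pair into a single nullable handler that withIncompleteHandler saves and restores around a thunk, with incompleteInputError falling back to a plain error when no handler is installed. Below is a minimal, self-contained sketch of that save/restore pattern; the Handler alias, the Int standing in for Position, and the println bodies are illustrative only, not the compiler's API beyond what the hunk shows.

    object HandlerSketch {
      type Handler = (Int, String) => Unit     // Int stands in for Position in this sketch

      private var incompleteHandler: Handler = null
      def incompleteHandled = incompleteHandler != null

      def withIncompleteHandler[T](handler: Handler)(thunk: => T): T = {
        val saved = incompleteHandler
        incompleteHandler = handler
        try thunk
        finally incompleteHandler = saved      // restored even if the thunk throws
      }

      def error(pos: Int, msg: String) { println("error at " + pos + ": " + msg) }

      def incompleteInputError(pos: Int, msg: String) {
        if (incompleteHandled) incompleteHandler(pos, msg)
        else error(pos, msg)                   // no handler installed: report as a real error
      }

      def main(args: Array[String]) {
        incompleteInputError(0, "unexpected end of input")   // reported as an error
        withIncompleteHandler((pos, msg) => println("need more input: " + msg)) {
          incompleteInputError(0, "unexpected end of input") // routed to the handler instead
        }
      }
    }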
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 9f0e55c422..48601c325a 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -6,10 +6,8 @@
package scala.tools.nsc
package reporters
-import java.io.{BufferedReader, InputStreamReader, IOException, PrintWriter}
-
-import scala.collection.mutable.HashSet
-import scala.tools.nsc.util.{Position, SourceFile}
+import scala.collection.mutable
+import scala.tools.nsc.util.Position
/**
* This class implements a Reporter that displays messages on a text
@@ -19,7 +17,7 @@ class StoreReporter extends Reporter {
class Info(val pos: Position, val msg: String, val severity: Severity) {
override def toString() = "pos: " + pos + " " + msg + " " + severity
}
- val infos = new HashSet[Info]
+ val infos = new mutable.HashSet[Info]
protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (!force) {
infos += new Info(pos, msg, severity)
@@ -28,7 +26,7 @@ class StoreReporter extends Reporter {
}
override def reset() {
- super.reset
- infos.clear
+ super.reset()
+ infos.clear()
}
}
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
index 0908ea60b6..136e03d9e2 100644
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
@@ -22,7 +22,7 @@ trait AestheticSettings {
def declsOnly = false
def deprecation = settings.deprecation.value
def experimental = settings.Xexperimental.value
- def fatalWarnings = settings.Xwarnfatal.value
+ def fatalWarnings = settings.fatalWarnings.value
def logClasspath = settings.Ylogcp.value
def printStats = settings.Ystatistics.value
def richExes = settings.YrichExes.value || sys.props.traceSourcePath.isSet
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 87398e4117..a1ddb10b5f 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -412,6 +412,8 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
def unparse: List[String] =
if (value == default) Nil
else List(name, value.toString)
+
+ withHelpSyntax(name + " <n>")
}
/** A setting represented by a boolean flag (false, unless set) */
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index ce1236d788..237489b440 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -12,7 +12,9 @@ import annotation.elidable
import scala.tools.util.PathResolver.Defaults
import scala.collection.mutable.HashSet
-trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
+trait ScalaSettings extends AbsScalaSettings
+ with StandardScalaSettings
+ with Warnings {
self: MutableSettings =>
import Defaults.scalaUserClassPath
@@ -61,6 +63,7 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
+ val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode.")
val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
@@ -78,9 +81,6 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- val Xwarnfatal = BooleanSetting ("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
- val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference.")
-
// Experimental Extensions
val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions.") .
withPostSetHook(set => List(YdepMethTpes, YmethodInfer) foreach (_.value = set.value)) //YvirtClasses,
@@ -108,6 +108,8 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.")
// val doc = BooleanSetting ("-Ydoc", "Generate documentation")
+ val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts",
+ List("package", "object", "error"), "error")
val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
val log = PhasesSetting ("-Ylog", "Log operations during")
@@ -142,6 +144,7 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
val Ybuildmanagerdebug =
BooleanSetting ("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignments.")
+ val Yinferdebug = BooleanSetting ("-Yinfer-debug", "Trace type inference and implicit search.")
val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") .
@@ -160,11 +163,6 @@ trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
def stop = stopAfter
/**
- * Warnings
- */
- val Ywarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
-
- /**
* IDE-specific settings
*/
val YpresentationVerbose = BooleanSetting("-Ypresentation-verbose", "Print information about presentation compiler tasks.")
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
new file mode 100644
index 0000000000..b11c6f00ee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -0,0 +1,62 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package nsc
+package settings
+
+import annotation.elidable
+import scala.tools.util.PathResolver.Defaults
+import scala.collection.mutable.HashSet
+
+/** Settings influencing the printing of warnings.
+ */
+trait Warnings {
+ self: MutableSettings =>
+
+ // Warning semantics.
+ val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
+
+ // These warnings are all so noisy as to be useless in their
+ // present form, but have the potential to offer useful info.
+ protected def allWarnings = lintWarnings ++ List(
+ warnSelectNullable,
+ warnValueDiscard,
+ warnNumericWiden
+ )
+ // These warnings should be pretty quiet unless you're doing
+ // something inadvisable.
+ protected def lintWarnings = List(
+ warnDeadCode,
+ warnInaccessible,
+ warnNullaryOverride,
+ warnNullaryUnit
+ )
+
+ // Warning groups.
+ val lint = (
+ BooleanSetting("-Xlint", "Enable recommended additional warnings.")
+ withPostSetHook (_ => lintWarnings foreach (_.value = true))
+ )
+ val warnEverything = (
+ BooleanSetting("-Ywarn-all", "Enable all -Y warnings.")
+ withPostSetHook (_ => lintWarnings foreach (_.value = true))
+ )
+
+ // Individual warnings.
+ val warnSelectNullable = BooleanSetting ("-Xcheck-null", "Warn upon selection of nullable reference.")
+ val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.")
+ val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
+ val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
+ val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.")
+ val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
+ val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override",
+ "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+
+ // Backward compatibility.
+ def Xwarnfatal = fatalWarnings
+ def Xchecknull = warnSelectNullable
+ def Ywarndeadcode = warnDeadCode
+}
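The new Warnings trait wires the group options (-Xlint, -Ywarn-all) to their member flags through a post-set hook, and keeps the old setting names (Xwarnfatal, Xchecknull, Ywarndeadcode) as plain forwarders. A standalone sketch of that hook mechanism, with a toy Flag class standing in for MutableSettings.BooleanSetting:

    object WarningGroupSketch {
      final class Flag(val name: String) {
        var value = false
        private var hook: Flag => Unit = (_ => ())
        def withPostSetHook(f: Flag => Unit): this.type = { hook = f; this }
        def set() { value = true; hook(this) }   // setting the flag runs its hook
      }

      val warnDeadCode     = new Flag("-Ywarn-dead-code")
      val warnInaccessible = new Flag("-Ywarn-inaccessible")
      def lintWarnings     = List(warnDeadCode, warnInaccessible)

      // The group flag flips every member of the group when it is set.
      val lint = new Flag("-Xlint") withPostSetHook (_ => lintWarnings foreach (_.value = true))

      def main(args: Array[String]) {
        lint.set()
        lintWarnings foreach (f => println(f.name + " = " + f.value))   // both print true
      }
    }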
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index b024d550b7..3594e48323 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -92,7 +92,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
}
// System.out.println("Browsing "+src)
- val source = new BatchSourceFile(src)
+ val source = getSourceFile(src) // this uses the current encoding
val body = new OutlineParser(source).parse()
// System.out.println(body)
val browser = new BrowserTraverser
@@ -111,7 +111,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
browseTopLevel(root, src)
} catch {
case ex: syntaxAnalyzer.MalformedInput =>
- println("caught malformed input exception at offset "+ex.offset+": "+ex.msg)
+ println("[%s] caught malformed input exception at offset %d: %s".format(src, ex.offset, ex.msg))
super.enterToplevelsFromSource(root, name, src)
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 005a3c87ed..cbf73463f3 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -249,6 +249,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
def ensureAccessibleMethod = getMember(ScalaRunTimeModule, "ensureAccessible")
def scalaRuntimeHash = getMember(ScalaRunTimeModule, "hash")
+ def scalaRuntimeAnyValClass = getMember(ScalaRunTimeModule, "anyValClass")
def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
// classes with special meanings
@@ -316,7 +317,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
lazy val TraversableClass = getClass("scala.collection.Traversable")
lazy val ListModule = getModule("scala.collection.immutable.List")
- def List_apply = getMember(ListModule, nme.apply)
+ lazy val List_apply = getMember(ListModule, nme.apply)
lazy val NilModule = getModule("scala.collection.immutable.Nil")
lazy val SeqModule = getModule("scala.collection.Seq")
@@ -527,6 +528,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
var Any_equals : Symbol = _
var Any_hashCode : Symbol = _
var Any_toString : Symbol = _
+ var Any_getClass : Symbol = _
var Any_isInstanceOf: Symbol = _
var Any_asInstanceOf: Symbol = _
var Any_## : Symbol = _
@@ -727,6 +729,7 @@ trait Definitions extends reflect.generic.StandardDefinitions {
/** Is symbol a value class? */
def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
def isNonUnitValueClass(sym: Symbol) = (sym != UnitClass) && isValueClass(sym)
+ def isScalaValueType(tp: Type) = scalaValueClassesSet(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
def isBoxedValueClass(sym: Symbol) = boxedValueClassesSet(sym)
@@ -810,11 +813,27 @@ trait Definitions extends reflect.generic.StandardDefinitions {
// members of class scala.Any
Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype) setFlag FINAL
Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype) setFlag FINAL
- Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
+ Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
- Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
-
+ Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
+
+ // Any_getClass requires special handling. The return type is determined on
+ // a per-call-site basis as if the function being called were actually:
+ //
+ // // Assuming `target.getClass()`
+ // def getClass[T](target: T): Class[_ <: T]
+ //
+ // Since getClass is not actually a polymorphic method, this requires compiler
+ // participation. At the "Any" level, the return type is Class[_] as it is in
+ // java.lang.Object. Java also special cases the return type.
+ Any_getClass = {
+ val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
+ eparams.head setInfo TypeBounds.empty
+ val tpe = existentialAbstraction(eparams, appliedType(ClassClass.tpe, List(eparams.head.tpe)))
+
+ newMethod(AnyClass, nme.getClass_, Nil, tpe) setFlag DEFERRED
+ }
Any_isInstanceOf = newPolyMethod(
AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL
Any_asInstanceOf = newPolyMethod(
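What the special-cased Any_getClass means at a call site: the static result type narrows with the receiver, while the dynamic behavior is unchanged. A small runnable illustration; the static types in the comments restate the intent described in the hunk above, and exact REPL rendering may vary between releases.

    object GetClassSketch {
      def main(args: Array[String]) {
        val c = "abc".getClass   // statically Class[_ <: String] under the scheme above, not plain Class[_]
        val any: Any = "abc"
        val d = any.getClass     // at Any the result type stays Class[_], as in java.lang.Object
        println(c == d)          // true: the runtime class is the same either way
      }
    }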
diff --git a/src/compiler/scala/tools/nsc/symtab/Names.scala b/src/compiler/scala/tools/nsc/symtab/Names.scala
index 7eccb9a49f..a51e0a26b1 100644
--- a/src/compiler/scala/tools/nsc/symtab/Names.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Names.scala
@@ -96,8 +96,10 @@ trait Names extends reflect.generic.Names {
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1].
*/
- def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName =
- newTermName(Codec fromUTF8 bs.slice(offset, offset + len) mkString)
+ def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+ val chars = Codec fromUTF8 bs.slice(offset, offset + len)
+ newTermName(chars, 0, chars.length)
+ }
/** Create a type name from the characters in cs[offset..offset+len-1].
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index 4a7f52f2b5..e114731708 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -360,7 +360,6 @@ trait StdNames extends reflect.generic.StdNames with NameManglers {
val EXCEPTION_RESULT_PREFIX = "exceptionResult"
val INTERPRETER_IMPORT_WRAPPER = "$iw"
val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_SYNTHVAR_PREFIX = "synthvar$"
val INTERPRETER_VAR_PREFIX = "res"
val INTERPRETER_WRAPPER_SUFFIX = "$object"
val WHILE_PREFIX = "while$"
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 3536e79a76..13ec75d04d 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -98,7 +98,7 @@ abstract class SymbolLoaders {
private var ok = false
private def setSource(sym: Symbol) {
- sourcefile map (sf => sym match {
+ sourcefile foreach (sf => sym match {
case cls: ClassSymbol => cls.sourceFile = sf
case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf
case _ => ()
@@ -151,9 +151,32 @@ abstract class SymbolLoaders {
def enterPackage(root: Symbol, name: String, completer: SymbolLoader) {
val preExisting = root.info.decls.lookup(newTermName(name))
- if (preExisting != NoSymbol)
- throw new TypeError(
- root+" contains object and package with same name: "+name+"\none of them needs to be removed from classpath")
+ if (preExisting != NoSymbol) {
+ // Some jars (often, obfuscated ones) include a package and
+ // object with the same name. Rather than render them unusable,
+ // offer a setting to resolve the conflict one way or the other.
+ // This was motivated by the desire to use YourKit probes, which
+ // require yjp.jar at runtime. See SI-2089.
+ if (settings.termConflict.isDefault)
+ throw new TypeError(
+ root+" contains object and package with same name: "+
+ name+"\none of them needs to be removed from classpath"
+ )
+ else if (settings.termConflict.value == "package") {
+ global.warning(
+ "Resolving package/object name conflict in favor of package " +
+ preExisting.fullName + ". The object will be inaccessible."
+ )
+ root.info.decls.unlink(preExisting)
+ }
+ else {
+ global.warning(
+ "Resolving package/object name conflict in favor of object " +
+ preExisting.fullName + ". The package will be inaccessible."
+ )
+ return
+ }
+ }
val pkg = root.newPackage(NoPosition, newTermName(name))
pkg.moduleClass.setInfo(completer)
pkg.setInfo(pkg.moduleClass.tpe)
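The enterPackage change above turns a hard TypeError into a policy choice driven by -Yresolve-term-conflict, whose ChoiceSetting values are "error" (the default), "package", and "object". A toy sketch of the three-way decision; the real code unlinks or keeps compiler Symbols rather than returning strings.

    object TermConflictSketch {
      // strategy is one of the ChoiceSetting values: "error" (default), "package", "object"
      def resolve(strategy: String, name: String): String = strategy match {
        case "package" => "warning: keeping package " + name + "; the object will be inaccessible"
        case "object"  => "warning: keeping object "  + name + "; the package will be inaccessible"
        case _         => "error: " + name + ": contains object and package with same name"
      }

      def main(args: Array[String]) {
        List("error", "package", "object") foreach (s => println(s + " -> " + resolve(s, "yjp")))
      }
    }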
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index d3decb0abc..9aa5ce0b5c 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -15,7 +15,6 @@ abstract class SymbolTable extends reflect.generic.Universe
with Symbols
with Types
with Scopes
- with Caches
with Definitions
with reflect.generic.Constants
with BaseTypeSeqs
@@ -31,7 +30,7 @@ abstract class SymbolTable extends reflect.generic.Universe
{
def settings: Settings
def rootLoader: LazyType
- def log(msg: => AnyRef)
+ def log(msg: => AnyRef): Unit
def abort(msg: String) = throw new Error(msg)
def abort() = throw new Error()
@@ -99,6 +98,25 @@ abstract class SymbolTable extends reflect.generic.Universe
final def afterPhase[T](ph: Phase)(op: => T): T =
atPhase(ph.next)(op)
+ final def isValid(period: Period): Boolean =
+ period != 0 && runId(period) == currentRunId && {
+ val pid = phaseId(period)
+ if (phase.id > pid) infoTransformers.nextFrom(pid).pid >= phase.id
+ else infoTransformers.nextFrom(phase.id).pid >= pid
+ }
+
+ final def isValidForBaseClasses(period: Period): Boolean = {
+ def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
+ it.pid >= limit ||
+ !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
+ );
+ period != 0 && runId(period) == currentRunId && {
+ val pid = phaseId(period)
+ if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
+ else noChangeInBaseClasses(infoTransformers.nextFrom(phase.id), pid)
+ }
+ }
+
/** Break into repl debugger if assertion is true */
// def breakIf(assertion: => Boolean, args: Any*): Unit =
// if (assertion)
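isValid and isValidForBaseClasses above decide whether type information cached in one period can still be trusted at the current phase. A toy model of the underlying Period arithmetic, assuming (as in SymbolTable) that a period packs a run id in the high bits and a phase id in the low byte:

    object PeriodSketch {
      type Period = Int
      def period(runId: Int, phaseId: Int): Period = (runId << 8) | phaseId
      def runId(p: Period): Int   = p >> 8
      def phaseId(p: Period): Int = p & 0xFF

      def main(args: Array[String]) {
        val p = period(3, 7)
        println(runId(p) + " / " + phaseId(p))        // 3 / 7
        // Anything cached in an earlier run is stale no matter the phase:
        println(runId(p) == runId(period(4, 7)))      // false
      }
    }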
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 308eeeb590..106c5f0c73 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -406,7 +406,9 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
var is = infos
(is eq null) || {
while (is.prev ne null) { is = is.prev }
- is.info.isComplete && is.info.typeParams.isEmpty
+ is.info.isComplete && !is.info.isHigherKinded // was: is.info.typeParams.isEmpty.
+ // YourKit listed the call to PolyType.typeParams as a hot spot but it is likely an artefact.
+ // The change to isHigherKinded did not reduce the total running time.
}
}
@@ -433,8 +435,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
(name startsWith nme.INTERPRETER_LINE_PREFIX) &&
(name endsWith nme.INTERPRETER_WRAPPER_SUFFIX)
- override def isEffectiveRoot = super.isEffectiveRoot || isInterpreterWrapper
-
/** Is this symbol an accessor method for outer? */
final def isOuterAccessor = {
hasFlag(STABLE | SYNTHETIC) &&
@@ -516,14 +516,7 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
final def isLocal: Boolean = owner.isTerm
/** Is this symbol a constant? */
- final def isConstant: Boolean =
- isStable && (tpe match {
- case ConstantType(_) => true
- case PolyType(_, ConstantType(_)) => true
- case MethodType(_, ConstantType(_)) => true
- case NullaryMethodType(ConstantType(_)) => true
- case _ => false
- })
+ final def isConstant: Boolean = isStable && isConstantType(tpe.resultType)
/** Is this class nested in another class or module (not a package)? */
final def isNestedClass: Boolean =
@@ -699,9 +692,11 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
* @M you should use tpeHK for a type symbol with type parameters if
* the kind of the type need not be *, as tpe introduces dummy arguments
* to generate a type of kind *
- * for a term symbol, its usual type
+ * for a term symbol, its usual type.
+ * See the tpe/tpeHK overrides in TypeSymbol for more.
*/
override def tpe: Type = info
+ def tpeHK: Type = tpe
/** Get type info associated with symbol at current phase, after
* ensuring that symbol is initialized (i.e. type is completed).
@@ -881,15 +876,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
def typeConstructor: Type =
abort("typeConstructor inapplicable for " + this)
- /** @M -- tpe vs tpeHK:
- * Symbol::tpe creates a TypeRef that has dummy type arguments to get a type of kind *
- * Symbol::tpeHK creates a TypeRef without type arguments, but with type params --> higher-kinded if non-empty list of tpars
- * calling tpe may hide errors or introduce spurious ones
- * (e.g., when deriving a type from the symbol of a type argument that must be higher-kinded)
- * as far as I can tell, it only makes sense to call tpe in conjunction with a substitution that replaces the generated dummy type arguments by their actual types
- */
- def tpeHK = if (isType) typeConstructor else tpe // @M! used in memberType
-
/** The type parameters of this symbol, without ensuring type completion.
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters later.
@@ -1032,13 +1018,13 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
this == that || this.isError || that.isError ||
info.baseTypeIndex(that) >= 0
- final def isSubClass(that: Symbol): Boolean = {
+ final def isSubClass(that: Symbol): Boolean = (
isNonBottomSubClass(that) ||
this == NothingClass ||
this == NullClass &&
(that == AnyClass ||
- that != NothingClass && (that isSubClass AnyRefClass))
- }
+ that != NothingClass && (that isSubClass ObjectClass))
+ )
final def isNumericSubClass(that: Symbol): Boolean =
definitions.isNumericSubClass(this, that)
@@ -1914,7 +1900,8 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
tpeCache = NoType
val targs =
if (phase.erasedTypes && this != ArrayClass) List()
- else unsafeTypeParams map (_.typeConstructor) //@M! use typeConstructor to generate dummy type arguments,
+ else unsafeTypeParams map (_.typeConstructor)
+ //@M! use typeConstructor to generate dummy type arguments,
// sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
// memberType should be used instead, that's why it uses tpeHK and not tpe
tpeCache = newTypeRef(targs)
@@ -1924,6 +1911,22 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
tpeCache
}
+ /** @M -- tpe vs tpeHK:
+ *
+ * tpe: creates a TypeRef with dummy type arguments and kind *
+ * tpeHK: creates a TypeRef with no type arguments but with type parameters
+ *
+ * If typeParams is nonEmpty, calling tpe may hide errors or
+ * introduce spurious ones. (For example, when deriving a type from
+ * the symbol of a type argument that must be higher-kinded.) As far
+ * as I can tell, it only makes sense to call tpe in conjunction
+ * with a substitution that replaces the generated dummy type
+ * arguments by their actual types.
+ *
+ * TODO: the above conditions desperately need to be enforced by code.
+ */
+ override def tpeHK = typeConstructor // @M! used in memberType
+
// needed for experimental code for early types as type parameters
// def refreshType() { tpePeriod = NoPeriod }
@@ -1954,11 +1957,12 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
* info for T in Test1 should be >: Nothing <: Test3[_]
*/
protected def doCookJavaRawInfo() {
- // don't require isJavaDefined, since T in the above example does not have that flag
- val tpe1 = rawToExistential(info)
- // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
- if (tpe1 ne info) {
- setInfo(tpe1)
+ if (isJavaDefined || owner.isJavaDefined) {
+ val tpe1 = rawToExistential(info)
+ // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
+ if (tpe1 ne info) {
+ setInfo(tpe1)
+ }
}
}
@@ -1998,7 +2002,8 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
/** If type skolem comes from an existential, the tree where it was created */
override def unpackLocation = origin
- override def typeParams = info.typeParams //@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
+ //@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
+ override def typeParams = info.typeParams
override def cloneSymbolImpl(owner: Symbol): Symbol =
new TypeSkolem(owner, pos, name, origin)
@@ -2008,7 +2013,6 @@ trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
else super.nameString
}
-
/** A class for class symbols */
class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
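The tpe/tpeHK distinction documented in the hunks above is internal to the compiler, but it mirrors something visible at the source level: a type constructor used unapplied (what tpeHK produces) versus applied to arguments (what tpe's dummy arguments simulate). A small, self-contained example of code that only works because List can be passed around as a bare constructor of kind * -> *:

    object TpeHKSketch {
      trait Functor[F[_]] {                        // F is a bare constructor, as tpeHK would describe it
        def map[A, B](fa: F[A])(f: A => B): F[B]   // F[A] applies the constructor to a real argument
      }

      val listFunctor = new Functor[List] {
        def map[A, B](fa: List[A])(f: A => B): List[B] = fa map f
      }

      def main(args: Array[String]) {
        println(listFunctor.map(List(1, 2, 3))(_ + 1))   // List(2, 3, 4)
      }
    }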
diff --git a/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala b/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala
index 62e812704b..f7cb430d7f 100644
--- a/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala
+++ b/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala
@@ -12,7 +12,33 @@ trait TypeDebugging {
import definitions._
// @M toString that is safe during debugging (does not normalize, ...)
- object TypeDebugStrings {
+ object typeDebug {
+ private def to_s(x: Any): String = x match {
+ // otherwise case classes are caught looking like products
+ case _: Tree | _: Type => "" + x
+ case x: TraversableOnce[_] => x mkString ", "
+ case x: Product => x.productIterator mkString ("(", ", ", ")")
+ case _ => "" + x
+ }
+ def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ")
+ def ptBlock(label: String, pairs: (String, Any)*): String = {
+ val width = pairs map (_._1.length) max
+ val fmt = "%-" + (width + 1) + "s %s"
+ val strs = pairs map { case (k, v) => fmt.format(k, to_s(v)) }
+
+ strs.mkString(label + " {\n ", "\n ", "\n}")
+ }
+ def ptLine(label: String, pairs: (String, Any)*): String = {
+ val strs = pairs map { case (k, v) => k + "=" + to_s(v) }
+ strs.mkString(label + ": ", ", ", "")
+ }
+ def ptTree(t: Tree) = t match {
+ case PackageDef(pid, _) => "package " + pid
+ case ModuleDef(_, name, _) => "object " + name
+ case ClassDef(_, name, tparams, _) => "class " + name + str.brackets(tparams)
+ case _ => to_s(t)
+ }
+
object str {
def parentheses(xs: List[_]): String = xs.mkString("(", ", ", ")")
def brackets(xs: List[_]): String = if (xs.isEmpty) "" else xs.mkString("[", ", ", "]")
@@ -58,15 +84,12 @@ trait TypeDebugging {
case TypeBounds(lo, hi) => ">: "+ debug(lo) +" <: "+ debug(hi)
case tv @ TypeVar(_, _) => tv.toString
case ExistentialType(tparams, qtpe) => "forSome "+ str.brackets(tparams) + " " + debug(qtpe)
- case _ => tp.toString
+ case _ => "?"+tp.getClass.getName+"?"//tp.toString might produce cyclic error...
}
def debugString(tp: Type) = debug(tp)
}
- private def TDS = TypeDebugStrings
-
- def paramString(tp: Type) = TDS.str parentheses (tp.params map (_.defString))
- def typeParamsString(tp: Type) = TDS.str brackets (tp.typeParams map (_.defString))
- def typeArgsString(tp: Type) = TDS.str brackets (tp.typeArgs map (_.safeToString))
- def debugString(tp: Type) = TDS debugString tp
+ def paramString(tp: Type) = typeDebug.str parentheses (tp.params map (_.defString))
+ def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString))
+ def typeArgsString(tp: Type) = typeDebug.str brackets (tp.typeArgs map (_.safeToString))
+ def debugString(tp: Type) = typeDebug debugString tp
}
-
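typeDebug.ptBlock and ptLine above are small helpers for formatting debug output. A standalone re-implementation of ptBlock (omitting the Tree/Type special cases handled by to_s) to show the layout it produces:

    object PtSketch {
      def ptBlock(label: String, pairs: (String, Any)*): String = {
        val width = pairs.map(_._1.length).max          // widest key decides the column width
        val fmt   = "%-" + (width + 1) + "s %s"
        val strs  = pairs map { case (k, v) => fmt.format(k, v) }
        strs.mkString(label + " {\n  ", "\n  ", "\n}")
      }

      def main(args: Array[String]) {
        // Prints an "adapt { ... }" block with the keys left-aligned in one column.
        println(ptBlock("adapt", "tree" -> "x.y", "pt" -> "Int", "undetparams" -> Nil))
      }
    }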
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 87fc2b4963..a490e0272a 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -92,6 +92,8 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
private final val printLubs = false
+ /** In case anyone wants to turn off lub verification without reverting anything. */
+ private final val verifyLubs = true
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix
@@ -577,7 +579,8 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
* symbols `from' in this type.
*/
def subst(from: List[Symbol], to: List[Type]): Type =
- new SubstTypeMap(from, to) apply this
+ if (from.isEmpty) this
+ else new SubstTypeMap(from, to) apply this
/** Substitute symbols `to' for occurrences of symbols
* `from' in this type.
@@ -842,7 +845,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- val suspension = TypeVar.Suspension
+ var suspension: mutable.HashSet[TypeVar] = null
// if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
// replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
// without this, the matchesType call would lead to type variables on both sides
@@ -859,10 +862,12 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
// For now I modified it as below, which achieves the same without error.
//
// make each type var in this type use its original type for comparisons instead of collecting constraints
+ val susp = new mutable.HashSet[TypeVar] // use a local val so it remains unboxed
this foreach {
- case tv: TypeVar => suspension suspend tv
- case _ => ()
+ case tv: TypeVar => tv.suspended = true; susp += tv
+ case _ =>
}
+ suspension = susp
}
incCounter(findMemberCount)
@@ -894,7 +899,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
(bcs0.head.hasTransOwner(bcs.head)))) {
if (name.isTypeName || stableOnly && sym.isStable) {
stopTimer(findMemberNanos, start)
- suspension.resumeAll
+ if (suspension ne null) suspension foreach (_.suspended = false)
return sym
} else if (member == NoSymbol) {
member = sym
@@ -911,12 +916,14 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
}
} else {
var prevEntry = members.lookupEntry(sym.name)
+ var symtpe: Type = null
while ((prevEntry ne null) &&
!(prevEntry.sym == sym ||
prevEntry.sym.owner != sym.owner &&
!sym.hasFlag(PRIVATE) && {
if (self eq null) self = this.narrow
- self.memberType(prevEntry.sym) matches self.memberType(sym)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ self.memberType(prevEntry.sym) matches symtpe
})) {
prevEntry = members lookupNextEntry prevEntry
}
@@ -936,7 +943,7 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
excluded = excludedFlags
} // while (continue)
stopTimer(findMemberNanos, start)
- suspension.resumeAll
+ if (suspension ne null) suspension foreach (_.suspended = false)
if (members eq null) {
if (member == NoSymbol) incCounter(noMemberCount)
member
@@ -1129,18 +1136,20 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "ThisType"
}
+ final class UniqueThisType(sym: Symbol) extends ThisType(sym) with UniqueType { }
object ThisType extends ThisTypeExtractor {
- def apply(sym: Symbol): Type =
- if (!phase.erasedTypes) unique(new ThisType(sym) with UniqueType)
+ def apply(sym: Symbol): Type = {
+ if (!phase.erasedTypes) unique(new UniqueThisType(sym))
else if (sym.isImplClass) sym.typeOfThis
else sym.tpe
+ }
}
/** A class for singleton types of the form <prefix>.<sym.name>.type.
* Cannot be created directly; one should always use
* `singleType' for creation.
*/
- case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
+ abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
override val isTrivial: Boolean = pre.isTrivial
// override def isNullable = underlying.isNullable
override def isNotNull = underlying.isNotNull
@@ -1185,7 +1194,12 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "SingleType"
}
- object SingleType extends SingleTypeExtractor
+ final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym) with UniqueType { }
+ object SingleType extends SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type = {
+ unique(new UniqueSingleType(pre, sym))
+ }
+ }
abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
@@ -1198,10 +1212,12 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "SuperType"
}
+ final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp) with UniqueType { }
object SuperType extends SuperTypeExtractor {
- def apply(thistp: Type, supertp: Type): Type =
+ def apply(thistp: Type, supertp: Type): Type = {
if (phase.erasedTypes) supertp
- else unique(new SuperType(thistp, supertp) with UniqueType)
+ else unique(new UniqueSuperType(thistp, supertp))
+ }
}
/** A class for the bounds of abstract types and type parameters
@@ -1219,13 +1235,14 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "TypeBoundsType"
}
+ final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) with UniqueType { }
object TypeBounds extends TypeBoundsExtractor {
def empty: TypeBounds = apply(NothingClass.tpe, AnyClass.tpe)
def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
-
- def apply(lo: Type, hi: Type): TypeBounds =
- unique(new TypeBounds(lo, hi) with UniqueType)
+ def apply(lo: Type, hi: Type): TypeBounds = {
+ unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
+ }
}
/** A common base class for intersection types and class types
@@ -1450,9 +1467,12 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "RefinedType"
}
+ final class RefinedType0(parents: List[Type], decls: Scope, clazz: Symbol) extends RefinedType(parents, decls) {
+ override def typeSymbol = clazz
+ }
object RefinedType extends RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope, clazz: Symbol) =
- new RefinedType(parents, decls) { override def typeSymbol = clazz }
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
+ new RefinedType0(parents, decls, clazz)
}
/** A class representing a class info
@@ -1617,15 +1637,15 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
override def kind = "ConstantType"
}
+ final class UniqueConstantType(value: Constant) extends ConstantType(value) with UniqueType {
+ /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
+ * which might not be found after 'flatten'. */
+ private lazy val _tpe: Type = value.tpe
+ override def underlying: Type = _tpe
+ }
object ConstantType extends ConstantTypeExtractor {
def apply(value: Constant): ConstantType = {
- class UniqueConstantType extends ConstantType(value) with UniqueType {
- /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
- * which might not be found after 'flatten'. */
- private lazy val _tpe: Type = value.tpe
- override def underlying: Type = _tpe
- }
- unique(new UniqueConstantType)
+ unique(new UniqueConstantType(value)).asInstanceOf[ConstantType]
}
}
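Several hunks in this file follow the same recipe: the case class becomes abstract, a named final subclass mixes in UniqueType, and the companion's apply routes construction through unique(...) so that structurally equal types share one instance. A minimal sketch of that hash-consing shape, using a plain HashMap where the compiler uses its own unique() machinery:

    object UniqueSketch {
      import scala.collection.mutable

      abstract case class Pt(x: Int, y: Int)          // abstract: only the pooled subclass is instantiated
      final class UniquePt(x: Int, y: Int) extends Pt(x, y)

      private val pool = mutable.HashMap[Pt, Pt]()
      private def unique(p: Pt): Pt = pool.getOrElseUpdate(p, p)

      object Pt {
        def apply(x: Int, y: Int): Pt = unique(new UniquePt(x, y))
      }

      def main(args: Array[String]) {
        println(Pt(1, 2) eq Pt(1, 2))                 // true: equal values are the same instance
      }
    }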
@@ -1649,12 +1669,20 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
// assert(args.isEmpty || !sym.info.typeParams.isEmpty, this)
// assert(args.isEmpty || ((sym ne AnyClass) && (sym ne NothingClass))
- private val parentsCache = new ListOfTypesCache {
- @inline final def calculate() = thisInfo.parents map transform
- }
+ private var parentsCache: List[Type] = _
+ private var parentsPeriod = NoPeriod
+
private var baseTypeSeqCache: BaseTypeSeq = _
private var baseTypeSeqPeriod = NoPeriod
+ private var symInfoCache: Type = _
+ private var memberInfoCache: Type = _
+ private var thisInfoCache: Type = _
+ private var relativeInfoCache: Type = _
+
+ private var normalized: Type = null
+
+
override def isStable: Boolean = {
sym == NothingClass ||
sym == SingletonClass ||
@@ -1706,12 +1734,28 @@ trait Types extends reflect.generic.Types { self: SymbolTable =>
//@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
- def thisInfo =
+ def thisInfo: Type =
if (sym.isAliasType) normalize
- else if (sym.isNonClassType) transformInfo(sym.info)
- else sym.info
+ else if (!sym.isNonClassType) sym.info
+ else {
+ val symInfo = sym.info
+ if (thisInfoCache == null || (symInfo ne symInfoCache)) {
+ symInfoCache = symInfo
+ thisInfoCache = transformInfo(symInfo)
+ }
+ thisInfoCache
+ }
- def relativeInfo = if (sym.isNonClassType) transformInfo(pre.memberInfo(sym)) else pre.memberInfo(sym)
+ def relativeInfo: Type =
+ if (!sym.isNonClassType) pre.memberInfo(sym)
+ else {
+ val memberInfo = pre.memberInfo(sym)
+ if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
+ memberInfoCache = memberInfo
+ relativeInfoCache = transformInfo(memberInfo)
+ }
+ relativeInfoCache
+ }
override def typeSymbol = if (sym.isAliasType) normalize.typeSymbol else sym
override def termSymbol = if (sym.isAliasType) normalize.termSymbol else super.termSymbol
@@ -1731,7 +1775,18 @@ A type's typeSymbol should never be inspected directly.
if (sym.isAbstractType) thisInfo.bounds // transform(thisInfo.bounds).asInstanceOf[TypeBounds] // ??? seems to be doing asSeenFrom twice
else super.bounds
- override def parents: List[Type] = parentsCache.get()
+ override def parents: List[Type] = {
+ val period = parentsPeriod
+ if (period != currentPeriod) {
+ parentsPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ parentsCache = thisInfo.parents map transform
+ } else if (parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641

+ parentsCache = List(AnyClass.tpe)
+ }
+ }
+ parentsCache
+ }
override def typeOfThis = transform(sym.typeOfThis)
/*
@@ -1751,6 +1806,7 @@ A type's typeSymbol should never be inspected directly.
override def typeArgs: List[Type] = args
private def typeArgsOrDummies = if (!isHigherKinded) args else dummyArgs
+ // def hasFishyArgs = args == dummyArgs
// @MAT was typeSymbol.unsafeTypeParams, but typeSymbol normalizes now
private def typeParamsDirect =
@@ -1758,20 +1814,30 @@ A type's typeSymbol should never be inspected directly.
else sym.unsafeTypeParams
// placeholders derived from type params
- private def dummyArgs = typeParamsDirect map (_.typeConstructor) //@M must be .typeConstructor
+ private def dummyArgs = {
+ // @PP to @AM: this appears to me a place where
+ // higher-order tparams are going off the beam.
+ // if (sym.isAbstractType) { something goes wrong }
+
+ //@M must be .typeConstructor
+ typeParamsDirect map (_.typeConstructor)
+ }
// (!result.isEmpty) IFF isHigherKinded
override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
+ // note: does not go through typeRef. There's no need to because
+ // neither `pre` nor `sym` changes. And there's a performance
+ // advantage to call TypeRef directly.
override def typeConstructor = TypeRef(pre, sym, Nil)
- // note: does not go through typeRef. There's no need to because neither `pre' nor `sym' changes.
- // And there's a performance advantage to call TypeRef directly.
-
- // a reference (in a Scala program) to a type that has type parameters, but where the reference does not include type arguments
- // note that it doesn't matter whether the symbol refers to a java or scala symbol,
- // it does matter whether it occurs in java or scala code
- // typerefs w/o type params that occur in java signatures/code are considered raw types, and are represented as existential types
+ // A reference (in a Scala program) to a type that has type
+ // parameters, but where the reference does not include type
+ // arguments. Note that it doesn't matter whether the symbol refers
+ // to a java or scala symbol, it does matter whether it occurs in
+ // java or scala code. TypeRefs w/o type params that occur in java
+ // signatures/code are considered raw types, and are represented as
+ // existential types.
override def isHigherKinded = args.isEmpty && typeParamsDirect.nonEmpty
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
@@ -1779,29 +1845,32 @@ A type's typeSymbol should never be inspected directly.
val substTps = formals.intersect(typeParams)
if (sameLength(substTps, typeParams))
- typeRef(pre, sym, actuals)
+ copyTypeRef(this, pre, sym, actuals)
else if (sameLength(formals, actuals)) // partial application (needed in infer when bunching type arguments from classes and methods together)
- typeRef(pre, sym, dummyArgs).subst(formals, actuals)
+ copyTypeRef(this, pre, sym, dummyArgs).subst(formals, actuals)
else ErrorType
}
else
super.instantiateTypeParams(formals, actuals)
- private var normalized: Type = null
-
+ /** @pre: sym.info.typeParams.length == typeArgs.length */
@inline private def betaReduce: Type = {
- assert(sameLength(sym.info.typeParams, typeArgs), this)
- // isHKSubType0 introduces synthetic type params so that betaReduce can first apply sym.info to typeArgs before calling asSeenFrom
- // asSeenFrom then skips synthetic type params, which are used to reduce HO subtyping to first-order subtyping, but which can't be instantiated from the given prefix and class
- // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner) // this crashes pos/depmet_implicit_tpbetareduce.scala
+ // isHKSubType0 introduces synthetic type params so that
+ // betaReduce can first apply sym.info to typeArgs before calling
+ // asSeenFrom. asSeenFrom then skips synthetic type params, which
+ // are used to reduce HO subtyping to first-order subtyping, but
+ // which can't be instantiated from the given prefix and class.
transform(sym.info.resultType)
+ //
+ // this crashes pos/depmet_implicit_tpbetareduce.scala
+ // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
}
// @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
@inline private def etaExpand: Type = {
val tpars = sym.info.typeParams // must go through sym.info for typeParams to initialise symbol
- typeFunAnon(tpars, typeRef(pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
+ typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
}
override def dealias: Type =
@@ -1809,7 +1878,7 @@ A type's typeSymbol should never be inspected directly.
betaReduce.dealias
} else this
- def normalize0: Type =
+ private def normalize0: Type =
if (pre eq WildcardType) WildcardType // arises when argument-dependent types are approximated (see def depoly in implicits)
else if (isHigherKinded) etaExpand // eta-expand, subtyping relies on eta-expansion of higher-kinded types
else if (sym.isAliasType && sameLength(sym.info.typeParams, args))
@@ -1891,8 +1960,16 @@ A type's typeSymbol should never be inspected directly.
case TypeRef(_, RepeatedParamClass, arg :: _) => return arg + "*"
case TypeRef(_, ByNameParamClass, arg :: _) => return "=> " + arg
case _ =>
- if (isFunctionType(this))
- return normalize.typeArgs.init.mkString("(", ", ", ")") + " => " + normalize.typeArgs.last
+ if (isFunctionType(this)) {
+ val targs = normalize.typeArgs
+ // Aesthetics: printing Function1 as T => R rather than (T) => R
+ val paramlist = targs.init match {
+ case Nil => "()"
+ case x :: Nil => "" + x
+ case xs => xs.mkString("(", ", ", ")")
+ }
+ return paramlist + " => " + targs.last
+ }
else if (isTupleTypeOrSubtype(this))
return normalize.typeArgs.mkString("(", ", ", if (hasLength(normalize.typeArgs, 1)) ",)" else ")")
else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic)) {
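The safeToString change in the hunk above prints one-argument function types without the surrounding parentheses. A tiny sketch of just that parameter-list formatting (the real code derives the list from normalize.typeArgs.init):

    object ArrowPrintSketch {
      def paramList(params: List[String]): String = params match {
        case Nil      => "()"
        case p :: Nil => p                            // single parameter: no parentheses
        case ps       => ps.mkString("(", ", ", ")")
      }

      def main(args: Array[String]) {
        println(paramList(Nil) + " => Unit")                     // () => Unit
        println(paramList(List("Int")) + " => String")           // Int => String
        println(paramList(List("Int", "Int")) + " => String")    // (Int, Int) => String
      }
    }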
@@ -1943,9 +2020,10 @@ A type's typeSymbol should never be inspected directly.
override def kind = "TypeRef"
}
+ final class UniqueTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends TypeRef(pre, sym, args) with UniqueType { }
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = {
- unique(new TypeRef(pre, sym, args) with UniqueType)
+ unique(new UniqueTypeRef(pre, sym, args))
}
}
@@ -2248,10 +2326,13 @@ A type's typeSymbol should never be inspected directly.
//@M
// a TypeVar used to be a case class with only an origin and a constr
- // then, constr became mutable (to support UndoLog, I guess), but pattern-matching returned the original constr0 (a bug)
+ // then, constr became mutable (to support UndoLog, I guess),
+ // but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
- // encapsulate suspension so we can automatically link the suspension of cloned typevars to their original if this turns out to be necessary
+ // encapsulate suspension so we can automatically link the suspension of cloned
+ // typevars to their original if this turns out to be necessary
+/*
def Suspension = new Suspension
class Suspension {
private val suspended = mutable.HashSet[TypeVar]()
@@ -2260,17 +2341,20 @@ A type's typeSymbol should never be inspected directly.
suspended += tv
}
def resumeAll(): Unit = {
- for(tv <- suspended) {
+ for (tv <- suspended) {
tv.suspended = false
}
- suspended.clear
+ suspended.clear()
}
}
-
+*/
def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams) // TODO why not initialise TypeConstraint with bounds of tparam?
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) = new TypeVar(origin, constr, args, params)
+ // TODO why not initialise TypeConstraint with bounds of tparam?
+ // @PP: I tried that, didn't work out so well for me.
+ def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) =
+ new TypeVar(origin, constr, args, params)
}
/** A class representing a type variable
@@ -2334,7 +2418,7 @@ A type's typeSymbol should never be inspected directly.
// </region>
// ignore subtyping&equality checks while true -- see findMember
- private[TypeVar] var suspended = false
+ private[Types] var suspended = false
/** Called when a TypeVar is involved in a subtyping check. Result is whether
* this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
@@ -2400,12 +2484,14 @@ A type's typeSymbol should never be inspected directly.
// this <: tp.baseType(sym)
if (suspended) checkSubtype(tp, origin)
else if (constr.instValid) checkSubtype(tp, constr.inst) // type var is already set
- else isRelatable(tp) && {
- unifySimple || unifyFull(tp) || unifyFull(tp.dealias) || unifyFull(tp.widen) || unifyParents
+ else isRelatable(tp) && { // gradually let go of some type precision in hopes of finding a type that has the same shape as the type variable
+ // okay, this just screams "CLEAN ME UP" -- I think we could use tp.widen instead of tp straight from the get-go in registerBound, since we don't infer singleton types anyway (but maybe that'll change?)
+ unifySimple || unifyFull(tp) || unifyFull(tp.dealias) || unifyFull(tp.widen) || unifyFull(tp.widen.dealias) || unifyParents
}
}
- def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = { //println("regTypeEq: "+(safeToString, debugString(tp), typeVarLHS)) //@MDEBUG
+ def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
+ //println("regTypeEq: "+(safeToString, debugString(tp), typeVarLHS)) //@MDEBUG
def checkIsSameType(tp: Type) =
if(typeVarLHS) constr.inst =:= tp
else tp =:= constr.inst
@@ -2566,8 +2652,7 @@ A type's typeSymbol should never be inspected directly.
// Creators ---------------------------------------------------------------
- /** Rebind symbol `sym' to an overriding member in type
- * `pre'.
+ /** Rebind symbol `sym' to an overriding member in type `pre'.
*/
private def rebind(pre: Type, sym: Symbol): Symbol = {
val owner = sym.owner
@@ -2603,14 +2688,12 @@ A type's typeSymbol should never be inspected directly.
var sym1 = rebind(pre, sym)
val pre1 = removeSuper(pre, sym1)
if (pre1 ne pre) sym1 = rebind(pre1, sym1)
- // why not do the hash-consing in the SingleType.apply()
- // factory, like the other UniqueTypes?
- unique(new SingleType(pre1, sym1) with UniqueType)
+ SingleType(pre1, sym1)
}
}
/** the canonical creator for a refined type with a given scope */
- def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos : Position): Type = {
+ def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
@@ -2679,6 +2762,27 @@ A type's typeSymbol should never be inspected directly.
}
}
+ def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match {
+ case TypeRef(pre0, sym0, args0) =>
+ if ((pre == pre0) && (sym.name == sym0.name)) {
+
+ val sym1 = sym
+ // we require that object is initialized, thus info.typeParams instead of typeParams.
+ if (sym1.isAliasType && sameLength(sym1.info.typeParams, args)) {
+ if (sym1.lockOK) TypeRef(pre, sym1, args) // don't expand type alias (cycles checked by lockOK)
+ else throw new TypeError("illegal cyclic reference involving " + sym1)
+ }
+ else {
+ TypeRef(pre, sym1, args)
+ }
+
+ } else
+ typeRef(pre, sym, args)
+ }
+
+
+
+
/** The canonical creator for implicit method types */
def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
new JavaMethodType(params, resultType) // don't unique this!
@@ -2725,8 +2829,8 @@ A type's typeSymbol should never be inspected directly.
def appliedType(tycon: Type, args: List[Type]): Type =
if (args.isEmpty) tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
else tycon match {
- case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => typeRef(pre, sym, Nil) //@M drop type args to Any/Nothing
- case TypeRef(pre, sym, _) => typeRef(pre, sym, args)
+ case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
+ case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
case ExistentialType(tparams, restpe) => ExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
@@ -3010,7 +3114,7 @@ A type's typeSymbol should never be inspected directly.
else mapOverArgs(args, tparams)
}
if ((pre1 eq pre) && (args1 eq args)) tp
- else typeRef(pre1, coevolveSym(pre, pre1, sym), args1)
+ else copyTypeRef(tp, pre1, coevolveSym(pre, pre1, sym), args1)
case ThisType(_) => tp
case SingleType(pre, sym) =>
if (sym.isPackageClass) tp // short path
@@ -3418,7 +3522,7 @@ A type's typeSymbol should never be inspected directly.
/** A base class to compute all substitutions */
abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
- val fromContains = from.toSet // avoiding repeatedly traversing from
+ val fromContains = (x: Symbol) => from.contains(x) //from.toSet <-- traversing short lists seems to be faster than allocating sets
assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
/** Are `sym' and `sym1' the same.
@@ -3429,12 +3533,6 @@ A type's typeSymbol should never be inspected directly.
/** Map target to type, can be tuned by subclasses */
protected def toType(fromtp: Type, tp: T): Type
- def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
-
protected def renameBoundSyms(tp: Type): Type = tp match {
case MethodType(ps, restp) =>
val ps1 = cloneSymbols(ps)
@@ -3450,6 +3548,12 @@ A type's typeSymbol should never be inspected directly.
}
def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+ @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
+
val boundSyms = tp0.boundSyms
val tp1 = if (boundSyms exists fromContains) renameBoundSyms(tp0) else tp0
val tp = mapOver(tp1)
@@ -3479,11 +3583,11 @@ A type's typeSymbol should never be inspected directly.
/** A map to implement the `substSym' method. */
class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
- case TypeRef(pre, _, args) => typeRef(pre, sym, args)
+ case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
case SingleType(pre, _) => singleType(pre, sym)
}
override def apply(tp: Type): Type = if (from.isEmpty) tp else {
- def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
+ @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
if (from.isEmpty) sym
// else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
else if (matches(from.head, sym)) to.head
@@ -3492,7 +3596,7 @@ A type's typeSymbol should never be inspected directly.
case TypeRef(pre, sym, args) if pre ne NoPrefix =>
val newSym = subst(sym, from, to)
// assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- mapOver(typeRef(pre, newSym, args)) // mapOver takes care of subst'ing in args
+ mapOver(copyTypeRef(tp, pre, newSym, args)) // mapOver takes care of subst'ing in args
case SingleType(pre, sym) if pre ne NoPrefix =>
mapOver(singleType(pre, subst(sym, from, to)))
case _ =>
@@ -3875,8 +3979,14 @@ A type's typeSymbol should never be inspected directly.
val args1 = args mapConserve (this)
try {
val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) tp
- else typeRef(pre1, sym1, args1)
+ if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+ tp
+ } else if (sym1 == NoSymbol) {
+ if (settings.debug.value) println("adapt fail: "+pre+" "+pre1+" "+sym)
+ tp
+ } else {
+ copyTypeRef(tp, pre1, sym1, args1)
+ }
} catch {
case ex: MissingAliasControl =>
apply(tp.dealias)
@@ -4018,7 +4128,7 @@ A type's typeSymbol should never be inspected directly.
patType match {
case TypeRef(pre, sym, args) =>
val pre1 = maybeCreateDummyClone(pre, sym)
- (pre1 ne NoType) && isPopulated(typeRef(pre1, sym, args), selType)
+ (pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
case _ =>
false
}
@@ -4443,7 +4553,7 @@ A type's typeSymbol should never be inspected directly.
def instTypeVar(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
- typeRef(instTypeVar(pre), sym, args)
+ copyTypeRef(tp, instTypeVar(pre), sym, args)
case SingleType(pre, sym) =>
singleType(instTypeVar(pre), sym)
case TypeVar(_, constr) =>
@@ -4965,27 +5075,62 @@ A type's typeSymbol should never be inspected directly.
// Lubs and Glbs ---------------------------------------------------------
- /** The least sorted upwards closed upper bound of a non-empty list
- * of lists of types relative to the following ordering <= between lists of types:
+ /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+ * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
*
* xs <= ys iff forall y in ys exists x in xs such that x <: y
*
+ *
+ * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+ * (these type parameters may be referred to by type arguments in the BTS column of those types,
+ * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+ * @arg tsBts a matrix whose columns are basetype sequences
+ * the first row is the original list of types for which we're computing the lub
+ * (except that type constructors have been applied to their dummyArgs)
* @See baseTypeSeq for a definition of sorted and upwards closed.
*/
- private def lubList(tss: List[List[Type]], depth: Int): List[Type] =
- if (tss.tail.isEmpty) tss.head
- else if (tss exists (_.isEmpty)) List()
+ private def lubList(tsParams: List[List[Symbol]], tsBts: List[List[Type]], depth: Int): List[Type] = {
+ // strip typerefs in ts from their arguments if those refer to type parameters that are meant to be bound
+ // TODO: this only deals with the simplest of type constructors
+ // a better fix would be to actually bind those type parameters that appear free in error, but that would require major changes to the BTS infrastructure
+ // example that only kindasorta works now...
+ // given: trait Container[+T]; trait Template[+CC[X] <: Container[X]]; class C1[T] extends Template[Container] with Container[T]
+ // C1's BTS contains Template[Container] with Container[T], but that should really be [T] => Template[Container] with Container[T]
+ // instead of wrapping it in a polytype, the current approach uses elimHOTparams to patch up this type so that
+ // it looks more like a type ctor: Template[Container] with Container, but this is ill-kinded as Template[Container] is a proper type, whereas Container is not
+ def elimHOTparams(ts: List[Type]) = ts map {
+ case tp@TypeRef(pre, sym, args) if args.nonEmpty && tsParams.contains(args.map(_.typeSymbol)) => tp.typeConstructor
+ case tp => tp
+ }
+
+ if (tsBts.tail.isEmpty) tsBts.head
+ else if (tsBts exists (_.isEmpty)) List()
else {
- val ts0 = tss map (_.head)
- val sym = minSym(ts0)
- if (ts0 forall (_.typeSymbol == sym))
- mergePrefixAndArgs(elimSub(ts0, depth), 1, depth).toList ::: lubList(tss map (_.tail), depth)
- else
- lubList(tss map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts), depth)
+ val ts0 = tsBts map (_.head) // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts,
+ // invariant: all symbols "under" (closer to the first row) the frontier are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+
+ // is the frontier made up of types with the same symbol?
+ // --> produce a single type for this frontier by merging the prefixes and arguments of these typerefs that share the same symbol
+ // due to the invariant, that symbol is the current maximal symbol for which this holds, i.e., the one that conveys most information wrt subtyping
+ // before merging, strip type arguments that refer to bound type params (when we're computing the lub of type constructors)
+ // furthermore, the number of types to merge is reduced without losing information by dropping types that are a subtype of some other type
+ val sym0 = ts0.head.typeSymbol
+ if (ts0.tail forall (_.typeSymbol == sym0)){
+ mergePrefixAndArgs(elimSub(elimHOTparams(ts0), depth), 1, depth).toList ::: lubList(tsParams, tsBts map (_.tail), depth)
+ } else {
+ // frontier is not uniform yet, move it beyond the current minimal symbol; lather, rinse, repeat
+ val sym = minSym(ts0)
+ lubList(tsParams, tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts), depth)
+ }
}
-
- private def lubBaseTypeSeq(tss: List[BaseTypeSeq], depth: Int): List[Type] =
- lubList(tss map (_.toList), depth)
+ }
+ // @AM the following problem is solved by elimHOTparams in lublist
+ // @PP lubLists gone bad: lubList(List(
+ // List(scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq], ScalaObject, java.lang.Object, Any)
+ // List(scala.collection.generic.GenericCompanion[scala.collection.mutable.Seq], ScalaObject, java.lang.Object, Any)
+ // )) == (
+ // List(scala.collection.generic.GenericCompanion[Seq**[Any]**], ScalaObject, java.lang.Object, Any)
+ // )
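
The ordering used by lubList above can be read independently of the compiler: xs <= ys iff every y in ys has some x in xs with x <: y. A minimal standalone sketch, assuming a hypothetical isSubtype relation in place of <:< and leaving the element type abstract:

    // Sketch only: not compiler code.
    def lessOrEqual[T](xs: List[T], ys: List[T])(isSubtype: (T, T) => Boolean): Boolean =
      ys forall (y => xs exists (x => isSubtype(x, y)))
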
/** The minimal symbol (wrt Symbol.isLess) of a list of types */
private def minSym(tps: List[Type]): Symbol =
@@ -5009,6 +5154,12 @@ A type's typeSymbol should never be inspected directly.
val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
if (rest exists (t1 => t1 <:< t)) rest else t :: rest
}
+ def elimAnonymousClass(t: Type) = t match {
+ case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
+ clazz.classBound.asSeenFrom(pre, clazz.owner)
+ case _ =>
+ t
+ }
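
As a user-level reading of elimAnonymousClass above (hypothetical example, not part of the patch): the type of an anonymous instance is not denotable, so before subtype elimination it is replaced by its class bound as seen from the prefix.

    // REPL-style sketch: the anonymous class of `r` has no usable name, so for
    // lub/elimSub purposes it is treated as (roughly) Object with Runnable.
    val r = new Runnable { def run(): Unit = () }
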
/** A collector that tests for existential types appearing at given variance in a type */
class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) {
@@ -5028,12 +5179,6 @@ A type's typeSymbol should never be inspected directly.
/** Eliminate from list of types all elements which are a subtype
* of some other element of the list. */
private def elimSub(ts: List[Type], depth: Int): List[Type] = {
- def elimAnonymousClass(t: Type) = t match {
- case TypeRef(pre, clazz, List()) if clazz.isAnonymousClass =>
- clazz.classBound.asSeenFrom(pre, clazz.owner)
- case _ =>
- t
- }
def elimSub0(ts: List[Type]): List[Type] = ts match {
case List() => List()
case t :: ts1 =>
@@ -5158,8 +5303,7 @@ A type's typeSymbol should never be inspected directly.
}
def lub1(ts0: List[Type]): Type = {
val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val bts: List[BaseTypeSeq] = ts map (_.baseTypeSeq)
- val lubBaseTypes: List[Type] = lubBaseTypeSeq(bts, depth)
+ val lubBaseTypes: List[Type] = lubList(ts map (_.typeParams), ts map (_.baseTypeSeq.toList), depth)
val lubParents = spanningTypes(lubBaseTypes)
val lubOwner = commonOwner(ts)
val lubBase = intersectionType(lubParents, lubOwner)
@@ -5209,9 +5353,25 @@ A type's typeSymbol should never be inspected directly.
}
}
if (lubRefined.decls.isEmpty) lubBase
+ else if (!verifyLubs) lubRefined
else {
-// println("refined lub of "+ts+"/"+narrowts+" is "+lubRefined+", baseclasses = "+(ts map (_.baseTypeSeq) map (_.toList)))
- lubRefined
+ // Verify that every given type conforms to the calculated lub.
+ // In theory this should not be necessary, but higher-order type
+ // parameters are not handled correctly.
+ val ok = ts forall { t =>
+ (t <:< lubRefined) || {
+ if (settings.debug.value) {
+ Console.println(
+ "Malformed lub: " + lubRefined + "\n" +
+ "Argument " + t + " does not conform. Falling back to " + lubBase
+ )
+ }
+ false
+ }
+ }
+ // If not, fall back on the more conservative calculation.
+ if (ok) lubRefined
+ else lubBase
}
}
existentialAbstraction(tparams, lubType)
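
The verification added above is a plain "verify, else use the conservative answer" pattern. A minimal sketch, assuming a hypothetical conforms relation standing in for <:<:

    def verifiedLub[T](ts: List[T], refined: T, base: T)(conforms: (T, T) => Boolean): T =
      if (ts forall (t => conforms(t, refined))) refined   // every input conforms: keep the refined lub
      else base                                            // otherwise fall back to the conservative lub
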
@@ -5406,13 +5566,12 @@ A type's typeSymbol should never be inspected directly.
else Some(typeRef(pre, sym, List(lub(args))))
}
} else {
- val args = (sym.typeParams, argss.transpose).zipped map {
- (tparam, as) =>
+ val args = (sym.typeParams, argss.transpose).zipped map { (tparam, as) =>
if (depth == 0)
if (tparam.variance == variance) AnyClass.tpe
else if (tparam.variance == -variance) NothingClass.tpe
else NoType
- else
+ else {
if (tparam.variance == variance) lub(as, decr(depth))
else if (tparam.variance == -variance) glb(as, decr(depth))
else {
@@ -5428,7 +5587,8 @@ A type's typeSymbol should never be inspected directly.
qvar.tpe
}
}
- }
+ }
+ }
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
}
@@ -5473,26 +5633,32 @@ A type's typeSymbol should never be inspected directly.
* Returns list of list of bounds infos, where corresponding type
* parameters are renamed to tparams.
*/
- private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] =
- tps map {
+ private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+ def getBounds(tp: Type): List[Type] = tp match {
case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
- case _ =>
- throw new NoCommonType(tps)
+ case tp =>
+ if (tp ne tp.normalize) getBounds(tp.normalize)
+ else throw new NoCommonType(tps)
}
+ tps map getBounds
+ }
/** All types in list must be polytypes with type parameter lists of
* same length as tparams.
* Returns list of instance types, where corresponding type
* parameters are renamed to tparams.
*/
- private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] =
- tps map {
+ private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+ def transformResultType(tp: Type): Type = tp match {
case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
restpe.substSym(tparams1, tparams)
- case _ =>
- throw new NoCommonType(tps)
+ case tp =>
+ if (tp ne tp.normalize) transformResultType(tp.normalize)
+ else throw new NoCommonType(tps)
}
+ tps map transformResultType
+ }
/** All types in list must be method types with equal parameter types.
* Returns list of their result types.
@@ -5508,10 +5674,11 @@ A type's typeSymbol should never be inspected directly.
}
-// TODO: this desperately needs to be cleaned up
-// plan: split into kind inference and subkinding
-// every Type has a (cached) Kind
- def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean = checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+ // TODO: this desperately needs to be cleaned up
+ // plan: split into kind inference and subkinding
+ // every Type has a (cached) Kind
+ def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
+ checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
/** Check well-kindedness of type application (assumes arities are already checked) -- @M
*
@@ -5541,7 +5708,15 @@ A type's typeSymbol should never be inspected directly.
def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
// check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(hkargs: List[Symbol], arg: Symbol, param: Symbol, paramowner: Symbol, underHKParams: List[Symbol], withHKArgs: List[Symbol]): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
+ def checkKindBoundsHK(
+ hkargs: List[Symbol],
+ arg: Symbol,
+ param: Symbol,
+ paramowner: Symbol,
+ underHKParams: List[Symbol],
+ withHKArgs: List[Symbol]
+ ): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
+
def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
// @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
val hkparams = param.typeParams
@@ -5553,12 +5728,14 @@ A type's typeSymbol should never be inspected directly.
}
if (!sameLength(hkargs, hkparams)) {
- if(arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
- else {error = true; (List((arg, param)), Nil, Nil)} // shortcut: always set error, whether explainTypesOrNot
- } else {
- val _arityMismatches = if(explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _varianceMismatches = if(explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _stricterBounds = if(explainErrors)new ListBuffer[(Symbol, Symbol)] else null
+ if (arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
+ else {error = true; (List((arg, param)), Nil, Nil) } // shortcut: always set error, whether explainTypesOrNot
+ }
+ else {
+ val _arityMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
+ val _varianceMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
+ val _stricterBounds = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
+
def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true}
def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true }
def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as }
@@ -5574,42 +5751,66 @@ A type's typeSymbol should never be inspected directly.
// substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
// --> their arguments use different symbols, but are conceptually the same
// (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- if (!(bindHKParams(transformedBounds(hkparam, paramowner)) <:< transform(hkarg.info.bounds, owner)))
+ val declaredBounds = transformedBounds(hkparam, paramowner)
+ val declaredBoundsInst = bindHKParams(declaredBounds)
+ val argumentBounds = transform(hkarg.info.bounds, owner)
+ if (!(declaredBoundsInst <:< argumentBounds))
stricterBound(hkarg, hkparam)
- if (settings.debug.value) {
- log("checkKindBoundsHK base case: "+ hkparam +" declared bounds: "+ transformedBounds(hkparam, paramowner) +" after instantiating earlier hkparams: "+ bindHKParams(transformedBounds(hkparam, paramowner)))
- log("checkKindBoundsHK base case: "+ hkarg +" has bounds: "+ transform(hkarg.info.bounds, owner))
- }
- } else {
- if(settings.debug.value) log("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
- val (am, vm, sb) = checkKindBoundsHK(hkarg.typeParams, hkarg, hkparam, paramowner, underHKParams ++ hkparam.typeParams, withHKArgs ++ hkarg.typeParams)
+ if (settings.debug.value) log(
+ "checkKindBoundsHK base case: " + hkparam +
+ " declared bounds: " + declaredBounds +
+ " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
+ "checkKindBoundsHK base case: "+ hkarg +
+ " has bounds: " + argumentBounds
+ )
+ }
+ else {
+ if (settings.debug.value)
+ log("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ val (am, vm, sb) = checkKindBoundsHK(
+ hkarg.typeParams,
+ hkarg,
+ hkparam,
+ paramowner,
+ underHKParams ++ hkparam.typeParams,
+ withHKArgs ++ hkarg.typeParams
+ )
arityMismatches(am)
varianceMismatches(vm)
stricterBounds(sb)
}
- if(!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encountered an error
+ if (!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encounter an error
}
- if(!explainErrors) (Nil, Nil, Nil)
+ if (!explainErrors) (Nil, Nil, Nil)
else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
}
}
val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])]
- (tparams zip targs).foreach{ case (tparam, targ) if (targ.isHigherKinded || !tparam.typeParams.isEmpty) =>
- // @M must use the typeParams of the type targ, not the typeParams of the symbol of targ!!
+ if (tparams.nonEmpty || targs.nonEmpty)
+ log("checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")")
+
+ for {
+ (tparam, targ) <- tparams zip targs
+ // Prevent WildcardType from causing kind errors, as typevars may be higher-order
+ if (targ != WildcardType) && (targ.isHigherKinded || tparam.typeParams.nonEmpty)
+ } {
+ // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
targ.typeSymbolDirect.info // force symbol load for #4205
- val tparamsHO = targ.typeParams
-
- val (arityMismatches, varianceMismatches, stricterBounds) =
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO) // NOTE: *not* targ.typeSymbol, which normalizes
-
- if(!explainErrors) {if(error) return List((NoType, NoSymbol, Nil, Nil, Nil))}
- else if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
- errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
+ val tparamsHO = targ.typeParams
+
+ val (arityMismatches, varianceMismatches, stricterBounds) = (
+ // NOTE: *not* targ.typeSymbol, which normalizes
+ checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO)
+ )
+ if (explainErrors) {
+ if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
+ errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
+ }
}
- // case (tparam, targ) => println("no check: "+(tparam, targ, tparam.typeParams.isEmpty))
- case _ =>
+ else if (error)
+ return List((NoType, NoSymbol, Nil, Nil, Nil))
}
errors.toList
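
For orientation, the three mismatch categories collected above (arity, variance, stricter bounds) show up at the source level roughly as follows. These are hypothetical, intentionally non-compiling shapes, not taken from the patch:

    trait TakesCov[F[+X]]
    trait NeedsLoose[F[X]]
    // class BadArity           extends TakesCov[Map]        // arity mismatch: Map takes two type parameters
    // class BadVariance[G[-X]] extends TakesCov[G]          // variance mismatch: contravariant where covariant expected
    // class TooStrict[G[X <: AnyVal]] extends NeedsLoose[G] // stricter bounds on G's type parameter
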
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 08094d4104..36112e58de 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -741,7 +741,7 @@ abstract class ClassfileParser {
case variance @ ('+' | '-' | '*') =>
index += 1
val bounds = variance match {
- case '+' => TypeBounds.upper(sig2type(tparams, skiptvs))
+ case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
case '-' => TypeBounds.lower(sig2type(tparams, skiptvs))
case '*' => TypeBounds.empty
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 007ab36e5e..eb6f37dfbe 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -66,6 +66,39 @@ abstract class Erasure extends AddInterfaces
}
}
+ // A type function from T => Class[U], used to determine the return
+ // type of getClass calls. The returned type is:
+ //
+ // 1. If T is a value type, Class[T].
+ // 2. If T is anonymous or a refinement type, calculate the intersection
+ // dominator of the parents T', and Class[_ <: T'].
+ // 3. If T is a phantom type (Any or AnyVal), Class[_].
+ // 4. Otherwise, Class[_ <: T].
+ //
+ // Note: AnyVal cannot be Class[_ <: AnyVal] because if the static type of the
+ // receiver is AnyVal, it implies the receiver is boxed, so the correct
+ // class object is that of java.lang.Integer, not Int.
+ //
+ // TODO: If T is final, return type could be Class[T]. Should it?
+ def getClassReturnType(tp: Type): Type = {
+ def mkClass(targs: List[Type]) = typeRef(ClassClass.tpe.prefix, ClassClass, targs)
+ val tparams = ClassClass.typeParams
+ val sym = tp.typeSymbol
+
+ if (tparams.isEmpty) mkClass(Nil) // call must be coming post-erasure
+ else if (isValueClass(sym)) mkClass(List(tp.widen))
+ else if (sym.isLocalClass) getClassReturnType(erasure.intersectionDominator(tp.parents))
+ else {
+ val eparams = typeParamsToExistentials(ClassClass, tparams)
+ val upperBound = if (isPhantomClass(sym)) AnyClass.tpe else tp.widen
+
+ existentialAbstraction(
+ eparams,
+ mkClass(List(eparams.head setInfo TypeBounds.upper(upperBound) tpe))
+ )
+ }
+ }
+
private def unboundedGenericArrayLevel(tp: Type): Int = tp match {
case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
case _ => 0
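
The intent of getClassReturnType can be illustrated with hypothetical user-level expressions; the ascribed static types are what the rule above computes once this change is in effect (case numbers refer to the comment above):

    val c1: Class[Int]         = 5.getClass         // case 1: value type
    val c2: Class[_ <: String] = "abc".getClass     // case 4: ordinary reference type
    val c3: Class[_]           = (5: Any).getClass  // case 3: phantom static type Any

At erasure the value-class case is additionally rewritten to a ScalaRunTime.anyValClass call, as the Apply case further down in this file shows.
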
@@ -230,7 +263,7 @@ abstract class Erasure extends AddInterfaces
}
}
// for debugging signatures: traces logic given system property
- private val traceProp = sys.BooleanProp keyExists "scalac.sigs.trace"
+ private val traceProp = (sys.BooleanProp keyExists "scalac.sigs.trace").value // performance: get the value here
private val traceSig = util.Tracer(traceProp)
/** This object is only used for sanity testing when -check:genjvm is set.
@@ -491,6 +524,12 @@ abstract class Erasure extends AddInterfaces
case _ => tp.deconst
}
}
+ // Methods on Any/Object which we rewrite here while we still know what
+ // is a primitive and what arrived boxed.
+ private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass) ++ (
+ // Each value class has its own getClass for ultra-precise class object typing.
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
// -------- erasure on trees ------------------------------------------
@@ -529,6 +568,7 @@ abstract class Erasure extends AddInterfaces
case UnitClass =>
if (treeInfo isPureExpr tree) REF(BoxedUnit_UNIT)
else BLOCK(tree, REF(BoxedUnit_UNIT))
+ case NothingClass => tree // a non-terminating expression doesn't need boxing
case x =>
assert(x != ArrayClass)
tree match {
@@ -576,8 +616,14 @@ abstract class Erasure extends AddInterfaces
/** Generate a synthetic cast operation from tree.tpe to pt.
* @pre pt eq pt.normalize
*/
- private def cast(tree: Tree, pt: Type): Tree =
- tree AS_ATTR pt
+ private def cast(tree: Tree, pt: Type): Tree = {
+ if (pt.typeSymbol == UnitClass) {
+ // See SI-4731 for one example of how this occurs.
+ log("Attempted to cast to Unit: " + tree)
+ tree.duplicate setType pt
+ }
+ else tree AS_ATTR pt
+ }
private def isUnboxedValueMember(sym: Symbol) =
sym != NoSymbol && isValueClass(sym.owner)
@@ -596,7 +642,7 @@ abstract class Erasure extends AddInterfaces
else if (isValueClass(tree.tpe.typeSymbol) && !isValueClass(pt.typeSymbol))
adaptToType(box(tree), pt)
else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.params.isEmpty) {
- if (!tree.symbol.isStable) assert(false, "adapt "+tree+":"+tree.tpe+" to "+pt)
+ assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
} else if (pt <:< tree.tpe)
cast(tree, pt)
@@ -973,17 +1019,24 @@ abstract class Erasure extends AddInterfaces
SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
args)
- case Apply(fn @ Select(qual, _), Nil) if fn.symbol == Any_## || fn.symbol == Object_## =>
- // This is unattractive, but without it we crash here on ().## because after
- // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
- // This must be because some earlier transformation is being skipped on ##, but so
- // far I don't know what. For null we now define null.## == 0.
- val arg = qual.tpe.typeSymbolDirect match {
- case UnitClass => BLOCK(qual, REF(BoxedUnit_UNIT)) // ({ expr; UNIT }).##
- case NullClass => LIT(0) // (null: Object).##
- case _ => qual
+ case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) =>
+ if (fn.symbol == Any_## || fn.symbol == Object_##) {
+ // This is unattractive, but without it we crash here on ().## because after
+ // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
+ // This must be because some earlier transformation is being skipped on ##, but so
+ // far I don't know what. For null we now define null.## == 0.
+ val arg = qual.tpe.typeSymbolDirect match {
+ case UnitClass => BLOCK(qual, REF(BoxedUnit_UNIT)) // ({ expr; UNIT }).##
+ case NullClass => LIT(0) // (null: Object).##
+ case _ => qual
+ }
+ Apply(gen.mkAttributedRef(scalaRuntimeHash), List(arg))
}
- Apply(gen.mkAttributedRef(scalaRuntimeHash), List(arg))
+ // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
+ else if (isValueClass(qual.tpe.typeSymbol))
+ Apply(gen.mkAttributedRef(scalaRuntimeAnyValClass), List(qual))
+ else
+ tree
case Apply(fn, args) =>
if (fn.symbol == Any_asInstanceOf)
@@ -1021,9 +1074,18 @@ abstract class Erasure extends AddInterfaces
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
case RefinedType(parents, decls) if (parents.length >= 2) =>
- gen.evalOnce(qual, currentOwner, unit) { q =>
+ // Optimization: don't generate isInstanceOf tests if the static type
+ // conforms, because it always succeeds. (Or at least it had better.)
+ // At this writing the pattern matcher generates some instance tests
+ // involving intersections where at least one parent is statically known true.
+ // That needs fixing, but filtering the parents here adds an additional
+ // level of robustness (in addition to the short-term fix).
+ val parentTests = parents filterNot (qual.tpe <:< _)
+
+ if (parentTests.isEmpty) Literal(Constant(true))
+ else gen.evalOnce(qual, currentOwner, unit) { q =>
atPos(tree.pos) {
- parents map mkIsInstanceOf(q) reduceRight gen.mkAnd
+ parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
}
case _ =>
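
The parent filtering above can be sketched in isolation; conforms is a hypothetical stand-in for <:< and the type representation is left abstract:

    // Keep only the parents whose instance test can actually fail at run time;
    // if none remain, the whole isInstanceOf check is the constant true.
    def instanceTestParents[T](qualTpe: T, parents: List[T])(conforms: (T, T) => Boolean): List[T] =
      parents filterNot (p => conforms(qualTpe, p))
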
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index ac4f19f69d..2a27daa5f9 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -580,6 +580,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
REF(sym.owner.sourceModule) DOT sym
}
+ @inline private def bitmapOperation[T](field: Symbol, transientCase: => T, privateCase: => T, rest: => T): T =
+ if (field.accessed.hasAnnotation(TransientAttr))
+ transientCase
+ else if (field.hasFlag(PRIVATE | notPRIVATE))
+ privateCase
+ else
+ rest
+
/** Add all new definitions to a non-trait class
* These fall into the following categories:
* - for a trait interface:
@@ -672,14 +680,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
import lazyVals._
- def bitmapOperation[T](field: Symbol, transientCase: => T, privateCase: => T, rest: => T): T =
- if (field.accessed.hasAnnotation(TransientAttr))
- transientCase
- else if (field.hasFlag(PRIVATE) || field.hasFlag(notPRIVATE))
- privateCase
- else
- rest
-
/**
* Private or transient lazy vals use bitmaps that are private for the class context,
* unlike public or protected vals, which can use inherited bitmaps.
@@ -892,8 +892,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case DefDef(mods, name, tp, vp, tpt, rhs)
if sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.hasFlag(BRIDGE) =>
val attrThis =
- if (clazz.isImplClass) gen.mkAttributedIdent(vp.head.head.symbol)
- else gen.mkAttributedThis(clazz)
+ if (clazz.isImplClass) {
+ gen.mkAttributedIdent(vp.head.head.symbol)
+ // Martin to Hubert I think this can be replaced by selfRef(tree.pos)
+ } else
+ gen.mkAttributedThis(clazz)
val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, rhs)
treeCopy.DefDef(stat, mods, name, tp, vp, tpt, typedPos(stat.pos)(rhs1))
case _ => stat
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index e33491aa29..96158fb451 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -21,28 +21,44 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def changesBaseClasses = true
override def keepsTypeParams = true
- /** Concrete types for specialization */
-// final lazy val concreteTypes = List(definitions.IntClass.tpe, definitions.DoubleClass.tpe)
-
type TypeEnv = immutable.Map[Symbol, Type]
- def emptyEnv: TypeEnv = immutable.ListMap.empty[Symbol, Type]
+ def emptyEnv: TypeEnv = Map[Symbol, Type]()
+ private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
import definitions.{
- RootClass, BooleanClass, UnitClass, ArrayClass, ScalaValueClasses,
+ RootClass, BooleanClass, UnitClass, ArrayClass,
+ ScalaValueClasses, isValueClass, isScalaValueType,
SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass,
- AnyRefClass, Predef_AnyRef, ObjectClass,
- isValueClass
+ AnyRefClass, ObjectClass, Predef_AnyRef,
+ uncheckedVarianceClass
+ }
+ private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass
+ private def hasSpecializedFlag(sym: Symbol) = sym hasFlag SPECIALIZED
+ private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized
+ private def specializedOn(sym: Symbol) = sym getAnnotation SpecializedClass match {
+ case Some(AnnotationInfo(_, args, _)) => args
+ case _ => Nil
}
- private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass
- private def isSpecializedOnAnyRef(sym: Symbol) = sym.getAnnotation(SpecializedClass) match {
- case Some(AnnotationInfo(_, args, _)) => args.exists(_.symbol == Predef_AnyRef)
- case _ => false
+ // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
+ // then pos/spec-List.scala fails - why? Does this kind of check fail
+ // for similar reasons? Does `sym.isAbstractType` make a difference?
+ private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = (
+ specializedOn(sym).exists(_.symbol == Predef_AnyRef) // specialized on AnyRef
+ && !isValueClass(tp.typeSymbol)
+ && isBoundedGeneric(tp)
+ )
+ private def isBoundedGeneric(tp: Type) = tp match {
+ case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
+ case TypeRef(_, sym, _) => !isValueClass(sym)
+ case _ => false
}
- private def specializedOn(sym: Symbol) = sym.getAnnotation(SpecializedClass) match {
- case Some(AnnotationInfo(_, args, _)) => args
- case _ => Nil
+ @inline private def debuglog(msg: => String) {
+ if (settings.debug.value) log(msg)
+ }
+ @inline private def ifDebug(body: => Unit) {
+ if (settings.debug.value) { body }
}
object TypeEnv {
@@ -50,7 +66,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the given args. Expects the lists to have the same length.
*/
def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = {
- assert(sym.info.typeParams.length == args.length, sym + " args: " + args)
+ ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args))
+
emptyEnv ++ (sym.info.typeParams zip args filter (kv => isSpecialized(kv._1)))
}
@@ -61,14 +78,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
- val t2tpopt = t2 get sym
- // log("includes: " + t2tpopt.map(_.getClass))
- // log(tpe.getClass)
- t2tpopt match {
- case Some(t2tp) => t2tp == tpe || {
- (!primitiveTypes.contains(tpe)) && (!isValueClass(t2tp.typeSymbol)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
- }
- case None => false
+ t2 get sym exists { t2tp =>
+ (tpe == t2tp) || !(isScalaValueType(tpe) || isScalaValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
}
}
@@ -81,21 +92,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* to a type for which `sym' is specialized.
*/
def isValid(env: TypeEnv, sym: Symbol): Boolean = {
- def validBinding(tvar: Symbol, tpe: Type, sym: Symbol) = (
- isSpecialized(tvar)
- && sym.typeParams.contains(tvar)
- && concreteTypes(tvar).contains(tpe)
- )
- env forall {
- case (tvar, tpe) =>
-// log("isValid: " + env + " sym: " + sym + " sym.tparams: " + sym.typeParams)
-// log("Flag " + tvar + ": " + tvar.hasAnnotation(SpecializedClass))
-// log("tparams contains: " + sym.typeParams.contains(tvar))
-// log("concreteTypes: " + concreteTypes.contains(tpe))
-
- validBinding(tvar, tpe, sym) || (
- (sym.owner != RootClass) && validBinding(tvar, tpe, sym.owner)
- )
+ env forall { case (tvar, tpe) =>
+ isSpecialized(tvar) && (concreteTypes(tvar) contains tpe) && {
+ (sym.typeParams contains tvar) ||
+ (sym.owner != RootClass && (sym.owner.typeParams contains tvar))
+ }
}
}
}
@@ -107,22 +108,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* 'cls' itself if cls is not a specialized subclass.
*/
def genericClass(cls: Symbol): Symbol =
- if (cls.hasFlag(SPECIALIZED))
- cls.info.parents.head.typeSymbol
- else
- cls
+ if (hasSpecializedFlag(cls)) cls.info.parents.head.typeSymbol
+ else cls
/** Map a method symbol to a list of its specialized overloads in the same class. */
- private val overloads: mutable.Map[Symbol, List[Overload]] = new mutable.HashMap[Symbol, List[Overload]] {
- override def default(key: Symbol): List[Overload] = Nil
- }
+ private val overloads: mutable.Map[Symbol, List[Overload]] =
+ new mutable.HashMap[Symbol, List[Overload]] {
+ override def default(key: Symbol): List[Overload] = Nil
+ }
case class Overload(sym: Symbol, env: TypeEnv) {
- override def toString: String =
- "specialized overload " + sym + " in " + env
+ override def toString = "specialized overload " + sym + " in " + env
}
-
/** Just to mark uncheckable */
override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = new SpecializationPhase(prev)
class SpecializationPhase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) {
@@ -181,11 +179,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
override lazy val degenerate = {
- log("degenerate: " + target +
- " stv tparams: " + specializedTypeVars(target.info.typeParams map (_.info)) +
- " stv info: " + specializedTypeVars(target.info.resultType))
- !(specializedTypeVars(target.info.typeParams map (_.info))
- -- specializedTypeVars(target.info.resultType)).isEmpty
+ val stvTypeParams = specializedTypeVars(target.info.typeParams map (_.info))
+ val stvResult = specializedTypeVars(target.info.resultType)
+
+ log("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult)
+
+ (stvTypeParams -- stvResult).nonEmpty
}
}
@@ -193,12 +192,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private val info: mutable.Map[Symbol, SpecializedInfo] = new mutable.HashMap[Symbol, SpecializedInfo]
/** Has `clazz' any type parameters that need be specialized? */
- def hasSpecializedParams(clazz: Symbol): Boolean =
- !specializedParams(clazz).isEmpty
+ def hasSpecializedParams(clazz: Symbol) =
+ clazz.info.typeParams exists isSpecialized
/** Return specialized type parameters. */
def specializedParams(sym: Symbol): List[Symbol] =
- splitParams(sym.info.typeParams)._1
+ sym.info.typeParams filter isSpecialized
def splitParams(tps: List[Symbol]) =
tps partition isSpecialized
@@ -208,49 +207,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* specialized type.
*/
def survivingArgs(sym: Symbol, args: List[Type]): List[Type] =
- for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !isSpecialized(tvar) || !isPrimitive(tpe))
+ for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !isSpecialized(tvar) || !isScalaValueType(tpe))
yield tpe
- private def isBoundedGeneric(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
- case TypeRef(_, sym, _) => !isValueClass(sym)
- case _ => false
- }
-
- private def str(tp: Type): AnyRef = tp match {
- case TypeRef(pre, sym, args) => (str(pre), sym, args)
- case _ => "nontpref sym: " + tp.typeSymbol + ", " + tp.getClass
- }
-
- // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe), then pos/spec-List.scala fails - why?
- // Does this kind of check fail for similar reasons? Does `sym.isAbstractType` make a difference?
- private def subtypeOfAnyRef(tp: Type) = {
- // log(tp + " <:< AnyRef? tp has symbol: " + tp.typeSymbol + ", " + tp.typeSymbol.ownerChain)
- !isValueClass(tp.typeSymbol) && isBoundedGeneric(tp)
- }
-
val specializedType = new TypeMap {
override def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if args.nonEmpty =>
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- // log("Mapping " + args.map(_.typeSymbol) + ", from " + sym + " with params " + sym.typeParams + " with annots " + sym.typeParams.map(_.annotations))
val args1 = (args zip sym.typeParams) map {
- case (x, orig) if isSpecializedOnAnyRef(orig) && subtypeOfAnyRef(x) => AnyRefClass.tpe // used to be: case x if x <:< AnyRefClass.tpe
- case (x, _) => x
+ case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
+ case (tp, _) => tp
}
- // log("!!! specializedType " + tp + ", " + pre1 + ", " + args1)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
- case Some(sym1) =>
- val surviving = survivingArgs(sym, args)
- // log("!!! survivers: " + surviving)
- assert(sym1.info.typeParams.length == surviving.length, sym1)
- typeRef(pre1, sym1, surviving)
- case None =>
- typeRef(pre1, sym, args)
+ case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args))
+ case None => typeRef(pre1, sym, args)
}
- case _ => tp // mapOver(tp)
+ case _ => tp
}
}
@@ -263,13 +237,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* type variables alphabetically.
*/
private def specializedName(sym: Symbol, env: TypeEnv): TermName = {
- val tvars = if (sym.isClass) env.keySet
- else specializedTypeVars(sym).intersect(env.keySet)
- val (methparams, others) = tvars.toList.partition(_.owner.isMethod)
- val tvars1 = methparams sortBy (_.name.toString)
- val tvars2 = others sortBy (_.name.toString)
- if (settings.debug.value) log("specName(%s) env: %s tvars: %s ".format(sym, env, (tvars1, tvars2)))
- specializedName(sym.name, tvars1 map env, tvars2 map env)
+ val tvars = (
+ if (sym.isClass) env.keySet
+ else specializedTypeVars(sym).intersect(env.keySet)
+ )
+ val (methparams, others) = tvars.toList sortBy ("" + _.name) partition (_.owner.isMethod)
+ debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars)
+
+ specializedName(sym.name, methparams map env, others map env)
}
/** Specialize name for the two list of types. The first one denotes
@@ -296,46 +271,38 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the types `sym' should be specialized at. This may be some of the primitive types
* or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a
* subtype of AnyRef (T <: AnyRef).
+ * These are in a meaningful order for stability purposes.
*/
- def concreteTypes(sym: Symbol): List[Type] = {
- sym.getAnnotation(SpecializedClass) match {
- case Some(AnnotationInfo(_, args, _)) =>
- args match {
- case Nil =>
- log(sym + " specialized on everything")
- primitiveTypes
- case _ =>
- val tpes = args map {
- t => if (t.symbol == Predef_AnyRef) {
- if (isBoundedGeneric(sym.tpe))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
- AnyRefClass.tpe
- } else t.symbol.companionClass.tpe
- }
- log(sym + " specialized on " + tpes)
- tpes
- }
- case _ =>
- Nil
+ def concreteTypes(sym: Symbol): List[Type] = (
+ if (!isSpecialized(sym)) Nil // no @specialized Annotation
+ else specializedOn(sym) match {
+ case Nil => primitiveTypes // specialized on everything
+ case args => // specialized on args
+ (args map { tp =>
+ if (tp.symbol == Predef_AnyRef) {
+ if (isBoundedGeneric(sym.tpe))
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+ AnyRefClass.tpe
+ }
+ else tp.symbol.companionClass.tpe
+ }).sorted
}
- }
+ )
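
As a reminder of the source-level forms that concreteTypes decodes (hypothetical user code, not part of the patch):

    class Box[@specialized T](val x: T)                // no arguments: specialized on all primitive types
    class Pair[@specialized(Int, Double) A](val a: A)  // explicit list: specialized on the named value classes
    class Ref[@specialized(AnyRef) R](val r: R)        // AnyRef: a fresh R$sp <: AnyRef type parameter is generated later
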
/** Return a list of all type environments for all specializations
* of @specialized types in `tps'.
*/
private def specializations(tps: List[Symbol]): List[TypeEnv] = {
- val stps = tps filter isSpecialized
- count(stps, concreteTypes _) map (tps => immutable.HashMap(stps zip tps: _*))
- }
-
- /** Generate all arrangements with repetitions from the list of values,
- * with 'pos' positions. For example, count(2, List(1, 2)) yields
- * List(List(1, 1), List(1, 2), List(2, 1), List(2, 2))
- */
- private def count[A, V](xs: List[A], values: A => List[V]): List[List[V]] = {
- if (xs.isEmpty) Nil
- else if (xs.tail.isEmpty) values(xs.head) map (_ :: Nil)
- else for (v <- values(xs.head); vs <- count(xs.tail, values)) yield v :: vs
+ // the keys in each TypeEnv
+ val keys: List[Symbol] = tps filter isSpecialized
+ // create every combination (Cartesian product) of concrete types
+ def loop(ctypes: List[List[Type]]): List[List[Type]] = ctypes match {
+ case Nil => Nil
+ case set :: Nil => set map (x => List(x))
+ case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
+ }
+ // zip the keys with each combination to create a TypeEnv
+ loop(keys map concreteTypes) map (keys zip _ toMap)
}
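
The loop helper above enumerates every assignment of concrete types to the specialized keys, i.e. the Cartesian product of the candidate lists. A standalone sketch on plain values with the same recursion (hypothetical name):

    def cartesian[A](sets: List[List[A]]): List[List[A]] = sets match {
      case Nil         => Nil
      case set :: Nil  => set map (x => List(x))
      case set :: rest => for (x <- set; xs <- cartesian(rest)) yield x :: xs
    }
    // cartesian(List(List(1, 2), List(1, 2))) ==
    //   List(List(1, 1), List(1, 2), List(2, 1), List(2, 2))
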
/** Does the given tpe need to be specialized in the environment 'env'?
@@ -350,18 +317,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
(isNormalizedMember(sym) && info(sym).typeBoundsIn(env))
}
- def isNormalizedMember(m: Symbol): Boolean =
- (m.hasFlag(SPECIALIZED) && (info.get(m) match {
- case Some(NormalizedMember(_)) => true
- case _ => false
- }))
-
-
- def specializedTypeVars(tpe: List[Type]): immutable.Set[Symbol] =
- tpe.foldLeft(immutable.ListSet.empty[Symbol]: immutable.Set[Symbol]) {
- (s, tp) => s ++ specializedTypeVars(tp)
- }
-
+ def isNormalizedMember(m: Symbol) = hasSpecializedFlag(m) && (info get m exists {
+ case NormalizedMember(_) => true
+ case _ => false
+ })
+ def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = {
+ val buf = Set.newBuilder[Symbol]
+ tpes foreach (tp => buf ++= specializedTypeVars(tp))
+ buf.result
+ }
def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] =
atPhase(currentRun.typerPhase)(specializedTypeVars(sym.info))
@@ -375,70 +339,57 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
specializedTypeVars(tpe.normalize)
- else if (sym.isTypeParameter && isSpecialized(sym)
- || (sym.isTypeSkolem && isSpecialized(sym.deSkolemize)))
- immutable.ListSet.empty + sym
+ else if (sym.isTypeParameter && isSpecialized(sym) || (sym.isTypeSkolem && isSpecialized(sym.deSkolemize)))
+ Set(sym)
else if (sym == ArrayClass)
specializedTypeVars(args)
- else {
- val extra = for ((tp, arg) <- sym.typeParams.zip(args) if isSpecialized(tp))
- yield specializedTypeVars(arg).toList
- immutable.ListSet.empty[Symbol] ++ extra.flatten
- }
-
- case PolyType(tparams, resTpe) =>
- specializedTypeVars(tparams map (_.info)) ++ specializedTypeVars(resTpe)
-
- case NullaryMethodType(resTpe) => // since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
- specializedTypeVars(resTpe)
- case MethodType(argSyms, resTpe) =>
- specializedTypeVars(argSyms map (_.tpe)) ++ specializedTypeVars(resTpe)
-
- case ExistentialType(_, res) => specializedTypeVars(res)
- case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
- case TypeBounds(hi, lo) => specializedTypeVars(hi) ++ specializedTypeVars(lo) // @I: it's (lo, hi) - swap them?
- case _ => immutable.ListSet.empty[Symbol]
+ else
+ specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if isSpecialized(tp) => arg })
+
+ case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: tparams.map(_.info))
+ // since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
+ case NullaryMethodType(resTpe) => specializedTypeVars(resTpe)
+ case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
+ case ExistentialType(_, res) => specializedTypeVars(res)
+ case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
+ case TypeBounds(lo, hi) => specializedTypeVars(List(lo, hi))
+ case _ => Set()
}
- // holds mappings from regular type parameter symbols to symbols of specialized type parameters
- // which are subtypes of AnyRef
+ // holds mappings from regular type parameter symbols to symbols of
+ // specialized type parameters which are subtypes of AnyRef
private val anyrefSpecCache = mutable.Map[Symbol, Symbol]()
+
/** Returns the type parameter in the specialized class `cls` that corresponds to type parameter
* `sym` in the original class. It will create it if needed or use the one from the cache.
*/
- private def typeParamSubAnyRef(sym: Symbol, cls: Symbol) = anyrefSpecCache.get(sym) match {
- case Some(tsub) => tsub.tpe
- case None =>
- val tparam = cls.newTypeParameter(sym.pos, newTypeName(sym.name.toString + "$sp"))
- tparam.setInfo(TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe))
- anyrefSpecCache.put(sym, tparam)
- tparam.tpe
- }
-
- /** Cleans the anyrefSpecCache of all type parameter symbols of a class */
- private def cleanAnyRefSpecCache(cls: Symbol, decls: List[Symbol]) = {
- // remove class type parameters
- cls.tpe match {
- case PolyType(tparams, _) => for (tp <- tparams) anyrefSpecCache.remove(tp)
- case _ =>
- }
-
- // remove mappings from normalized members to
- for (d <- decls) d.tpe match {
- case PolyType(tparams, _) => for (tp <- tparams) anyrefSpecCache.remove(tp)
- case _ =>
+ private def typeParamSubAnyRef(sym: Symbol, cls: Symbol) = (
+ anyrefSpecCache.getOrElseUpdate(sym,
+ cls.newTypeParameter(sym.pos, newTypeName(sym.name + "$sp"))
+ setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
+ ).tpe
+ )
+
+ /** Cleans the anyrefSpecCache of all type parameter symbols of a class.
+ */
+ private def cleanAnyRefSpecCache(cls: Symbol, decls: List[Symbol]) = (
+ // remove class type parameters and those of normalized members.
+ cls :: decls foreach {
+ _.tpe match {
+ case PolyType(tparams, _) => anyrefSpecCache --= tparams
+ case _ => ()
+ }
}
- }
+ )
// holds mappings from members to the type variables in the class that they were already specialized for,
// so that they don't get specialized twice (this is for AnyRef specializations)
- private val wasSpecializedForTypeVars = mutable.Map[Symbol, immutable.Set[Symbol]]() withDefaultValue immutable.Set[Symbol]()
-
- def isPrimitive(tpe: Type) = primitiveTypes contains tpe
+ private val wasSpecializedForTypeVars =
+ mutable.Map[Symbol, immutable.Set[Symbol]]() withDefaultValue immutable.Set[Symbol]()
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
- params.filter(p => !isSpecialized(p) || !isPrimitive(env(p)))
+ params.filter(p => !isSpecialized(p) || !isScalaValueType(env(p)))
/** Produces the symbols from type parameters `syms` of the original owner,
* in the given type environment `env`. The new owner is `nowner`.
@@ -535,7 +486,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (p.typeSymbol.isTrait) res += stp
else if (currentRun.compiles(clazz))
reporter.warning(clazz.pos, p.typeSymbol + " must be a trait. Specialized version of "
- + clazz + " will inherit generic " + p) // TODO change to error
+ + clazz + " will inherit generic " + p) // TODO change to error
}
res.reverse.toList
}
@@ -550,9 +501,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (newClassTParams.isEmpty) infoType else PolyType(newClassTParams, infoType)
}
- // log("specializedClass " + cls + ": " + specializedInfoType)
atPhase(phase.next)(cls.setInfo(specializedInfoType))
-
val fullEnv = outerEnv ++ env
/** Enter 'sym' in the scope of the current specialized class. It's type is
@@ -587,56 +536,57 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
*/
def forwardToOverload(m: Symbol): Symbol = {
- // log("forw. to overload " + m)
- val specMember = enterMember(m.cloneSymbol(cls)).setFlag(OVERRIDE | SPECIALIZED).resetFlag(DEFERRED | CASEACCESSOR) // m1
- val om = specializedOverload(cls, m, env).setFlag(OVERRIDE)
- // log("created specialized overload" + om)
+ val specMember = (
+ enterMember(m cloneSymbol cls)
+ setFlag (OVERRIDE | SPECIALIZED)
+ resetFlag (DEFERRED | CASEACCESSOR)
+ ) // m1
- var original = info.get(m) match {
+ val om = specializedOverload(cls, m, env).setFlag(OVERRIDE)
+ val original = info.get(m) match {
case Some(NormalizedMember(tg)) => tg
- case _ => m
+ case _ => m
}
+ info(specMember) = Forward(om)
+ info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
+ typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- info(specMember) = Forward(om)
- info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
- typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) = Overload(om, typeEnv(om)) :: overloads(specMember)
-
+ overloads(specMember) ::= Overload(om, typeEnv(om))
enterMember(om)
}
- log("specializedClass: " + cls)
- for (m <- normMembers if needsSpecialization(outerEnv ++ env, m) && satisfiable(fullEnv)) {
- // log(" * looking at: " + m + "; wasspecfor: " + wasSpecializedForTypeVars(m))
- if (!m.isDeferred) addConcreteSpecMethod(m)
-
+ for (m <- normMembers ; if needsSpecialization(outerEnv ++ env, m) && satisfiable(fullEnv)) {
+ if (!m.isDeferred)
+ addConcreteSpecMethod(m)
// specialized members have to be overridable.
- if (m.hasFlag(PRIVATE))
+ if (m.isPrivate)
m.resetFlag(PRIVATE).setFlag(PROTECTED)
if (m.isConstructor) {
- val specCtor = enterMember(m.cloneSymbol(cls).setFlag(SPECIALIZED))
+ val specCtor = enterMember(m.cloneSymbol(cls) setFlag SPECIALIZED)
info(specCtor) = Forward(m)
-
- } else if (isNormalizedMember(m)) { // methods added by normalization
+ }
+ else if (isNormalizedMember(m)) { // methods added by normalization
val NormalizedMember(original) = info(m)
- if (!conflicting(env ++ typeEnv(m))) {
+ if (nonConflicting(env ++ typeEnv(m))) {
if (info(m).degenerate) {
- if (settings.debug.value) log("degenerate normalized member " + m + " info(m): " + info(m))
+ debuglog("degenerate normalized member " + m + " info(m): " + info(m))
val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- info(specMember) = Implementation(original)
+
+ info(specMember) = Implementation(original)
typeEnv(specMember) = env ++ typeEnv(m)
- } else {
- // log("om")
- val om = forwardToOverload(m)
- if (settings.debug.value) log("normalizedMember " + m + " om: " + om + " typeEnv(om): " + typeEnv(om))
}
- } else
+ else debuglog({
+ val om = forwardToOverload(m)
+ "normalizedMember " + m + " om: " + om + " typeEnv(om): " + typeEnv(om)
+ })
+ }
+ else
log("conflicting env for " + m + " env: " + env)
-
- } else if (m.isDeferred) { // abstract methods
+ }
+ else if (m.isDeferred) { // abstract methods
val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- if (settings.debug.value) log("deferred " + specMember.fullName + " is forwarded")
+ debuglog("deferred " + specMember.fullName + " is forwarded")
info(specMember) = new Forward(specMember) {
override def target = m.owner.info.member(specializedName(m, env))
@@ -648,20 +598,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else if (m.isValue && !m.isMethod) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
- val sym = cls.newMethod(field.pos, name)
- .setFlag(SPECIALIZED | m.getter(clazz).flags)
- .resetFlag(LOCAL | PARAMACCESSOR | CASEACCESSOR | LAZY) // we rely on the super class to initialize param accessors
+ val sym = (
+ cls.newMethod(field.pos, name)
+ setFlag (SPECIALIZED | m.getter(clazz).flags)
+ resetFlag (LOCAL | PARAMACCESSOR | CASEACCESSOR | LAZY)
+ // we rely on the super class to initialize param accessors
+ )
info(sym) = SpecializedAccessor(field)
sym
}
-
def overrideIn(clazz: Symbol, sym: Symbol) = {
- val sym1 = sym.cloneSymbol(clazz)
- .setFlag(OVERRIDE | SPECIALIZED)
- .resetFlag(DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
- sym1.setInfo(sym1.info.asSeenFrom(clazz.tpe, sym1.owner))
+ val sym1 = (
+ sym cloneSymbol clazz
+ setFlag (OVERRIDE | SPECIALIZED)
+ resetFlag (DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
+ )
+ sym1 setInfo sym1.info.asSeenFrom(clazz.tpe, sym1.owner)
}
-
val specVal = specializedOverload(cls, m, env)
addConcreteSpecMethod(m)
@@ -669,21 +622,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
enterMember(specVal)
// create accessors
- if (settings.debug.value)
- log("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
+ debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)).setInfo(MethodType(List(), specVal.info))
val origGetter = overrideIn(cls, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
enterMember(origGetter)
- if (settings.debug.value) log("created accessors: " + specGetter + " orig: " + origGetter)
+ debuglog("created accessors: " + specGetter + " orig: " + origGetter)
clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa =>
val cfaGetter = overrideIn(cls, cfa)
info(cfaGetter) = SpecializedAccessor(specVal)
enterMember(cfaGetter)
- if (settings.debug.value) log("found case field accessor for " + m + " added override " + cfaGetter);
+ debuglog("found case field accessor for " + m + " added override " + cfaGetter);
}
if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
@@ -712,49 +664,40 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
cls
}
- log("specializeClass " + clazz.fullName)
- val decls1 = (clazz.info.decls.toList flatMap { m: Symbol =>
+ val decls1 = clazz.info.decls.toList flatMap { m: Symbol =>
if (m.isAnonymousClass) List(m) else {
normalizeMember(m.owner, m, outerEnv) flatMap { normalizedMember =>
val ms = specializeMember(m.owner, normalizedMember, outerEnv, clazz.info.typeParams)
- // log(">>> specialized: " + normalizedMember + " into " + ms)
-// atPhase(currentRun.typerPhase)(println("normalizedMember.info: " + normalizedMember.info)) - bring the info to the typer phase
// interface traits have concrete members now
if (ms.nonEmpty && clazz.isTrait && clazz.isInterface)
clazz.resetFlag(INTERFACE)
if (normalizedMember.isMethod) {
val newTpe = subst(outerEnv, normalizedMember.info)
- if (newTpe != normalizedMember.info) // only do it when necessary, otherwise the method type might be at a later phase already
- normalizedMember.updateInfo(newTpe) :: ms
- else
- normalizedMember :: ms
- } else
- normalizedMember :: ms
+ // only do it when necessary, otherwise the method type might be at a later phase already
+ if (newTpe != normalizedMember.info) {
+ normalizedMember updateInfo newTpe
+ }
+ }
+ normalizedMember :: ms
}
}
- })
- //log("decls before: " + decls1 + ", overloads: " + overloads.mkString("\n"))
-
- var hasSubclasses = false
- // log("For " + clazz + " - " + specializations(clazz.info.typeParams))
- for (env <- specializations(clazz.info.typeParams) if satisfiable(env)) {
- val spc = specializedClass(env, decls1)
- // log("entered " + spc + " in " + clazz.owner)
- hasSubclasses = true
+ }
+
+ val subclasses = specializations(clazz.info.typeParams) filter satisfiable
+ subclasses foreach { env =>
+ val spc = specializedClass(env, decls1)
val existing = clazz.owner.info.decl(spc.name)
+
// a symbol for the specialized class already exists if there's a classfile for it.
// keeping both crashes the compiler on test/files/pos/spec-Function1.scala
- if (existing != NoSymbol) {
- // log("removing existing symbol for "+ existing)
+ if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- }
- atPhase(phase.next)(clazz.owner.info.decls enter spc) //!! assumes fully specialized classes
+ atPhase(phase.next)(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
- if (hasSubclasses) clazz.resetFlag(FINAL)
+ if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
- //log("decls: " + decls1 + ", overloads: " + overloads.mkString("\n"))
decls1
}
@@ -768,42 +711,50 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* // etc.
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
- if (settings.debug.value) log("normalizeMember: " + sym.fullName)
- if (sym.isMethod && !atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) {
- var (specializingOn, _) = splitParams(sym.info.typeParams)
- val unusedStvars = specializingOn filterNot (specializedTypeVars(sym.info).toList contains)
- if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
- reporter.warning(sym.pos, "%s %s unused or used in non-specializable positions."
- .format(unusedStvars.mkString("", ", ", ""), if (unusedStvars.length == 1) "is" else "are"))
- unusedStvars foreach (_.removeAnnotation(SpecializedClass))
- specializingOn = specializingOn filterNot (unusedStvars contains)
- // tps = sym.info.typeParams filterNot (_.hasAnnotation(SpecializedClass))
+ debuglog("normalizeMember: " + sym.fullName)
+ sym :: (
+ if (!sym.isMethod || atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) Nil
+ else {
+ var specializingOn = specializedParams(sym)
+ val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
+
+ if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
+ reporter.warning(sym.pos,
+ "%s %s unused or used in non-specializable positions.".format(
+ unusedStvars.mkString("", ", ", ""),
+ if (unusedStvars.length == 1) "is" else "are")
+ )
+ unusedStvars foreach (_ removeAnnotation SpecializedClass)
+ specializingOn = specializingOn filterNot (unusedStvars contains)
+ }
+ for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
+ val tps = survivingParams(sym.info.typeParams, env0)
+ val specMember = sym.cloneSymbol(owner).setFlag(SPECIALIZED).resetFlag(DEFERRED)
+ val env = mapAnyRefsInSpecSym(env0, sym, specMember)
+ val (keys, vals) = env.toList.unzip
+
+ specMember.name = specializedName(sym, env)
+ log("normalizing: " + sym + " to " + specMember + " with params " + tps)
+
+ typeEnv(specMember) = outerEnv ++ env
+ val tps1 = produceTypeParameters(tps, specMember, env)
+ tps1 foreach (tp => tp.setInfo(tp.info.subst(keys, vals)))
+
+ // the cloneInfo is necessary so that method parameter symbols are cloned at the new owner
+ val methodType = sym.info.resultType.subst(keys ++ tps, vals ++ tps1.map(_.tpe)).cloneInfo(specMember)
+ specMember setInfo polyType(tps1, methodType)
+
+ debuglog("expanded member: " + sym + ": " + sym.info +
+ " -> " + specMember +
+ ": " + specMember.info +
+ " env: " + env
+ )
+ info(specMember) = NormalizedMember(sym)
+ overloads(sym) ::= Overload(specMember, env)
+ specMember
+ }
}
- val res = sym :: (for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
- val tps = survivingParams(sym.info.typeParams, env0)
- val specMember = sym.cloneSymbol(owner).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- val env = mapAnyRefsInSpecSym(env0, sym, specMember)
- val keys = env.keysIterator.toList;
- val vals = env.valuesIterator.toList
- specMember.name = specializedName(sym, env)
- log("normalizing: " + sym + " to " + specMember + " with params " + tps)
-
- typeEnv(specMember) = outerEnv ++ env
- val tps1 = produceTypeParameters(tps, specMember, env) // cloneSymbols(tps)
- for (tp <- tps1) tp.setInfo(tp.info.subst(keys, vals))
- // the cloneInfo is necessary so that method parameter symbols are cloned at the new owner
- val methodType = sym.info.resultType.subst(keys ::: tps, vals ::: (tps1 map (_.tpe))).cloneInfo(specMember)
-
- specMember.setInfo(polyType(tps1, methodType))
-
- if (settings.debug.value) log("expanded member: " + sym + ": " + sym.info + " -> " + specMember + ": " + specMember.info + " env: " + env)
- info(specMember) = NormalizedMember(sym)
- overloads(sym) = Overload(specMember, env) :: overloads(sym)
- specMember
- })
- //stps foreach (_.removeAttribute(SpecializedClass))
- res
- } else List(sym)
+ )
}
  /** Specialize member `m' w.r.t. the outer environment and the type
@@ -817,40 +768,46 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializeOn(tparams: List[Symbol]): List[Symbol] =
for (spec0 <- specializations(tparams)) yield {
val spec = mapAnyRefsInOrigCls(spec0, owner)
- // log("outer env: " + outerEnv)
- // log(spec)
- if (sym.hasFlag(PRIVATE)) sym.resetFlag(PRIVATE).setFlag(PROTECTED)
+ if (sym.isPrivate)
+ sym.resetFlag(PRIVATE).setFlag(PROTECTED)
+
sym.resetFlag(FINAL)
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
+
log("sym " + specMember + " was specialized for type vars " + wasSpecializedForTypeVars(specMember))
- if (settings.debug.value) log("added specialized overload: %s in env: %s".format(specMember, typeEnv(specMember)))
- overloads(sym) = Overload(specMember, spec) :: overloads(sym)
- // log("member spec: " + sym + " with owner " + sym.owner + " in env " + spec + " is " + specMember + " with tp " + specMember.tpe)
+ debuglog("added specialized overload: %s in env: %s".format(specMember, typeEnv(specMember)))
+
+ overloads(sym) ::= Overload(specMember, spec)
specMember
}
if (sym.isMethod) {
- if (settings.debug.value) log("specializeMember %s with tps: %s stvars(sym): %s".format(sym, tps, specializedTypeVars(sym)))
- val tps1 = if (sym.isConstructor) tps filter (tp => sym.info.paramTypes.contains(tp)) else tps
+ debuglog("specializeMember %s with tps: %s stvars(sym): %s".format(sym, tps, specializedTypeVars(sym)))
+
+ val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
val tps2 = tps1 intersect specializedTypeVars(sym).toList
- if (!sym.isDeferred) addConcreteSpecMethod(sym)
+ if (!sym.isDeferred)
+ addConcreteSpecMethod(sym)
- val lst = specializeOn(tps2) map {m => info(m) = SpecialOverload(sym, typeEnv(m)); m}
- lst
- } else
- List()
+ val ms = specializeOn(tps2)
+ ms foreach (m => info(m) = SpecialOverload(sym, typeEnv(m)))
+ ms
+ }
+ else Nil
}
/** Return the specialized overload of `m', in the given environment. */
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
- val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
+ val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
specMember.name = specializedName(sym, env)
- specMember.setInfo(subst(env, specMember.info.asSeenFrom(owner.thisType, sym.owner)))
- .setFlag(SPECIALIZED)
- .resetFlag(DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
+ (specMember
+ setInfo subst(env, specMember.info.asSeenFrom(owner.thisType, sym.owner))
+ setFlag (SPECIALIZED)
+ resetFlag (DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
+ )
}
/** For each method m that overrides inherited method m', add a special
@@ -864,8 +821,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* this method will return List('apply$spec$II')
*/
private def specialOverrides(clazz: Symbol): List[Symbol] = {
- // log("--> specialOverrides(" + clazz + ")")
-
/** Return the overridden symbol in syms that needs a specialized overriding symbol,
* together with its specialization environment. The overridden symbol may not be
* the closest to 'overriding', in a given hierarchy.
@@ -874,81 +829,80 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* * m overrides a method whose type contains specialized type variables
* * there is a valid specialization environment that maps the overridden method type to m's type.
*/
- def needsSpecialOverride(overriding: Symbol, syms: List[Symbol]): (Symbol, TypeEnv) = {
- def missingSpecializations(baseTvar: Symbol, derivedTvar: Symbol): immutable.Set[Type] = {
- val baseSet = concreteTypes(baseTvar).toSet
- val derivedSet = concreteTypes(derivedTvar).toSet
- baseSet diff derivedSet
- }
-
+ def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
- if (currentRun.compiles(overriding))
- for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams);
- val missing = missingSpecializations(baseTvar, derivedTvar)
- if missing.nonEmpty)
- reporter.error(derivedTvar.pos,
- "Type parameter has to be specialized at least for the same types as in the overridden method. Missing " +
- "types: " + missing.mkString("", ", ", ""))
+ for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams)) {
+ val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet
+ if (missing.nonEmpty) {
+ reporter.error(derivedTvar.pos,
+ "Type parameter has to be specialized at least for the same types as in the overridden method. Missing "
+ + "types: " + missing.mkString("", ", ", "")
+ )
+ }
+ }
}
-
- // log("checking: " + overriding + " - isParamAccessor: " + overriding.isParamAccessor)
- if (!overriding.isParamAccessor) for (overridden <- syms) {
- if (settings.debug.value)
- log("Overridden: " + overridden.fullName + ": " + overridden.info
- + "\n by " + overriding.fullName + ": " + overriding.info)
- val stvars = specializedTypeVars(overridden.info)
- if (!stvars.isEmpty) {
- if (settings.debug.value) log("\t\tspecializedTVars: " + stvars)
- checkOverriddenTParams(overridden)
-
- val env = unify(overridden.info, overriding.info, emptyEnv, false)
- if (settings.debug.value)
- log("\t\tenv: " + env + "isValid: " + TypeEnv.isValid(env, overridden)
- + "found: " + atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env))))
- if (!TypeEnv.restrict(env, stvars).isEmpty
- && TypeEnv.isValid(env, overridden)
- && atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env))) != NoSymbol)
- return (overridden, env)
+ if (!overriding.isParamAccessor) {
+ for (overridden <- overriding.allOverriddenSymbols) {
+ debuglog(
+ "Overridden: " + overridden.fullName +
+ ": " + overridden.info +
+ "\n by " + overriding.fullName +
+ ": " + overriding.info
+ )
+ val stvars = specializedTypeVars(overridden.info)
+ if (stvars.nonEmpty) {
+ debuglog("\t\tspecializedTVars: " + stvars)
+ if (currentRun compiles overriding)
+ checkOverriddenTParams(overridden)
+
+ val env = unify(overridden.info, overriding.info, emptyEnv, false)
+ def atNext = atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env)))
+
+ debuglog("\t\tenv: " + env + "isValid: " + TypeEnv.isValid(env, overridden) + "found: " + atNext)
+ if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol)
+ return (overridden, env)
+ }
}
}
(NoSymbol, emptyEnv)
}
-
- val oms = new mutable.ListBuffer[Symbol]
- for (overriding <- clazz.info.decls;
- val (overridden, env) = needsSpecialOverride(overriding, overriding.allOverriddenSymbols)
- if overridden != NoSymbol) {
- val om = specializedOverload(clazz, overridden, env)
- log("Added specialized overload %s for %s in env: %s with type: %s".format(om, overriding.fullName, env, om.info))
- typeEnv(om) = env
- addConcreteSpecMethod(overriding)
- if (!overriding.isDeferred) { // concrete method
- // if the override is a normalized member, 'om' gets the implementation from
- // its original target, and adds the environment of the normalized member (that is,
- // any specialized /method/ type parameter bindings)
- info(om) = info.get(overriding) match {
- case Some(NormalizedMember(target)) =>
- typeEnv(om) = env ++ typeEnv(overriding)
- log(info.get(overriding))
- SpecialOverride(target)
- case _ =>
- log(overriding + ", " + overriding.info)
- SpecialOverride(overriding)
- }
- info(overriding) = Forward(om)
- om setPos overriding.pos
- } else { // abstract override
- if (settings.debug.value) log("abstract override " + overriding.fullName + " with specialized " + om.fullName)
- info(om) = Forward(overriding)
+ (clazz.info.decls flatMap { overriding =>
+ needsSpecialOverride(overriding) match {
+ case (NoSymbol, _) => None
+ case (overridden, env) =>
+ val om = specializedOverload(clazz, overridden, env)
+ log("Added specialized overload %s for %s in env: %s with type: %s".format(om, overriding.fullName, env, om.info))
+ typeEnv(om) = env
+ addConcreteSpecMethod(overriding)
+ info(om) = (
+ if (overriding.isDeferred) { // abstract override
+ debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ Forward(overriding)
+ }
+ else {
+ // if the override is a normalized member, 'om' gets the
+ // implementation from its original target, and adds the
+ // environment of the normalized member (that is, any
+ // specialized /method/ type parameter bindings)
+ val impl = info get overriding match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ target
+ case _ =>
+ overriding
+ }
+ info(overriding) = Forward(om setPos overriding.pos)
+ SpecialOverride(impl)
+ }
+ )
+ overloads(overriding) ::= Overload(om, env)
+ ifDebug(atPhase(phase.next)(assert(
+ overridden.owner.info.decl(om.name) != NoSymbol,
+ "Could not find " + om.name + " in " + overridden.owner.info.decls))
+ )
+ Some(om)
}
- overloads(overriding) = Overload(om, env) :: overloads(overriding)
- oms += om
- atPhase(phase.next)(
- assert(overridden.owner.info.decl(om.name) != NoSymbol,
- "Could not find " + om.name + " in " + overridden.owner.info.decls))
- }
- log("special overrides(%s) = %s".format(clazz, oms))
- oms.toList
+ }).toList
}
case object UnifyError extends scala.util.control.ControlThrowable
@@ -959,15 +913,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* If `strict` is true, a UnifyError is thrown if unification is impossible.
*/
- private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = {
-// println("\tunify \t" + tp1 + "\n\t\t" + tp2)
- //log("Unify - " + (tp1.getClass, tp2.getClass))
- (tp1, tp2) match {
+ private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = (tp1, tp2) match {
case (TypeRef(_, sym1, _), _) if isSpecialized(sym1) =>
log("Unify - basic case: " + tp1 + ", " + tp2)
- if (definitions.isValueClass(tp2.typeSymbol))
- env + ((sym1, tp2))
- else if (isSpecializedOnAnyRef(sym1) && subtypeOfAnyRef(tp2)) // u.t.b. tp2 <:< AnyRefClass.tpe
+ if (isValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
else
if (strict) throw UnifyError else env
@@ -990,17 +939,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (PolyType(_, res), other) =>
unify(res, other, env, strict)
case (ThisType(_), ThisType(_)) => env
- case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
- case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
- case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
- case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
+ case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
+ case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
+ case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
+ case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
case (RefinedType(_, _), RefinedType(_, _)) => env
- case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
- case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
+ case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
+ case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
case _ =>
log("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
env
- }
}
private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv =
@@ -1008,7 +956,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (!strict) unify(args._1, args._2, env, strict)
else {
val nenv = unify(args._1, args._2, emptyEnv, strict)
- if (env.keySet.intersect(nenv.keySet) == Set()) env ++ nenv
+ if (env.keySet intersect nenv.keySet isEmpty) env ++ nenv
else {
log("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
throw UnifyError
@@ -1016,8 +964,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- private def specializedTypes(tps: List[Symbol]) = tps.filter(_.hasAnnotation(SpecializedClass))
-
/** Map class symbols to the type environments where they were created. */
val typeEnv: mutable.Map[Symbol, TypeEnv] = new mutable.HashMap[Symbol, TypeEnv] {
override def default(key: Symbol) = emptyEnv
@@ -1034,44 +980,38 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def subst(env: TypeEnv, tpe: Type): Type = {
class FullTypeMap(from: List[Symbol], to: List[Type]) extends SubstTypeMap(from, to) {
-
override def mapOver(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
- val parents1 = parents mapConserve (this);
+ val parents1 = parents mapConserve this
val declsList = decls.toList
- val decls1 = mapOver(declsList);
+ val decls1 = mapOver(declsList)
+
if ((parents1 eq parents) && (decls1 eq declsList)) tp
else ClassInfoType(parents1, new Scope(decls1), clazz)
+
case AnnotatedType(annots, atp, selfsym) =>
val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
+ val atp1 = this(atp)
+
if ((annots1 eq annots) && (atp1 eq atp)) tp
else if (annots1.isEmpty) atp1
- else if (atp1 ne atp) {
- val annots2 = annots1.filter(_.atp.typeSymbol != definitions.uncheckedVarianceClass)
- if (annots2.isEmpty) atp1
- else AnnotatedType(annots2, atp1, selfsym)
- } else
- AnnotatedType(annots1, atp1, selfsym)
-
+ else if (atp1 eq atp) AnnotatedType(annots1, atp1, selfsym)
+ else annots1.filter(_.atp.typeSymbol != uncheckedVarianceClass) match {
+ case Nil => atp1
+ case annots2 => AnnotatedType(annots2, atp1, selfsym)
+ }
case _ => super.mapOver(tp)
}
}
- // disabled because of bugs in std. collections
- //val (keys, values) = env.iterator.toList.unzip
- val keys = env.keysIterator.toList
- val values = env.valuesIterator.toList
+ val (keys, values) = env.toList.unzip
(new FullTypeMap(keys, values))(tpe)
-// tpe.subst(keys, values)
}
private def subst(env: TypeEnv)(decl: Symbol): Symbol = {
- val tpe = subst(env, decl.info)
- decl.setInfo(if (decl.isConstructor) tpe match {
- case MethodType(args, resTpe) => MethodType(args, decl.owner.tpe)
- } else tpe)
- // log((decl, decl.tpe.bounds.hi))
- // decl
+ decl setInfo (subst(env, decl.info) match {
+ case MethodType(args, _) if decl.isConstructor => MethodType(args, decl.owner.tpe)
+ case tpe => tpe
+ })
}
/** Checks if the type parameter symbol is not specialized
@@ -1101,40 +1041,33 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* If it is a 'no-specialization' run, it is applied only to loaded symbols.
*/
override def transformInfo(sym: Symbol, tpe: Type): Type = {
- val res = if (!settings.nospecialization.value || !currentRun.compiles(sym)) tpe match {
+ if (settings.nospecialization.value && currentRun.compiles(sym)) tpe
+ else tpe match {
case PolyType(targs, ClassInfoType(base, decls, clazz))
if clazz != RepeatedParamClass
&& clazz != JavaRepeatedParamClass
&& !clazz.isJavaDefined =>
val parents = base map specializedType
- // log("!!! %s[%s]) Parents: %s -> %s".format(sym, targs, base, parents))
- // for (t <- targs; p <- parents) notSpecializedIn(t, p)
- if (settings.debug.value) log("transformInfo (poly) " + clazz + " with parents1: " + parents + " ph: " + phase)
-// if (clazz.name.toString == "$colon$colon")
-// (new Throwable).printStackTrace
- polyType(targs, ClassInfoType(parents,
- new Scope(specializeClass(clazz, typeEnv(clazz)) ::: specialOverrides(clazz)),
- clazz))
+ debuglog("transformInfo (poly) " + clazz + " with parents1: " + parents + " ph: " + phase)
+ polyType(targs, ClassInfoType(
+ parents,
+ new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz)),
+ clazz)
+ )
case ClassInfoType(base, decls, clazz) if !clazz.isPackageClass && !clazz.isJavaDefined =>
- atPhase(phase.next)(base.map(_.typeSymbol.info))
+ atPhase(phase.next)(base map (_.typeSymbol.info))
+ // side effecting? parents is not used except to log.
val parents = base map specializedType
- if (settings.debug.value) log("transformInfo " + clazz + " with parents1: " + parents + " ph: " + phase)
- val res = ClassInfoType(base map specializedType,
- new Scope(specializeClass(clazz, typeEnv(clazz)) ::: specialOverrides(clazz)),
- clazz)
- res
-
+ debuglog("transformInfo " + clazz + " with parents1: " + parents + " ph: " + phase)
+ ClassInfoType(
+ base map specializedType,
+ new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz)),
+ clazz
+ )
case _ =>
tpe
- } else tpe
- res
-
- }
-
- def conflicting(env: TypeEnv): Boolean = {
- val silent = (pos: Position, str: String) => ()
- conflicting(env, silent)
+ }
}
  /** Is any type variable in `env' conflicting with any of its type bounds, when
@@ -1142,13 +1075,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* A conflicting type environment could still be satisfiable.
*/
- def conflicting(env: TypeEnv, warn: (Position, String) => Unit): Boolean =
- env exists { case (tvar, tpe) =>
- if (!((subst(env, tvar.info.bounds.lo) <:< tpe)
- && (tpe <:< subst(env, tvar.info.bounds.hi)))) {
- warn(tvar.pos, "Bounds prevent specialization for " + tvar)
- true
- } else false
+ def conflicting(env: TypeEnv) = !nonConflicting(env)
+ def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) =>
+ (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))
}
  /** The type environment is sound w.r.t. all type bounds or only soft
@@ -1157,31 +1086,32 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* that does not fall within the bounds, but whose bounds contain
  * type variables that are @specialized (that could become satisfiable).
*/
- def satisfiable(env: TypeEnv, warn: (Position, String) => Unit): Boolean = {
+ def satisfiable(env: TypeEnv): Boolean = satisfiable(env, false)
+ def satisfiable(env: TypeEnv, warnings: Boolean): Boolean = {
def matches(tpe1: Type, tpe2: Type): Boolean = {
val t1 = subst(env, tpe1)
val t2 = subst(env, tpe2)
((t1 <:< t2)
- || !specializedTypeVars(t1).isEmpty
- || !specializedTypeVars(t2).isEmpty)
+ || specializedTypeVars(t1).nonEmpty
+ || specializedTypeVars(t2).nonEmpty)
}
env forall { case (tvar, tpe) =>
- ((matches(tvar.info.bounds.lo, tpe)
- && matches(tpe, tvar.info.bounds.hi))
- || { warn(tvar.pos, "Bounds prevent specialization of " + tvar);
- log("specvars: "
- + tvar.info.bounds.lo + ": " + specializedTypeVars(tvar.info.bounds.lo)
- + " " + subst(env, tvar.info.bounds.hi) + ": " + specializedTypeVars(subst(env, tvar.info.bounds.hi)))
- false })
+ matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || {
+ if (warnings)
+ reporter.warning(tvar.pos, "Bounds prevent specialization of " + tvar)
+
+ log("specvars: " +
+ tvar.info.bounds.lo + ": " +
+ specializedTypeVars(tvar.info.bounds.lo) + " " +
+ subst(env, tvar.info.bounds.hi) + ": " +
+ specializedTypeVars(subst(env, tvar.info.bounds.hi))
+ )
+ false
+ }
}
}
- def satisfiable(env: TypeEnv): Boolean = {
- val silent = (pos: Position, str: String) => ()
- satisfiable(env, silent)
- }
-
class Duplicator extends {
val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global
} with typechecker.Duplicators
@@ -1217,12 +1147,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def transform(tree: Tree): Tree = tree match {
case Select(qual, name) =>
val sym = tree.symbol
- if (sym.hasFlag(PRIVATE))
- if (settings.debug.value)
- log("seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b"
- .format(sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name)))
+ if (sym.isPrivate) debuglog(
+ "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format(
+ sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
+ )
if (shouldMakePublic(sym) && !isAccessible(sym)) {
- if (settings.debug.value) log("changing private flag of " + sym)
+ debuglog("changing private flag of " + sym)
sym.makeNotPrivate(sym.owner)
}
super.transform(tree)
@@ -1234,13 +1164,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the generic class corresponding to this specialized class. */
def originalClass(clazz: Symbol): Symbol =
- if (clazz.hasFlag(SPECIALIZED)) {
+ if (hasSpecializedFlag(clazz)) {
val (originalName, _, _) = nme.splitSpecializedName(clazz.name)
clazz.owner.info.decl(originalName).suchThat(_.isClass)
} else NoSymbol
def illegalSpecializedInheritance(clazz: Symbol): Boolean = {
- clazz.hasFlag(SPECIALIZED) && originalClass(clazz).info.parents.exists { p =>
+ hasSpecializedFlag(clazz) && originalClass(clazz).info.parents.exists { p =>
hasSpecializedParams(p.typeSymbol) && !p.typeSymbol.isTrait
}
}
@@ -1258,8 +1188,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def traverse(tree: Tree) = tree match {
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) {
- if (settings.debug.value)
- log("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
+ debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
parameters(tree.symbol) = vparamss map (_ map (_.symbol))
@@ -1333,23 +1262,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case TypeApply(Select(qual, name), targs)
if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
- if (settings.debug.value) log("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
+ debuglog("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
// log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
case Some(specMember) =>
- if (settings.debug.value) log("found " + specMember.fullName)
- assert(symbol.info.typeParams.length == targs.length)
- // log("!!! In TypeApply: " + specMember + "; " + symbol)
+ debuglog("found " + specMember.fullName)
+ ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
+
val env = typeEnv(specMember)
- // log("env: " + env)
- def isResidual(env: TypeEnv, tvar: Symbol) =
- !env.isDefinedAt(tvar) || (env.isDefinedAt(tvar) && !isValueClass(env(tvar).typeSymbol))
- val residualTargs =
- for ((tvar, targ) <- symbol.info.typeParams.zip(targs) if isResidual(env, tvar))
- yield targ
- assert(residualTargs.length == specMember.info.typeParams.length,
+ val residualTargs = symbol.info.typeParams zip targs collect {
+ case (tvar, targ) if !env.contains(tvar) || !isValueClass(env(tvar).typeSymbol) => targ
+ }
+
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
"residual: %s, tparams: %s, env: %s".format(residualTargs, symbol.info.typeParams, env))
+ )
+
val tree1 = maybeTypeApply(Select(qual1, specMember), residualTargs)
log("rewrote " + tree + " to " + tree1)
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
@@ -1364,15 +1293,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tree
case Select(qual, name) =>
- if (settings.debug.value)
- log("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(tree.pos.line, tree, symbol, symbol.info, tree.tpe))
+ debuglog("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(
+ tree.pos.line, tree, symbol, symbol.info, tree.tpe))
//log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
// log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
// log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- if (settings.debug.value) log("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
+ debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
if (!env.isEmpty) {
val specMember = overload(symbol, env)
//log("!!! found member: " + specMember)
@@ -1397,7 +1326,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else overloads(symbol).find(_.sym.info =:= symbol.info) match {
case Some(specMember) =>
val qual1 = transform(qual)
- if (settings.debug.value) log("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
+ debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
case None =>
super.transform(tree)
@@ -1442,7 +1371,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
- if (settings.debug.value) log("implementation: " + tree1)
+ debuglog("implementation: " + tree1)
val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
@@ -1456,7 +1385,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else {
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
- if (settings.debug.value) log("implementation: " + tree1)
+ debuglog("implementation: " + tree1)
val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
}
@@ -1469,14 +1398,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
- if (settings.debug.value)
- log("changed owners, now: " + tree1)
+ debuglog("changed owners, now: " + tree1)
val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
case SpecialOverload(original, env) =>
- if (settings.debug.value) log("completing specialized " + symbol.fullName + " calling " + original)
+ debuglog("completing specialized " + symbol.fullName + " calling " + original)
log("special overload " + original + " -> " + env)
val t = DefDef(symbol, { vparamss =>
val fun = Apply(Select(This(symbol.owner), original),
@@ -1488,14 +1416,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
symbol.owner.thisType.memberType(original).finalResultType)
})
log("created special overload tree " + t)
- if (settings.debug.value) log("created " + t)
+ debuglog("created " + t)
localTyper.typed(t)
case fwd @ Forward(_) =>
log("forward: " + fwd + ", " + ddef)
val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss)
- if (settings.debug.value)
- log("completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
+ debuglog("completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
case SpecializedAccessor(target) =>
@@ -1508,16 +1435,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
- assert(body.isDefinedAt(symbol.alias))
+ assert(body.isDefinedAt(symbol.alias), body)
val tree1 = treeCopy.ValDef(tree, mods, name, tpt, body(symbol.alias).duplicate)
- if (settings.debug.value) log("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
+ debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
val d = new Duplicator
- val tree2 = d.retyped(localTyper.context1.asInstanceOf[d.Context],
- tree1,
- symbol.alias.enclClass,
- symbol.enclClass,
- typeEnv(symbol.alias) ++ typeEnv(tree.symbol))
- val ValDef(mods1, name1, tpt1, rhs1) = tree2
+ val ValDef(mods1, name1, tpt1, rhs1) = d.retyped(
+ localTyper.context1.asInstanceOf[d.Context],
+ tree1,
+ symbol.alias.enclClass,
+ symbol.enclClass,
+ typeEnv(symbol.alias) ++ typeEnv(tree.symbol)
+ )
val t = treeCopy.ValDef(tree1, mods1, name1, tpt1, transform(rhs1))
log("valdef " + tree + " -> " + t)
t
@@ -1527,18 +1455,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// localTyper.typed(
// Apply(Select(Super(currentClass, nme.EMPTY), symbol.alias.getter(symbol.alias.owner)),
// List())))
-// if (settings.debug.value) log("replaced ValDef: " + tree1 + " in " + tree.symbol.owner.fullName)
+// debuglog("replaced ValDef: " + tree1 + " in " + tree.symbol.owner.fullName)
// tree1
case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
if (sup.symbol.info.parents != atPhase(phase.prev)(sup.symbol.info.parents)) =>
def parents = sup.symbol.info.parents
- if (settings.debug.value) log(tree + " parents changed from: " + atPhase(phase.prev)(parents) + " to: " + parents)
+ debuglog(tree + " parents changed from: " + atPhase(phase.prev)(parents) + " to: " + parents)
val res = localTyper.typed(
Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- if (settings.debug.value) log("retyping call to super, from: " + symbol + " to " + res.symbol)
+ debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
res
case _ =>
@@ -1555,28 +1483,19 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def duplicateBody(tree: DefDef, source: Symbol) = {
val symbol = tree.symbol
- val meth = addBody(tree, source)
- if (settings.debug.value) log("now typing: " + meth + " in " + symbol.owner.fullName)
+ val meth = addBody(tree, source)
+ debuglog("now typing: " + meth + " in " + symbol.owner.fullName)
+
val d = new Duplicator
- try {
- // log("duplicating tree: " + tree + "; " + symbol.owner)
- // log("source: " + source + "; owner: " + source.owner)
- // log("source encl class: " + source.enclClass)
- // log("symbol encl class: " + symbol.enclClass)
- // log(meth)
- d.retyped(localTyper.context1.asInstanceOf[d.Context],
- meth,
- source.enclClass,
- symbol.enclClass,
- typeEnv(source) ++ typeEnv(symbol))
- } catch {
- case e =>
- println("error compiling %s [%s]".format(unit, tree.pos))
- throw e
- }
+ d.retyped(
+ localTyper.context1.asInstanceOf[d.Context],
+ meth,
+ source.enclClass,
+ symbol.enclClass,
+ typeEnv(source) ++ typeEnv(symbol)
+ )
}
-
/** Put the body of 'source' as the right hand side of the method 'tree'.
* The destination method gets fresh symbols for type and value parameters,
* and the body is updated to the new symbols, and owners adjusted accordingly.
@@ -1585,13 +1504,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def addBody(tree: DefDef, source: Symbol): DefDef = {
val symbol = tree.symbol
- if (settings.debug.value) log("specializing body of" + symbol.fullName + ": " + symbol.info)
+ debuglog("specializing body of" + symbol.fullName + ": " + symbol.info)
val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
- val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitive(env(tparam)))
- if (settings.debug.value) log("substituting " + origtparams + " for " + symbol.typeParams)
+ val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isScalaValueType(env(tparam)))
+ debuglog("substituting " + origtparams + " for " + symbol.typeParams)
// skolemize type parameters
val (oldtparams, newtparams) = reskolemize(tparams)
@@ -1617,10 +1536,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
}
- def warn(clazz: Symbol)(pos: Position, err: String) =
- if (!clazz.hasFlag(SPECIALIZED))
- unit.warning(pos, err)
-
/** Create trees for specialized members of 'cls', based on the
* symbols that are already there.
*/
@@ -1634,7 +1549,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
for (m <- cls.info.decls.toList
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
- && satisfiable(typeEnv(m), warn(cls))) {
+ && satisfiable(typeEnv(m), !cls.hasFlag(SPECIALIZED))) {
log("creating tree for " + m.fullName)
if (m.isMethod) {
if (info(m).target.hasAccessorFlag) hasSpecializedFields = true
@@ -1760,40 +1675,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (currentRun.compiles(m)) concreteSpecMethods += m
}
- private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = {
- def needsCast(tp1: Type, tp2: Type): Boolean =
- !(tp1 <:< tp2)
-
+ private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = (
//! TODO: make sure the param types are seen from the right prefix
- for ((tp, arg) <- fun.info.paramTypes zip vparams) yield {
- if (needsCast(arg.tpe, tp)) {
- //log("tp: " + tp + " " + tp.typeSymbol.owner)
- gen.mkAsInstanceOf(Ident(arg), tp)
- } else Ident(arg)
- }
- }
-
+ for ((tp, arg) <- fun.info.paramTypes zip vparams) yield
+ gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)
+ )
private def findSpec(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- if (args.isEmpty) tp
- else {
- specializedType(tp)
- /*log("looking for " + specializedName(sym.name, args) + " in " + pre)
- val sym1 = pre.member(specializedName(sym.name, args))
- assert(sym1 != NoSymbol, "pre: " + pre.typeSymbol + " ph: " + phase + " with: " + pre.members)
- TypeRef(pre, sym1, Nil)*/
- }
- case _ => tp
+ case TypeRef(pre, sym, _ :: _) => specializedType(tp)
+ case _ => tp
}
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
override def transform(tree: Tree) =
if (settings.nospecialization.value) tree
- else atPhase(phase.next) {
- val res = specializeCalls(unit).transform(tree)
- res
- }
+ else atPhase(phase.next)(specializeCalls(unit).transform(tree))
}
def printSpecStats() {
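
As an illustrative aside (not part of the patch): the user-level shape that the specializeClass / specializedOverload / specialOverrides hunks above operate on is a plain @specialized definition like the one below; the class and method names are stand-ins.

    // For each satisfiable type environment of the @specialized parameters,
    // the phase patched above enters a specialized subclass of Box and
    // specialized overloads of `map`, so calls at primitive type arguments
    // can avoid boxing.
    class Box[@specialized T](val value: T) {
      def map[@specialized U](f: T => U): Box[U] = new Box(f(value))
    }
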
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 04752c5fd7..b62b2aa2a8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -468,8 +468,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
// ------ The tree transformers --------------------------------------------------------
def mainTransform(tree: Tree): Tree = {
-
- def withNeedLift(needLift: Boolean)(f: => Tree): Tree = {
+ @inline def withNeedLift(needLift: Boolean)(f: => Tree): Tree = {
val saved = needTryLift
needTryLift = needLift
try f
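
The withNeedLift helper that gains @inline above is the usual save/set/restore pattern; a standalone restatement for readability, with an illustrative generic result type and a local var standing in for the transformer's field:

    var needTryLift = false
    @inline def withNeedLift[A](lift: Boolean)(body: => A): A = {
      val saved = needTryLift       // remember the current flag
      needTryLift = lift            // set it for the nested transform
      try body
      finally needTryLift = saved   // always restore on the way out
    }
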
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 96d81d63a8..7910d307a9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -127,8 +127,10 @@ trait Contexts { self: Analyzer =>
var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
// for type parameters which are narrowed in a GADT
- var typingIndent: String = ""
+ var typingIndentLevel: Int = 0
+ def typingIndent = " " * typingIndentLevel
+ def undetparamsString = if (undetparams.isEmpty) "" else undetparams.mkString("undetparams=", ", ", "")
def undetparams = _undetparams
def undetparams_=(ps: List[Symbol]) = {
//System.out.println("undetparams = " + ps);//debug
@@ -141,6 +143,13 @@ trait Contexts { self: Analyzer =>
tparams
}
+ def withoutReportingErrors[T](op: => T): T = {
+ val saved = reportGeneralErrors
+ reportGeneralErrors = false
+ try op
+ finally reportGeneralErrors = saved
+ }
+
def withImplicitsDisabled[T](op: => T): T = {
val saved = implicitsEnabled
implicitsEnabled = false
@@ -191,7 +200,7 @@ trait Contexts { self: Analyzer =>
c.reportAmbiguousErrors = this.reportAmbiguousErrors
c.reportGeneralErrors = this.reportGeneralErrors
c.diagnostic = this.diagnostic
- c.typingIndent = typingIndent
+ c.typingIndentLevel = typingIndentLevel
c.implicitsEnabled = this.implicitsEnabled
c.checking = this.checking
c.retyping = this.retyping
@@ -211,8 +220,6 @@ trait Contexts { self: Analyzer =>
def makeNewImport(imp: Import): Context =
make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
-
-
def make(tree: Tree, owner: Symbol, scope: Scope): Context = {
if (tree == this.tree && owner == this.owner && scope == this.scope) this
else make0(tree, owner, scope)
@@ -325,28 +332,39 @@ trait Contexts { self: Analyzer =>
} else throw new TypeError(pos, msg)
}
- def outerContext(clazz: Symbol): Context = {
- var c = this
- while (c != NoContext && c.owner != clazz) c = c.outer.enclClass
- c
- }
-
def isLocal(): Boolean = tree match {
- case Block(_,_) => true
+ case Block(_,_) => true
case PackageDef(_, _) => false
- case EmptyTree => false
- case _ => outer.isLocal()
+ case EmptyTree => false
+ case _ => outer.isLocal()
+ }
+
+ // nextOuter determines which context is searched next for implicits
+ // (after `this`, which contributes `newImplicits` below.) In
+ // most cases, it is simply the outer context: if we're owned by
+ // a constructor, the actual current context and the conceptual
+ // context are different when it comes to scoping. The current
+ // conceptual scope is the context enclosing the blocks which
+ // represent the constructor body (TODO: why is there more than one
+ // such block in the outer chain?)
+ private def nextOuter = {
+ // Drop the constructor body blocks, which come in varying numbers.
+ // -- If the first statement is in the constructor, scopingCtx == (constructor definition)
+ // -- Otherwise, scopingCtx == (the class which contains the constructor)
+ val scopingCtx =
+ if (owner.isConstructor) nextEnclosing(c => !c.tree.isInstanceOf[Block])
+ else this
+
+ scopingCtx.outer
}
def nextEnclosing(p: Context => Boolean): Context =
if (this == NoContext || p(this)) this else outer.nextEnclosing(p)
- override def toString(): String = {
+ override def toString = (
if (this == NoContext) "NoContext"
- else owner.toString() + " @ " + tree.getClass() +
- " " + tree.toString() + ", scope = " + scope.## +
- " " + scope.toList + "\n:: " + outer.toString()
- }
+ else "Context(%s@%s scope=%s)".format(owner.fullName, tree.getClass.getName split "[.$]" last, scope.##)
+ )
/** Is `sub' a subclass of `base' or a companion object of such a subclass?
*/
@@ -498,7 +516,7 @@ trait Contexts { self: Analyzer =>
def resetCache() {
implicitsRunId = NoRunId
implicitsCache = null
- if (outer != null && outer != this) outer.resetCache
+ if (outer != null && outer != this) outer.resetCache()
}
/** A symbol `sym` qualifies as an implicit if it has the IMPLICIT flag set,
@@ -514,8 +532,8 @@ trait Contexts { self: Analyzer =>
})
private def collectImplicits(syms: List[Symbol], pre: Type, imported: Boolean = false): List[ImplicitInfo] =
- for (sym <- syms if isQualifyingImplicit(sym, pre, imported))
- yield new ImplicitInfo(sym.name, pre, sym)
+ for (sym <- syms if isQualifyingImplicit(sym, pre, imported)) yield
+ new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
val pre = imp.qual.tpe
@@ -538,16 +556,6 @@ trait Contexts { self: Analyzer =>
}
def implicitss: List[List[ImplicitInfo]] = {
- // nextOuter determines which context is searched next for implicits (after `this`, which contributes `newImplicits` below)
- // in most cases, it is simply the outer context
- // if we're owned by a constructor, the actual current context and the conceptual context are different when it comes to scoping:
- // the current conceptual scope is the context enclosing the blocks that represent the constructor body
- // (TODO: why is there more than one such block in the outer chain?)
- val scopingCtx =
- if(owner.isConstructor) nextEnclosing(c => !c.tree.isInstanceOf[Block]) // drop the constructor body blocks (they come in varying numbers depending on whether we are in the ctor call in the first statement or after)
- // scopingCtx == the constructor definition (if we were after the ctor call) or the class that contains this constructor (if we are in the ctor call)
- else this
- val nextOuter = scopingCtx.outer
if (implicitsRunId != currentRunId) {
implicitsRunId = currentRunId
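
The new Context#withoutReportingErrors added above follows the same shape as withImplicitsDisabled: save the flag, run the thunk, restore it in a finally. A hypothetical call site, purely illustrative (candidateTree and typer are stand-ins, not names from the patch):

    // run a typing operation with reportGeneralErrors temporarily set to false
    val result = context.withoutReportingErrors(typer.typed(candidateTree))
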
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 3ae99a5bed..eee75e1b2a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -25,7 +25,7 @@ trait EtaExpansion { self: Analyzer =>
}
def unapply(tree: Tree): Option[(List[ValDef], Tree, List[Tree])] = tree match {
- case Function(vparams, Apply(fn, args)) if (vparams corresponds args)(isMatch) => // @PP: corresponds
+ case Function(vparams, Apply(fn, args)) if (vparams corresponds args)(isMatch) =>
Some((vparams, fn, args))
case _ =>
None
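
The guard in the extractor above relies on `corresponds`, which checks that two sequences have the same length and that every aligned pair satisfies the predicate; a small illustration with stand-in values:

    List(1, 2, 3).corresponds(List("a", "bb", "ccc"))(_ == _.length)  // true
    List(1, 2).corresponds(List("a"))(_ == _.length)                  // false: lengths differ
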
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 53b4f0dac6..7d4d42c224 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -28,9 +28,8 @@ trait Implicits {
import global._
import definitions._
-
- def traceImplicits = printTypings
- import global.typer.{printTyping, deindentTyping, indentTyping}
+ import typeDebug.{ ptTree, ptBlock, ptLine }
+ import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
@@ -46,16 +45,29 @@ trait Implicits {
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = {
- printTyping("Beginning implicit search for "+ tree +" expecting "+ pt + (if(isView) " looking for a view" else ""))
+ printInference("[inferImplicit%s] pt = %s".format(
+ if (isView) " view" else "", pt)
+ )
+ printTyping(
+ ptBlock("infer implicit" + (if (isView) " view" else ""),
+ "tree" -> tree,
+ "pt" -> pt,
+ "undetparams" -> context.outer.undetparams
+ )
+ )
indentTyping()
- val rawTypeStart = startCounter(rawTypeImpl)
+
+ val rawTypeStart = startCounter(rawTypeImpl)
val findMemberStart = startCounter(findMemberImpl)
- val subtypeStart = startCounter(subtypeImpl)
+ val subtypeStart = startCounter(subtypeImpl)
val start = startTimer(implicitNanos)
- if (traceImplicits && !tree.isEmpty && !context.undetparams.isEmpty)
- println("typing implicit with undetermined type params: "+context.undetparams+"\n"+tree)
+ if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
+ printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
+
val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit
+ printInference("[inferImplicit] result: " + result)
context.undetparams = context.undetparams filterNot result.subst.fromContains
+
stopTimer(implicitNanos, start)
stopCounter(rawTypeImpl, rawTypeStart)
stopCounter(findMemberImpl, findMemberStart)
@@ -87,7 +99,8 @@ trait Implicits {
* that were instantiated by the winning implicit.
*/
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
- override def toString = "SearchResult("+tree+", "+subst+")"
+ override def toString = "SearchResult(%s, %s)".format(tree,
+ if (subst.isEmpty) "" else subst)
}
lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter)
@@ -122,12 +135,9 @@ trait Implicits {
tp.isError
}
- def isCyclicOrErroneous = try {
- containsError(tpe)
- } catch {
- case ex: CyclicReference =>
- true
- }
+ def isCyclicOrErroneous =
+ try containsError(tpe)
+ catch { case _: CyclicReference => true }
override def equals(other: Any) = other match {
case that: ImplicitInfo =>
@@ -137,7 +147,7 @@ trait Implicits {
case _ => false
}
override def hashCode = name.## + pre.## + sym.##
- override def toString = "ImplicitInfo(" + name + "," + pre + "," + sym + ")"
+ override def toString = name + ": " + tpe
}
/** A sentinel indicating no implicit was found */
@@ -222,7 +232,15 @@ trait Implicits {
*/
class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context)
extends Typer(context0) {
- printTyping("begin implicit search: "+(tree, pt, isView, context.outer.undetparams))
+ printTyping(
+ ptBlock("new ImplicitSearch",
+ "tree" -> tree,
+ "pt" -> pt,
+ "isView" -> isView,
+ "context0" -> context0,
+ "undetparams" -> context.outer.undetparams
+ )
+ )
// assert(tree.isEmpty || tree.pos.isDefined, tree)
import infer._
@@ -324,20 +342,29 @@ trait Implicits {
if (isView) {
val found = pt.typeArgs(0)
val req = pt.typeArgs(1)
+ def defaultExplanation =
+ "Note that implicit conversions are not applicable because they are ambiguous:\n "+
+ coreMsg+"are possible conversion functions from "+ found+" to "+req
- /** A nice spot to explain some common situations a little
- * less confusingly.
- */
def explanation = {
- if ((found =:= AnyClass.tpe) && (AnyRefClass.tpe <:< req))
- "Note: Any is not implicitly converted to AnyRef. You can safely\n" +
- "pattern match x: AnyRef or cast x.asInstanceOf[AnyRef] to do so."
- else if ((found <:< AnyValClass.tpe) && (AnyRefClass.tpe <:< req))
- "Note: primitive types are not implicitly converted to AnyRef.\n" +
- "You can safely force boxing by casting x.asInstanceOf[AnyRef]."
- else
- "Note that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
+ val sym = found.typeSymbol
+ // Explain some common situations a bit more clearly.
+ if (AnyRefClass.tpe <:< req) {
+ if (sym == AnyClass || sym == UnitClass) {
+ "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
+ "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
+ }
+ else boxedClass get sym match {
+ case Some(boxed) =>
+ "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
+ "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
+ "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
+ "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
+ case _ =>
+ defaultExplanation
+ }
+ }
+ else defaultExplanation
}
typeErrorMsg(found, req) + "\n" + explanation
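
One of the cases the reworked explanation above covers is Any-to-AnyRef; a tiny illustration of that case and the two workarounds the message itself suggests (the values are illustrative):

    val a: Any = "payload"
    // val r: AnyRef = a                           // does not compile: Any is not converted to AnyRef
    val viaCast  = a.asInstanceOf[AnyRef]          // cast, as the note suggests
    val viaMatch = a match { case r: AnyRef => r } // or pattern match x: AnyRef
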
@@ -350,7 +377,6 @@ trait Implicits {
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
- /** Replace undetParams in type `tp` by Any/Nothing, according to variance */
def approximate(tp: Type) =
if (undetParams.isEmpty) tp
else tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
@@ -364,7 +390,8 @@ trait Implicits {
* @param info The given implicit info describing the implicit definition
* @pre <code>info.tpe</code> does not contain an error
*/
- private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult =
+ private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
+ printInference("[typedImplicit] " + info)
(context.openImplicits find { case (tp, sym) => sym == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
// println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
@@ -390,6 +417,7 @@ trait Implicits {
context.openImplicits = context.openImplicits.tail
}
}
+ }
/** Todo reconcile with definition of stability given in Types.scala */
private def isStable(tp: Type): Boolean = tp match {
@@ -443,9 +471,23 @@ trait Implicits {
private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
incCounter(plausiblyCompatibleImplicits)
-
- printTyping("typed impl for "+wildPt+"? "+info.name +":"+ depoly(info.tpe)+ " orig info= "+ info.tpe +"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List())+"/"+info.pre+"/"+isStable(info.pre))
- if (ptChecked || matchesPt(depoly(info.tpe), wildPt, List()) && isStable(info.pre))
+ printTyping(
+ ptBlock("typedImplicit0",
+ "info.name" -> info.name,
+ "info.tpe" -> depoly(info.tpe),
+ "ptChecked" -> ptChecked,
+ "pt" -> wildPt,
+ "orig" -> ptBlock("info",
+ "matchesPt" -> matchesPt(depoly(info.tpe), wildPt, Nil),
+ "undetParams" -> undetParams,
+ "isPlausiblyCompatible" -> isPlausiblyCompatible(info.tpe, wildPt),
+ "info.pre" -> info.pre,
+ "isStable" -> isStable(info.pre)
+ ).replaceAll("\\n", "\n ")
+ )
+ )
+
+ if (ptChecked || matchesPt(depoly(info.tpe), wildPt, Nil) && isStable(info.pre))
typedImplicit1(info)
else
SearchFailure
@@ -458,7 +500,10 @@ trait Implicits {
if (info.pre == NoPrefix) Ident(info.name)
else Select(gen.mkAttributedQualifier(info.pre), info.name)
}
- printTyping("typedImplicit0 typing"+ itree +" with wildpt = "+ wildPt +" from implicit "+ info.name+":"+info.tpe)
+ printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
+ typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
+ )
+
def fail(reason: String): SearchResult = {
if (settings.XlogImplicits.value)
inform(itree+" is not a valid implicit value for "+pt+" because:\n"+reason)
@@ -479,10 +524,14 @@ trait Implicits {
incCounter(typedImplicits)
- printTyping("typed implicit "+itree1+":"+itree1.tpe+", pt = "+wildPt)
+ printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
else adapt(itree1, EXPRmode, wildPt)
- printTyping("adapted implicit "+itree1.symbol+":"+itree2.tpe+" to "+wildPt)
+
+ printTyping("adapted implicit %s:%s to %s".format(
+ itree1.symbol, itree2.tpe, wildPt)
+ )
+
def hasMatchingSymbol(tree: Tree): Boolean = (tree.symbol == info.sym) || {
tree match {
case Apply(fun, _) => hasMatchingSymbol(fun)
@@ -492,11 +541,26 @@ trait Implicits {
}
}
- if (itree2.tpe.isError) SearchFailure
- else if (hasMatchingSymbol(itree1)) {
+ if (itree2.tpe.isError)
+ SearchFailure
+ else if (!hasMatchingSymbol(itree1))
+ fail("candidate implicit %s is shadowed by other implicit %s".format(
+ info.sym + info.sym.locationString, itree1.symbol + itree1.symbol.locationString))
+ else {
val tvars = undetParams map freshVar
+
if (matchesPt(itree2.tpe, pt.instantiateTypeParams(undetParams, tvars), undetParams)) {
- printTyping("tvars = "+tvars+"/"+(tvars map (_.constr)))
+ printInference(
+ ptBlock("matchesPt",
+ "itree1" -> itree1,
+ "tvars" -> tvars,
+ "undetParams" -> undetParams
+ )
+ )
+
+ if (tvars.nonEmpty)
+ printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
+
val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
false, lubDepth(List(itree2.tpe, pt)))
@@ -505,39 +569,42 @@ trait Implicits {
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
- val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
- var subst = EmptyTreeTypeSubstituter
- if (okParams.nonEmpty) {
- subst = new TreeTypeSubstituter(okParams, okArgs)
- subst traverse itree2
- }
+ // prototype == WildcardType: want to remove all inferred Nothings
+ val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs)
+ val subst: TreeTypeSubstituter =
+ if (okParams.isEmpty) EmptyTreeTypeSubstituter
+ else {
+ val subst = new TreeTypeSubstituter(okParams, okArgs)
+ subst traverse itree2
+ subst
+ }
- // #2421b: since type inference (which may have been performed during implicit search)
- // does not check whether inferred arguments meet the bounds of the corresponding parameter (see note in solvedTypes),
- // must check again here:
- // TODO: I would prefer to just call typed instead of duplicating the code here, but this is probably a hotspot (and you can't just call typed, need to force re-typecheck)
+ // #2421b: since type inference (which may have been
+ // performed during implicit search) does not check whether
+ // inferred arguments meet the bounds of the corresponding
+ // parameter (see note in solvedTypes), must check again
+ // here:
+ // TODO: I would prefer to just call typed instead of
+ // duplicating the code here, but this is probably a
+ // hotspot (and you can't just call typed, need to force
+ // re-typecheck)
+ // TODO: the return tree is ignored. This seems to make
+ // no difference, but it's bad practice regardless.
itree2 match {
- case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
+ case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
- case _ =>
+ case t => t
}
-
val result = new SearchResult(itree2, subst)
incCounter(foundImplicits)
- if (traceImplicits) println("RESULT = "+result)
- // println("RESULT = "+itree+"///"+itree1+"///"+itree2)//DEBUG
+ printInference("[typedImplicit1] SearchResult: " + result)
result
- } else {
- printTyping("incompatible: "+itree2.tpe+" does not match "+pt.instantiateTypeParams(undetParams, tvars))
-
- SearchFailure
}
+ else fail("incompatible: %s does not match expected type %s".format(
+ itree2.tpe, pt.instantiateTypeParams(undetParams, tvars)))
}
- else if (settings.XlogImplicits.value)
- fail("candidate implicit "+info.sym+info.sym.locationString+
- " is shadowed by other implicit: "+itree1.symbol+itree1.symbol.locationString)
- else SearchFailure
- } catch {
+ }
+ catch {
case ex: TypeError => fail(ex.getMessage())
}
}
@@ -655,6 +722,16 @@ trait Implicits {
// most frequent one first
matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg)
}
+ def eligibleString = {
+ val args = List(
+ "search" -> pt,
+ "target" -> tree,
+ "isView" -> isView
+ ) ++ eligible.map("eligible" -> _)
+
+ ptBlock("Implicit search in " + context, args: _*)
+ }
+ printInference(eligibleString)
/** Faster implicit search. Overall idea:
* - prune aggressively
@@ -841,6 +918,9 @@ trait Implicits {
getParts(tp.widen)
case _: SingletonType =>
getParts(tp.widen)
+ case HasMethodMatching(_, argtpes, restpe) =>
+ for (tp <- argtpes) getParts(tp)
+ getParts(restpe)
case RefinedType(ps, _) =>
for (p <- ps) getParts(p)
case AnnotatedType(_, t, _) =>
@@ -855,7 +935,7 @@ trait Implicits {
val infoMap = new InfoMap
getParts(tp)(infoMap, new mutable.HashSet(), Set())
- if (traceImplicits) println("companion implicits of "+tp+" = "+infoMap)
+ printInference("[companionImplicitMap] "+tp+" = "+infoMap)
infoMap
}
@@ -996,9 +1076,9 @@ trait Implicits {
inferImplicit(tree, appliedType(manifestClass.typeConstructor, List(tp)), true, false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
- def mot(tp0: Type)(implicit from: List[Symbol] = List(), to: List[Type] = List()): SearchResult = {
+ def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
implicit def wrapResult(tree: Tree): SearchResult =
- if (tree == EmptyTree) SearchFailure else new SearchResult(tree, new TreeTypeSubstituter(from, to))
+ if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
val tp1 = tp0.normalize
tp1 match {
@@ -1032,24 +1112,29 @@ trait Implicits {
} else if (sym.isExistentiallyBound && full) {
manifestFactoryCall("wildcardType", tp,
findManifest(tp.bounds.lo), findManifest(tp.bounds.hi))
- } else if(undetParams contains sym) { // looking for a manifest of a type parameter that hasn't been inferred by now, can't do much, but let's not fail
- mot(NothingClass.tpe)(sym :: from, NothingClass.tpe :: to) // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
+ }
+ // looking for a manifest of a type parameter that hasn't been inferred by now,
+ // can't do much, but let's not fail
+ else if (undetParams contains sym) {
+ // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
+ mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
} else {
- EmptyTree // a manifest should have been found by normal searchImplicit
+ // a manifest should have been found by normal searchImplicit
+ EmptyTree
}
case RefinedType(parents, decls) =>
// refinement is not generated yet
if (hasLength(parents, 1)) findManifest(parents.head)
- else if (full) manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
- else mot(erasure.erasure.intersectionDominator(parents))
+ else if (full) manifestFactoryCall("intersectionType", tp, parents map findSubManifest: _*)
+ else mot(erasure.erasure.intersectionDominator(parents), from, to)
case ExistentialType(tparams, result) =>
- mot(tp1.skolemizeExistential)
+ mot(tp1.skolemizeExistential, from, to)
case _ =>
EmptyTree
}
}
- mot(tp)
+ mot(tp, Nil, Nil)
}
def wrapResult(tree: Tree): SearchResult =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 6c27bcace6..3e13f6ddb1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -9,7 +9,9 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
+import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
import symtab.Flags._
+import scala.annotation.tailrec
/** This trait ...
*
@@ -18,10 +20,11 @@ import symtab.Flags._
*/
trait Infer {
self: Analyzer =>
+
import global._
import definitions._
-
- private final val inferInfo = false //@MDEBUG
+ import typer.printInference
+ import typeDebug.ptBlock
/* -- Type parameter inference utility functions --------------------------- */
@@ -69,13 +72,12 @@ trait Infer {
private class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
override def getMessage(): String = getmsg()
}
+ private def ifNoInstance[T](f: String => T): PartialFunction[Throwable, T] = {
+ case x: NoInstance => f(x.getMessage)
+ }
- /** map every TypeVar to its constraint.inst field.
+ /** Map every TypeVar to its constraint.inst field.
* throw a NoInstance exception if a NoType or WildcardType is encountered.
- *
- * @param tp ...
- * @return ...
- * @throws NoInstance
*/
object instantiate extends TypeMap {
private var excludedVars = immutable.Set[TypeVar]()
@@ -86,7 +88,7 @@ trait Infer {
if (constr.inst == NoType) {
throw new DeferredNoInstance(() =>
"no unique instantiation of type variable " + origin + " could be found")
- } else if (excludedVars contains tv) {
+ } else if (excludedVars(tv)) {
throw new NoInstance("cyclic instantiation")
} else {
excludedVars += tv
@@ -125,37 +127,37 @@ trait Infer {
}
}
- /** Solve constraint collected in types <code>tvars</code>.
+ /** Solve constraint collected in types `tvars`.
*
* @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to <code>tvars</code>
+ * @param tparams The type parameters corresponding to `tvars`
* @param variances The variances of type parameters; need to reverse
* solution direction for all contravariant variables.
- * @param upper When <code>true</code> search for max solution else min.
+ * @param upper When `true` search for max solution else min.
* @throws NoInstance
*/
def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
-// def boundsString(tvar: TypeVar) =
-// "\n "+
-// ((tvar.constr.loBounds map (_ + " <: " + tvar.origin.typeSymbol.name)) :::
-// (tvar.constr.hiBounds map (tvar.origin.typeSymbol.name + " <: " + _)) mkString ", ")
+
if (!solve(tvars, tparams, variances, upper, depth)) {
-// no panic, it's good enough to just guess a solution, we'll find out
-// later whether it works.
-// @M danger, Will Robinson! this means that you should never trust inferred type arguments!
-// need to call checkBounds on the args/typars or type1 on the tree for the expression that results from type inference
-// see e.g., #2421: implicit search had been ignoring this caveat
-// throw new DeferredNoInstance(() =>
-// "no solution exists for constraints"+(tvars map boundsString))
+ // no panic, it's good enough to just guess a solution, we'll find out
+ // later whether it works. *ZAP* @M danger, Will Robinson! this means
+ // that you should never trust inferred type arguments!
+ //
+ // Need to call checkBounds on the args/typars or type1 on the tree
+ // for the expression that results from type inference see e.g., #2421:
+ // implicit search had been ignoring this caveat
+ // throw new DeferredNoInstance(() =>
+ // "no solution exists for constraints"+(tvars map boundsString))
+ }
+ for (tvar <- tvars ; if tvar.constr.inst == tvar) {
+ if (tvar.origin.typeSymbol.info eq ErrorType)
+ // this can happen if during solving a cyclic type parameter
+ // such as T <: T gets completed. See #360
+ tvar.constr.inst = ErrorType
+ else
+ assert(false, tvar.origin+" at "+tvar.origin.typeSymbol.owner)
}
- for (tvar <- tvars)
- if (tvar.constr.inst == tvar)
- if (tvar.origin.typeSymbol.info eq ErrorType) {
- // this can happen if during solving a cyclic type parameter
- // such as T <: T gets completed. See #360
- tvar.constr.inst = ErrorType
- } else assert(false, tvar.origin+" at "+tvar.origin.typeSymbol.owner)
tvars map instantiate
}
@@ -263,23 +265,22 @@ trait Infer {
Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
}
new AccessError(tree, sym, pre,
- if (settings.check.isDefault) {
+ if (settings.check.isDefault)
analyzer.lastAccessCheckDetails
- } else {
- "\n because of an internal error (no accessible symbol):" +
- "\nsym = " + sym +
- "\nunderlying(sym) = " + underlying(sym) +
- "\npre = " + pre +
- "\nsite = " + site +
- "\ntree = " + tree +
- "\nsym.accessBoundary(sym.owner) = " + sym.accessBoundary(sym.owner) +
- "\nsym.ownerChain = " + sym.ownerChain +
- "\nsym.owner.thisType = " + sym.owner.thisType +
- "\ncontext.owner = " + context.owner +
- "\ncontext.outer.enclClass.owner = " + context.outer.enclClass.owner
- }
+ else
+ ptBlock("because of an internal error (no accessible symbol)",
+ "sym.ownerChain" -> sym.ownerChain,
+ "underlying(sym)" -> underlying(sym),
+ "pre" -> pre,
+ "site" -> site,
+ "tree" -> tree,
+ "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
+ "context.owner" -> context.owner,
+ "context.outer.enclClass.owner" -> context.outer.enclClass.owner
+ )
)
- } else {
+ }
+ else {
if(sym1.isTerm)
sym1.cookJavaRawInfo() // xform java rawtypes into existentials
@@ -308,6 +309,7 @@ trait Infer {
* they are: perhaps someone more familiar with the intentional distinctions
* can examine the now much smaller concrete implementations below.
*/
+/*
abstract class CompatibilityChecker {
def resultTypeCheck(restpe: Type, arg: Type): Boolean
def argumentCheck(arg: Type, param: Type): Boolean
@@ -317,7 +319,7 @@ trait Infer {
val MethodType(params, restpe) = tp
val TypeRef(pre, sym, args) = pt
- if (sym.isAliasType) apply(tp, pt.dealias)
+ if (sym.isAliasType) apply(tp, pt.normalize)
else if (sym.isAbstractType) apply(tp, pt.bounds.lo)
else {
val len = args.length - 1
@@ -353,7 +355,7 @@ trait Infer {
}
object isPlausiblyCompatible extends CompatibilityChecker {
- def resultTypeCheck(restpe: Type, arg: Type) = isPlausiblySubType(restpe, arg)
+ def resultTypeCheck(restpe: Type, arg: Type) = isPlausiblyCompatible(restpe, arg)
def argumentCheck(arg: Type, param: Type) = isPlausiblySubType(arg, param)
def lastChanceCheck(tp: Type, pt: Type) = false
}
@@ -367,21 +369,76 @@ trait Infer {
case _ => super.apply(tp, pt)
}
}
+*/
+ def isPlausiblyCompatible(tp: Type, pt: Type) = checkCompatibility(true, tp, pt)
+ def normSubType(tp: Type, pt: Type) = checkCompatibility(false, tp, pt)
+
+ @tailrec private def checkCompatibility(fast: Boolean, tp: Type, pt: Type): Boolean = tp match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit)
+ checkCompatibility(fast, restpe, pt)
+ else pt match {
+ case tr @ TypeRef(pre, sym, args) =>
+
+ if (sym.isAliasType) checkCompatibility(fast, tp, pt.normalize)
+ else if (sym.isAbstractType) checkCompatibility(fast, tp, pt.bounds.lo)
+ else {
+ val len = args.length - 1
+ hasLength(params, len) &&
+ sym == FunctionClass(len) && {
+ var ps = params
+ var as = args
+ if (fast) {
+ while (ps.nonEmpty && as.nonEmpty) {
+ if (!isPlausiblySubType(as.head, ps.head.tpe))
+ return false
+ ps = ps.tail
+ as = as.tail
+ }
+ } else {
+ while (ps.nonEmpty && as.nonEmpty) {
+ if (!(as.head <:< ps.head.tpe))
+ return false
+ ps = ps.tail
+ as = as.tail
+ }
+ }
+ ps.isEmpty && as.nonEmpty && {
+ val lastArg = as.head
+ as.tail.isEmpty && checkCompatibility(fast, restpe, lastArg)
+ }
+ }
+ }
+
+ case _ => if (fast) false else tp <:< pt
+ }
+ case NullaryMethodType(restpe) => checkCompatibility(fast, restpe, pt)
+ case PolyType(_, restpe) => checkCompatibility(fast, restpe, pt)
+ case ExistentialType(_, qtpe) => if (fast) checkCompatibility(fast, qtpe, pt) else normalize(tp) <:< pt // is !fast case needed??
+ case _ => if (fast) isPlausiblySubType(tp, pt) else tp <:< pt
+ }
+
/** This expresses more cleanly in the negative: there's a linear path
* to a final true or false.
*/
private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2)
- private def isImpossibleSubType(tp1: Type, tp2: Type) = {
- (tp1.dealias, tp2.dealias) match {
- case (TypeRef(_, sym1, _), TypeRef(_, sym2, _)) =>
- sym1.isClass &&
- sym2.isClass &&
- !(sym1 isSubClass sym2) &&
- !(sym1 isNumericSubClass sym2)
- case _ =>
- false
- }
+ private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.normalize.widen match {
+ case tr1 @ TypeRef(_, sym1, _) =>
+ // If the lhs is an abstract type, we can't rule out a subtype
+ // relationship because we don't know what it is.
+ !sym1.isAbstractType && (tp2.normalize.widen match {
+ case TypeRef(_, sym2, _) =>
+ sym1.isClass &&
+ sym2.isClass &&
+ !(sym1 isSubClass sym2) &&
+ !(sym1 isNumericSubClass sym2)
+ case RefinedType(parents, decls) =>
+ decls.nonEmpty &&
+ tr1.member(decls.head.name) == NoSymbol
+ case _ => false
+ })
+ case _ => false
}
def isCompatible(tp: Type, pt: Type): Boolean = {
@@ -444,12 +501,13 @@ trait Infer {
* @param pt ...
* @return ...
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, checkCompat: (Type, Type) => Boolean = isCompatible): List[Type] = {
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): List[Type] = {
val tvars = tparams map freshVar
- if (checkCompat(restpe.instantiateTypeParams(tparams, tvars), pt)) {
+ val instResTp = restpe.instantiateTypeParams(tparams, tvars)
+ if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
try {
// If the restpe is an implicit method, and the expected type is fully defined
- // optimze type variables wrt to the implicit formals only; ignore the result type.
+ // optimize type variables wrt to the implicit formals only; ignore the result type.
// See test pos/jesper.scala
val varianceType = restpe match {
case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
@@ -571,7 +629,7 @@ trait Infer {
tparam -> Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
- else if (targ.typeSymbol.isModuleClass) targ // this infers Foo.type instead of "object Foo"
+ else if (targ.typeSymbol.isModuleClass) targ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
else targ.widen
)
}
@@ -584,73 +642,82 @@ trait Infer {
* Undetermined type arguments are represented by `definitions.NothingClass.tpe'.
* No check that inferred parameters conform to their bounds is made here.
*
- * bq: was private, but need it for unapply checking
- *
* @param tparams the type parameters of the method
* @param formals the value parameter types of the method
* @param restp the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
* @return @see adjustTypeArgs
-
+ *
* @throws NoInstance
*/
def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = {
val tvars = tparams map freshVar
- if (inferInfo)
- println("methTypeArgs tparams = "+tparams+
- ", formals = "+formals+
- ", restpe = "+restpe+
- ", argtpes = "+argtpes+
- ", pt = "+pt+
- ", tvars = "+tvars+" "+(tvars map (_.constr)))
- if (!sameLength(formals, argtpes)) {
+ if (!sameLength(formals, argtpes))
throw new NoInstance("parameter lists differ in length")
- }
- if (inferInfo) // @MDEBUG
- println("methTypeArgs "+
- " tparams = "+tparams+"\n"+
- " formals = "+formals+"\n"+
- " restpe = "+restpe+"\n"+
- " restpe_inst = "+restpe.instantiateTypeParams(tparams, tvars)+"\n"+
- " argtpes = "+argtpes+"\n"+
- " pt = "+pt)
-
- // check first whether type variables can be fully defined from
- // expected result type.
- if (!isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt)) {
-// just wait and instantiate from the arguments.
-// that way, we can try to apply an implicit conversion afterwards.
-// This case could happen if restpe is not fully defined, so that
-// search for an implicit from it to pt fails because of an ambiguity.
-// See #0347. Therefore, the following two lines are commented out.
-// throw new DeferredNoInstance(() =>
-// "result type " + normalize(restpe) + " is incompatible with expected type " + pt)
- }
+ val restpeInst = restpe.instantiateTypeParams(tparams, tvars)
+ printInference(
+ ptBlock("methTypeArgs",
+ "tparams" -> tparams,
+ "formals" -> formals,
+ "restpe" -> restpe,
+ "restpeInst" -> restpeInst,
+ "argtpes" -> argtpes,
+ "pt" -> pt,
+ "tvars" -> tvars,
+ "constraints" -> tvars.map(_.constr)
+ )
+ )
+
+ // first check if typevars can be fully defined from the expected type.
+ // The return value isn't used, so I'm making it obvious that this call is
+ // made for its side effects, because a function called "isXXX" is not the
+ // most obvious side-effecter.
+ isConservativelyCompatible(restpeInst, pt)
+
+ // Return value unused with the following explanation:
+ //
+ // Just wait and instantiate from the arguments. That way,
+ // we can try to apply an implicit conversion afterwards.
+ // This case could happen if restpe is not fully defined, so the
+ // search for an implicit from restpe => pt fails due to ambiguity.
+ // See #347. Therefore, the following two lines are commented out.
+ //
+ // throw new DeferredNoInstance(() =>
+ // "result type " + normalize(restpe) + " is incompatible with expected type " + pt)
+
for (tvar <- tvars)
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
(argtpes, formals).zipped map { (argtpe, formal) =>
- //@M isCompatible has side-effect: isSubtype0 will register subtype checks in the tvar's bounds
- if (!isCompatible(argtpe.deconst.instantiateTypeParams(tparams, tvars),
- formal.instantiateTypeParams(tparams, tvars))) {
+ val tp1 = argtpe.deconst.instantiateTypeParams(tparams, tvars)
+ val pt1 = formal.instantiateTypeParams(tparams, tvars)
+
+ // Note that isCompatible side-effects: subtype checks involving typevars
+ // are recorded in the typevar's bounds (see TypeConstraint)
+ if (!isCompatible(tp1, pt1)) {
throw new DeferredNoInstance(() =>
- "argument expression's type is not compatible with formal parameter type" +
- foundReqMsg(argtpe.deconst.instantiateTypeParams(tparams, tvars), formal.instantiateTypeParams(tparams, tvars)))
+ "argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1))
}
- ()
}
- if (inferInfo)
- println("solve "+tvars+" "+(tvars map (_.constr)))
- val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals),
- false, lubDepth(formals) max lubDepth(argtpes))
-// val res =
- adjustTypeArgs(tparams, targs, restpe)
-// println("meth type args "+", tparams = "+tparams+", formals = "+formals+", restpe = "+restpe+", argtpes = "+argtpes+", underlying = "+(argtpes map (_.widen))+", pt = "+pt+", uninstantiated = "+uninstantiated.toList+", result = "+res) //DEBUG
-// res
+ val targs = solvedTypes(
+ tvars, tparams, tparams map varianceInTypes(formals),
+ false, lubDepth(formals) max lubDepth(argtpes)
+ )
+ val result = adjustTypeArgs(tparams, targs, restpe)
+
+ printInference(
+ ptBlock("methTypeArgs result",
+ "tvars" -> tvars,
+ "constraints" -> tvars.map(_.constr),
+ "targs" -> targs,
+ "adjusted type args" -> result
+ )
+ )
+ result
}
private[typechecker] def followApply(tp: Type): Type = tp match {
@@ -683,9 +750,8 @@ trait Infer {
* - namesOK is false when there's an invalid use of named arguments
*/
private def checkNames(argtpes: List[Type], params: List[Symbol]) = {
- val argPos = (new Array[Int](argtpes.length)) map (x => -1)
- var positionalAllowed = true
- var namesOK = true
+ val argPos = Array.fill(argtpes.length)(-1)
+ var positionalAllowed, namesOK = true
var index = 0
val argtpes1 = argtpes map {
case NamedType(name, tp) => // a named argument
@@ -772,7 +838,7 @@ trait Infer {
try {
val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
// #2665: must use weak conformance, not regular one (follow the monomorphic case above)
- (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, isWeaklyCompatible) ne null) &&
+ (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true) ne null) &&
isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
} catch {
case ex: NoInstance => false
@@ -1055,8 +1121,8 @@ trait Infer {
errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages +=
(targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
(for ((a, p) <- arityMismatches)
- yield a+qualify(a,p)+ " has "+reporter.countElementsAsString(a.typeParams.length, "type parameter")+", but "+
- p+qualify(p,a)+" has "+reporter.countAsString(p.typeParams.length)).toList.mkString(", ") +
+ yield a+qualify(a,p)+ " has "+countElementsAsString(a.typeParams.length, "type parameter")+", but "+
+ p+qualify(p,a)+" has "+countAsString(p.typeParams.length)).toList.mkString(", ") +
(for ((a, p) <- varianceMismatches)
yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
@@ -1075,39 +1141,52 @@ trait Infer {
* attempts fail, an error is produced.
*/
def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) {
- if (inferInfo)
- println("infer argument instance "+tree+":"+tree.tpe+"\n"+
- " undetparams = "+undetparams+"\n"+
- " strict pt = "+strictPt+"\n"+
- " lenient pt = "+lenientPt)
+ printInference(
+ ptBlock("inferArgumentInstance",
+ "tree" -> tree,
+ "tree.tpe" -> tree.tpe,
+ "undetparams" -> undetparams,
+ "strictPt" -> strictPt,
+ "lenientPt" -> lenientPt
+ )
+ )
var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)
if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt)) {
targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)
}
substExpr(tree, undetparams, targs, lenientPt)
+ printInference("[inferArgumentInstance] finished, targs = " + targs)
}
-
/** Infer type arguments `targs` for `tparams` of polymorphic expression in `tree`, given prototype `pt`.
*
* Substitute `tparams` to `targs` in `tree`, after adjustment by `adjustTypeArgs`, returning the type parameters that were not determined.
* If passed, infers against specified type `treeTp` instead of `tree.tpe`.
*/
- def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, checkCompat: (Type, Type) => Boolean = isCompatible): List[Symbol] = {
+ def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
- if (inferInfo)
- println("infer expr instance "+tree+":"+tree.tpe+"\n"+
- " tparams = "+tparams+"\n"+
- " pt = "+pt)
-
- val targs = exprTypeArgs(tparams, treeTp, pt, checkCompat)
+ printInference(
+ ptBlock("inferExprInstance",
+ "tree" -> tree,
+ "tree.tpe"-> tree.tpe,
+ "tparams" -> tparams,
+ "pt" -> pt
+ )
+ )
+ val targs = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targs, pt)
List()
} else {
val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, targs)
- if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
+ printInference(
+ ptBlock("inferExprInstance/AdjustedTypeArgs",
+ "okParams" -> okParams,
+ "okArgs" -> okArgs,
+ "leftUndet" -> leftUndet
+ )
+ )
substExpr(tree, okParams, okArgs, pt)
leftUndet
}
@@ -1135,49 +1214,59 @@ trait Infer {
/** Substitute free type variables <code>undetparams</code> of application
* <code>fn(args)</code>, given prototype <code>pt</code>.
*
- * @param fn ...
- * @param undetparams ...
- * @param args ...
- * @param pt ...
+ * @param fn the function that needs to be instantiated.
+ * @param undetparams the parameters that need to be determined
+ * @param args the actual arguments supplied in the call.
+ * @param pt the expected type of the function application
* @return The type parameters that remain uninstantiated,
* and that thus have not been substituted.
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
case MethodType(params0, _) =>
- if (inferInfo)
- println("infer method instance "+fn+"\n"+
- " undetparams = "+undetparams+"\n"+
- " args = "+args+"\n"+
- " pt = "+pt0)
+ printInference(
+ ptBlock("inferMethodInstance",
+ "fn" -> fn,
+ "undetparams" -> undetparams,
+ "args" -> args,
+ "pt0" -> pt0
+ )
+ )
+
try {
- val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+ val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(params0 map (_.tpe), args.length)
- val argtpes = actualTypes(args map (_.tpe.deconst), formals.length)
- val restpe = fn.tpe.resultType(argtpes)
- val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
+ val restpe = fn.tpe.resultType(argtpes)
+
+ val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
+ methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+
checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
val treeSubst = new TreeTypeSubstituter(okparams, okargs)
- treeSubst.traverse(fn)
- treeSubst.traverseTrees(args)
- if(leftUndet nonEmpty) { // #3890
- val leftUndet1 = treeSubst.typeSubst mapOver leftUndet
- if(leftUndet ne leftUndet1) {
- val symSubst = new TreeSymSubstTraverser(leftUndet, leftUndet1)
- symSubst.traverse(fn)
- symSubst.traverseTrees(args)
- }
- leftUndet1
- } else leftUndet
- } catch {
- case ex: NoInstance =>
- errorTree(fn,
- "no type parameters for " +
- applyErrorMsg(
- fn, " exist so that it can be applied to arguments ",
- args map (_.tpe.widen), WildcardType) +
- "\n --- because ---\n" + ex.getMessage())
- List()
+ treeSubst traverseTrees fn :: args
+
+ val result = leftUndet match {
+ case Nil => Nil
+ case xs =>
+ // #3890
+ val xs1 = treeSubst.typeSubst mapOver xs
+ if (xs ne xs1)
+ new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args
+
+ xs1
+ }
+ if (result.nonEmpty)
+ printInference("inferMethodInstance, still undetermined: " + result)
+
+ result
+ }
+ catch ifNoInstance { msg =>
+ errorTree(fn, "no type parameters for " +
+ applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) +
+ "\n --- because ---\n" + msg
+ )
+ Nil
}
}
@@ -1188,7 +1277,7 @@ trait Infer {
case TypeRef(_, sym, _) if sym.isAliasType =>
widen(tp.normalize)
case rtp @ RefinedType(parents, decls) =>
- copyRefinedType(rtp, parents mapConserve (widen), decls)
+ copyRefinedType(rtp, parents mapConserve widen, decls)
case AnnotatedType(_, underlying, _) =>
widen(underlying)
case _ =>
@@ -1322,6 +1411,7 @@ trait Infer {
} else {
for (arg <- args) {
if (sym == ArrayClass) check(arg, bound)
+ else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
else arg match {
case TypeRef(_, sym, _) if isLocalBinding(sym) =>
;
@@ -1497,14 +1587,11 @@ trait Infer {
*/
def inferExprAlternative(tree: Tree, pt: Type): Unit = tree.tpe match {
case OverloadedType(pre, alts) => tryTwice {
- var alts1 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ val secondTry = alts0.isEmpty
+ val alts1 = if (secondTry) alts else alts0
+
//println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
- val applicable = alts1
- var secondTry = false
- if (alts1.isEmpty) {
- alts1 = alts
- secondTry = true
- }
def improves(sym1: Symbol, sym2: Symbol): Boolean =
sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) ||
{ val tp1 = pre.memberType(sym1)
@@ -1512,9 +1599,12 @@ trait Infer {
(tp2 == ErrorType ||
!global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) ||
isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) }
+
val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) =>
if (improves(alt, best)) alt else best)
+
val competing = alts1 dropWhile (alt => best == alt || improves(best, alt))
+
if (best == NoSymbol) {
if (settings.debug.value) {
tree match {
@@ -1635,9 +1725,7 @@ trait Infer {
if (context.implicitsEnabled) {
val reportGeneralErrors = context.reportGeneralErrors
context.reportGeneralErrors = false
- try {
- context.withImplicitsDisabled(infer)
- }
+ try context.withImplicitsDisabled(infer)
catch {
case ex: CyclicReference => throw ex
case ex: TypeError =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 8f20f2a120..fddf115730 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -614,7 +614,6 @@ trait Namers { self: Analyzer =>
* modifier is the present means of signaling that a constant
* value should not be widened, so it has a use even in situations
* where it is otherwise redundant (such as in a singleton.)
- * Locally defined symbols are also excluded from widening.
*/
private def widenIfNecessary(sym: Symbol, tpe: Type, pt: Type): Type = {
val getter =
@@ -631,13 +630,23 @@ trait Namers { self: Analyzer =>
case _ =>
false
}
+
val tpe1 = tpe.deconst
val tpe2 = tpe1.widen
- if ((sym.isVariable || sym.isMethod && !sym.hasAccessorFlag))
+
+ // This infers Foo.type instead of "object Foo"
+ // See Infer#adjustTypeArgs for the polymorphic case.
+ if (tpe.typeSymbolDirect.isModuleClass) tpe1
+ else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
if (tpe2 <:< pt) tpe2 else tpe1
else if (isHidden(tpe)) tpe2
- else if (sym.isFinal || sym.isLocal) tpe
- else tpe1
+ // In an attempt to make pattern matches involving method local vals
+ // compilable into switches, for a time I had a more generous condition:
+ // `if (sym.isFinal || sym.isLocal) tpe else tpe1`
+ // This led to issues with expressions like classOf[List[_]] which apparently
+ // depend on being deconst-ed here, so this is again the original:
+ else if (!sym.isFinal) tpe1
+ else tpe
}
// sets each ValDef's symbol
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index f1eb904c58..6f01139ac2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import collection.mutable.{HashSet, HashMap}
+import collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
@@ -75,6 +75,11 @@ abstract class RefChecks extends InfoTransform {
}
}
+ def accessFlagsToString(sym: Symbol) = flagsToString(
+ sym getFlag (PRIVATE | PROTECTED),
+ if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
+ )
+
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer;
@@ -83,32 +88,39 @@ abstract class RefChecks extends InfoTransform {
var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
- private def checkOverloadedRestrictions(clazz: Symbol) {
- def check(members: List[Symbol]): Unit = members match {
- case x :: xs =>
- if (x.hasParamWhich(_.hasDefaultFlag) && !nme.isProtectedAccessorName(x.name)) {
- val others = xs.filter(alt => {
- alt.name == x.name &&
- (alt hasParamWhich (_.hasDefaultFlag)) &&
- (!alt.isConstructor || alt.owner == x.owner) // constructors of different classes are allowed to have defaults
- })
- if (!others.isEmpty) {
- val all = x :: others
- val rest = if (all.exists(_.owner != clazz)) ".\nThe members with defaults are defined in "+
- all.map(_.owner).mkString("", " and ", ".") else "."
- unit.error(clazz.pos, "in "+ clazz +", multiple overloaded alternatives of "+ x +
- " define default arguments"+ rest)
- }
+ private def checkOverloadedRestrictions(clazz: Symbol): Unit = {
+ // Using the default getters (such as methodName$default$1) as a cheap way of
+ // finding methods with default parameters. This way, we can limit the members to
+ // those with the DEFAULTPARAM flag, and infer the methods. Looking for the methods
+ // directly requires inspecting the parameter list of every one. That modification
+ // shaved 95% off the time spent in this method.
+ val defaultGetters = clazz.info.findMember(nme.ANYNAME, 0L, DEFAULTPARAM, false).alternatives
+ val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
+
+ defaultMethodNames.distinct foreach { name =>
+ val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
+ val haveDefaults = methods filter (sym => sym.hasParamWhich(_.hasDefaultFlag) && !nme.isProtectedAccessorName(sym.name))
+
+ if (haveDefaults.lengthCompare(1) > 0) {
+ val owners = haveDefaults map (_.owner)
+ // constructors of different classes are allowed to have defaults
+ if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) {
+ unit.error(clazz.pos,
+ "in "+ clazz +
+ ", multiple overloaded alternatives of "+ haveDefaults.head +
+ " define default arguments" + (
+ if (owners.forall(_ == clazz)) "."
+ else ".\nThe members with defaults are defined in "+owners.map(_.fullLocationString).mkString("", " and ", ".")
+ )
+ )
}
- check(xs)
- case _ => ()
+ }
}
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
val alts = clazz.info.decl(sym.name).alternatives
if (alts.size > 1)
alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
}
- check(clazz.info.members)
}
// Override checking ------------------------------------------------------------
@@ -268,11 +280,6 @@ abstract class RefChecks extends InfoTransform {
}
}
- def accessFlagsToString(sym: Symbol) = flagsToString(
- sym getFlag (PRIVATE | PROTECTED),
- if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
- )
-
def overrideAccessError() {
val otherAccess = accessFlagsToString(other)
overrideError("has weaker access privileges; it should be "+ (if (otherAccess == "") "public" else "at least "+otherAccess))
@@ -352,6 +359,11 @@ abstract class RefChecks extends InfoTransform {
overrideError("must be declared lazy to override a concrete lazy value")
} else {
checkOverrideTypes()
+ if (settings.warnNullaryOverride.value) {
+ if (other.paramss.isEmpty && !member.paramss.isEmpty) {
+ unit.warning(member.pos, "non-nullary method overrides nullary method")
+ }
+ }
}
}
@@ -495,6 +507,7 @@ abstract class RefChecks extends InfoTransform {
)
}
else if (underlying.isMethod) {
+
// If there is a concrete method whose name matches the unimplemented
// abstract method, and a cursory examination of the difference reveals
// something obvious to us, let's make it more obvious to them.
@@ -651,11 +664,13 @@ abstract class RefChecks extends InfoTransform {
* </ol>
*/
private def validateBaseTypes(clazz: Symbol) {
+ val seenParents = mutable.HashSet[Type]()
val seenTypes = new Array[List[Type]](clazz.info.baseTypeSeq.length)
- for (i <- 0 until seenTypes.length) seenTypes(i) = Nil
+ for (i <- 0 until seenTypes.length)
+ seenTypes(i) = Nil
/** validate all base types of a class in reverse linear order. */
- def register(tp: Type) {
+ def register(tp: Type): Unit = {
// if (clazz.fullName.endsWith("Collection.Projection"))
// println("validate base type "+tp)
val baseClass = tp.typeSymbol
@@ -667,7 +682,9 @@ abstract class RefChecks extends InfoTransform {
tp :: (seenTypes(index) filter (tp1 => !(tp <:< tp1)))
}
}
- tp.parents foreach register
+ val remaining = tp.parents filterNot seenParents
+ seenParents ++= remaining
+ remaining foreach register
}
register(clazz.tpe)
for (i <- 0 until seenTypes.length) {
@@ -694,7 +711,7 @@ abstract class RefChecks extends InfoTransform {
private val CoVariance = 1
private val AnyVariance = 2
- private val escapedPrivateLocals = new HashSet[Symbol]
+ private val escapedPrivateLocals = new mutable.HashSet[Symbol]
val varianceValidator = new Traverser {
@@ -844,7 +861,7 @@ abstract class RefChecks extends InfoTransform {
}
private var currentLevel: LevelInfo = null
- private val symIndex = new HashMap[Symbol, Int]
+ private val symIndex = new mutable.HashMap[Symbol, Int]
private def pushLevel() {
currentLevel = new LevelInfo(currentLevel)
@@ -1147,11 +1164,12 @@ abstract class RefChecks extends InfoTransform {
*/
private def checkDeprecated(sym: Symbol, pos: Position) {
if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
- val dmsg = sym.deprecationMessage map (": " + _) getOrElse ""
-
- unit.deprecationWarning(pos, sym.fullLocationString + " is deprecated" + dmsg)
+ unit.deprecationWarning(pos, "%s%s is deprecated%s".format(
+ sym, sym.locationString, sym.deprecationMessage map (": " + _) getOrElse "")
+ )
}
}
+
/** Similar to deprecation: check if the symbol is marked with @migration
* indicating it has changed semantics between versions.
*/
@@ -1160,6 +1178,60 @@ abstract class RefChecks extends InfoTransform {
unit.warning(pos, sym.fullLocationString + " has changed semantics:\n" + msg)
}
+ private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
+ (otherSym != NoSymbol)
+ && !otherSym.isTypeParameterOrSkolem
+ && !otherSym.isExistentiallyBound
+ && (otherSym isLessAccessibleThan memberSym)
+ && (otherSym isLessAccessibleThan memberSym.enclClass)
+ )
+ private def lessAccessibleSymsInType(other: Type, memberSym: Symbol): List[Symbol] = {
+ val extras = other match {
+ case TypeRef(pre, _, args) =>
+ // checking the prefix here gives us spurious errors on e.g. a private[process]
+ // object which contains a type alias, which normalizes to a visible type.
+ args filterNot (_ eq NoPrefix) flatMap (tp => lessAccessibleSymsInType(tp, memberSym))
+ case _ =>
+ Nil
+ }
+ if (lessAccessible(other.typeSymbol, memberSym)) other.typeSymbol :: extras
+ else extras
+ }
+ private def warnLessAccessible(otherSym: Symbol, memberSym: Symbol) {
+ val comparison = accessFlagsToString(memberSym) match {
+ case "" => ""
+ case acc => " is " + acc + " but"
+ }
+ val cannot =
+ if (memberSym.isDeferred) "may be unable to provide a concrete implementation of"
+ else "may be unable to override"
+
+ unit.warning(memberSym.pos,
+ "%s%s references %s %s.".format(
+ memberSym.fullLocationString, comparison,
+ accessFlagsToString(otherSym), otherSym
+ ) + "\nClasses which cannot access %s %s %s.".format(
+ otherSym.decodedName, cannot, memberSym.decodedName)
+ )
+ }
+ /** Warn about situations where a method signature will include a type which
+ * has more restrictive access than the method itself.
+ */
+ private def checkAccessibilityOfReferencedTypes(tree: Tree) {
+ val member = tree.symbol
+
+ // types of the value parameters
+ member.paramss.flatten foreach { p =>
+ val normalized = p.tpe.normalize
+ if ((normalized ne p.tpe) && lessAccessibleSymsInType(normalized, member).isEmpty) ()
+ else lessAccessibleSymsInType(p.tpe, member) foreach (sym => warnLessAccessible(sym, member))
+ }
+ // upper bounds of type parameters
+ member.typeParams.map(_.info.bounds.hi.widen) foreach { tp =>
+ lessAccessibleSymsInType(tp, member) foreach (sym => warnLessAccessible(sym, member))
+ }
+ }
+
/** Check that a deprecated val or def does not override a
* concrete, non-deprecated method. If it does, then
* deprecation is meaningless.
@@ -1184,6 +1256,7 @@ abstract class RefChecks extends InfoTransform {
case _ =>
false
}
+
private def checkTypeRef(tp: Type, pos: Position) = tp match {
case TypeRef(pre, sym, args) =>
checkDeprecated(sym, pos)
@@ -1230,7 +1303,7 @@ abstract class RefChecks extends InfoTransform {
private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
val sym = tree.symbol
- if (sym.isSourceMethod && sym.hasFlag(CASE) && sym.name == nme.apply)
+ if (sym.isSourceMethod && sym.isCase && sym.name == nme.apply)
toConstructor(tree.pos, tree.tpe)
else {
ifNot
@@ -1322,6 +1395,19 @@ abstract class RefChecks extends InfoTransform {
}
}
+ // Warning about nullary methods returning Unit.
+ private def checkNullaryMethodReturnType(sym: Symbol) = sym.tpe match {
+ case NullaryMethodType(restpe) if restpe.typeSymbol == UnitClass =>
+ // this may be the implementation of e.g. a generic method being parameterized
+ // on Unit, in which case we had better let it slide.
+ if (sym.isGetter || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))) ()
+ else unit.warning(sym.pos,
+ "side-effecting nullary methods are discouraged: suggest defining as `def %s()` instead".format(
+ sym.name.decode)
+ )
+ case _ => ()
+ }
+
override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
@@ -1340,6 +1426,12 @@ abstract class RefChecks extends InfoTransform {
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
+ if (settings.warnNullaryUnit.value)
+ checkNullaryMethodReturnType(sym)
+ if (settings.warnInaccessible.value) {
+ if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
+ checkAccessibilityOfReferencedTypes(tree)
+ }
tree
case Template(parents, self, body) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 2abecfa572..45663c8bee 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -421,7 +421,7 @@ trait TypeDiagnostics {
// Error suppression will squash some of these warnings unless we circumvent it.
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
- if (settings.Ywarndeadcode.value && context.unit != null && treeOK(tree) && exprOK) {
+ if (settings.warnDeadCode.value && context.unit != null && treeOK(tree) && exprOK) {
val saved = context.reportGeneralErrors
try {
context.reportGeneralErrors = true
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index b35a23ed50..0fe77f10cc 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -16,9 +16,9 @@ import scala.collection.{ mutable, immutable }
import scala.tools.nsc.util.BatchSourceFile
import mutable.ListBuffer
import symtab.Flags._
-
import util.Statistics
import util.Statistics._
+import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -80,6 +80,7 @@ trait Typers extends Modes {
abstract class Typer(context0: Context) extends TyperDiagnostics {
import context0.unit
+ import typeDebug.{ ptTree, ptBlock, ptLine }
val infer = new Inferencer(context0) {
override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
@@ -728,7 +729,7 @@ trait Typers extends Modes {
val tree1 = if (tree.isType) tree
else TypeApply(tree, tparams1 map (tparam =>
TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos //@M/tcpolyinfer: changed tparam.tpe to tparam.tpeHK
- context.undetparams = context.undetparams ::: tparams1
+ context.undetparams ++= tparams1
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
@@ -743,7 +744,7 @@ trait Typers extends Modes {
// Looking for a manifest of Nil: This has many potential types,
// so we need to instantiate to minimal type List[Nothing].
keepNothings = false, // retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
- checkCompat = isWeaklyCompatible) // #3808
+ useWeaklyCompatible = true) // #3808
}
val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree))
@@ -825,8 +826,8 @@ trait Typers extends Modes {
// Note that we treat Any and Nothing as kind-polymorphic.
// We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
// (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
- errorTree(tree, tree.tpe+" takes "+reporter.countElementsAsString(tree.tpe.typeParams.length, "type parameter")+
- ", expected: "+reporter.countAsString(pt.typeParams.length))
+ errorTree(tree, tree.tpe+" takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+
+ ", expected: "+countAsString(pt.typeParams.length))
tree setType tree.tpe
} else tree match { // (6)
case TypeTree() => tree
@@ -891,10 +892,16 @@ trait Typers extends Modes {
case TypeRef(_, sym, _) =>
// note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
// infinite expansion if pt is constant type ()
- if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) // (12)
+ if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
+ if (settings.warnValueDiscard.value)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
return typed(atPos(tree.pos)(Block(List(tree), Literal(()))), mode, pt)
- else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt))
+ }
+ else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
+ if (settings.warnNumericWiden.value)
+ context.unit.warning(tree.pos, "implicit numeric widening")
return typed(atPos(tree.pos)(Select(tree, "to"+sym.name)), mode, pt)
+ }
case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
return typed(adaptAnnotations(tree, mode, pt), mode, pt)
case _ =>
@@ -1315,7 +1322,6 @@ trait Typers extends Modes {
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
-
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
* all the time, it is exposed here the module/class typing methods go through it.
*/
@@ -2694,7 +2700,7 @@ trait Typers extends Modes {
(nme.ERROR, None)
} else {
names -= sym
- if(isJava) sym.cookJavaRawInfo() // #3429
+ if (isJava) sym.cookJavaRawInfo() // #3429
val annArg = tree2ConstArg(rhs, sym.tpe.resultType)
(sym.name, annArg)
}
@@ -2780,39 +2786,42 @@ trait Typers extends Modes {
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
- /** Given a set `rawSyms' of term- and type-symbols, and a type `tp'.
- * produce a set of fresh type parameters and a type so that it can be
- * abstracted to an existential type.
- * Every type symbol `T' in `rawSyms' is mapped to a clone.
- * Every term symbol `x' of type `T' in `rawSyms' is given an
- * associated type symbol of the following form:
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
*
- * type x.type <: T with <singleton>
+ * type x.type <: T with Singleton
*
- * The name of the type parameter is `x.type', to produce nice diagnostics.
- * The <singleton> parent ensures that the type parameter is still seen as a stable type.
- * Type symbols in rawSyms are fully replaced by the new symbols.
- * Term symbols are also replaced, except when they are the term
- * symbol of an Ident tree, in which case only the type of the
- * Ident is changed.
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
*/
protected def existentialTransform(rawSyms: List[Symbol], tp: Type) = {
val typeParams: List[Symbol] = rawSyms map { sym =>
val name = sym.name match {
case x: TypeName => x
- case x => newTypeName(x+".type")
+ case x => newTypeName(x + ".type")
}
- val bound = sym.existentialBound
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified: Symbol = sowner.newAbstractType(sym.pos, name).setFlag(EXISTENTIAL)
+ val bound = sym.existentialBound
+ val sowner = if (isRawParameter(sym)) context.owner else sym.owner
+ val quantified = sowner.newExistential(sym.pos, name)
- quantified.setInfo(bound.cloneInfo(quantified))
- quantified
+ quantified setInfo bound.cloneInfo(quantified)
}
- val typeParamTypes = typeParams map (_.tpe)
- //println("ex trans "+rawSyms+" . "+tp+" "+typeParamTypes+" "+(typeParams map (_.info)))//DEBUG
- for (tparam <- typeParams) tparam.setInfo(tparam.info.subst(rawSyms, typeParamTypes))
- (typeParams, tp.subst(rawSyms, typeParamTypes))
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ (
+ typeParams map (tparam => tparam setInfo tparam.info.subst(rawSyms, typeParamTypes)),
+ tp.subst(rawSyms, typeParamTypes)
+ )
}
/** Compute an existential type from raw hidden symbols `syms' and type `tp'
@@ -2973,16 +2982,17 @@ trait Typers extends Modes {
errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
}
- @inline final def deindentTyping() = if (printTypings) context.typingIndent = context.typingIndent.substring(0, context.typingIndent.length() - 2)
- @inline final def indentTyping() = if (printTypings) context.typingIndent += " "
- @inline final def printTyping(s: => String) = if (printTypings) println(context.typingIndent+s)
+ @inline final def deindentTyping() = context.typingIndentLevel -= 2
+ @inline final def indentTyping() = context.typingIndentLevel += 2
+ @inline final def printTyping(s: => String) = {
+ if (printTypings)
+ println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent))
+ }
+ @inline final def printInference(s: => String) = {
+ if (printInfers)
+ println(s)
+ }
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
def isPatternMode = inPatternMode(mode)
@@ -3120,7 +3130,7 @@ trait Typers extends Modes {
return fail
if (treeInfo.mayBeVarGetter(varsym)) {
- lhs1 match {
+ treeInfo.methPart(lhs1) match {
case Select(qual, name) =>
val sel = Select(qual, nme.getterToSetter(name.toTermName)) setPos lhs.pos
val app = Apply(sel, List(rhs)) setPos tree.pos
@@ -3187,7 +3197,7 @@ trait Typers extends Modes {
if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
TypeTree().setOriginal(tpt0)
- .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpe)))
+ .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
} else tpt0
}
@@ -3274,7 +3284,7 @@ trait Typers extends Modes {
}
}
- /** Try to apply function to arguments; if it does not work try to
+ /** Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
@@ -3284,6 +3294,17 @@ trait Typers extends Modes {
t
case ex: TypeError =>
stopTimer(failedApplyNanos, start)
+
+ // If the problem is with raw types, convert to existentials and try again.
+ // See #4712 for a case where this situation arises.
+ if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
+ val newtpe = rawToExistential(fun.tpe)
+ if (fun.tpe ne newtpe) {
+ // println("late cooking: "+fun+":"+fun.tpe) // DEBUG
+ return tryTypedApply(fun setType newtpe, args)
+ }
+ }
+
def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
case Block(_, r) => treesInResult(r)
case Match(_, cases) => cases
@@ -3295,9 +3316,13 @@ trait Typers extends Modes {
case _ => Nil
})
def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
-
- if (fun :: tree :: args exists errorInResult) {
- printTyping("second try for: "+fun+" and "+args)
+ val retry = fun :: tree :: args exists errorInResult
+ printTyping {
+ val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
+ if (retry) "second try: " + funStr
+ else "no second try: " + funStr + " because error not in result: " + ex.pos+"!="+tree.pos
+ }
+ if (retry) {
val Select(qual, name) = fun
val args1 = tryTypedArgs(args, forArgMode(fun, mode), ex)
val qual1 =
@@ -3307,8 +3332,7 @@ trait Typers extends Modes {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
return typed1(tree1, mode | SNDTRYmode, pt)
}
- } else printTyping("no second try for "+fun+" and "+args+" because error not in result:"+ex.pos+"!="+tree.pos)
-
+ }
reportTypeError(tree.pos, ex)
setError(tree)
}
@@ -3593,10 +3617,10 @@ trait Typers extends Modes {
!(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol) // null.is/as is not a dereference
}
// unit is null here sometimes; how are we to know when unit might be null? (See bug #2467.)
- if (settings.Xchecknull.value && isPotentialNullDeference && unit != null)
+ if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- result match {
+ val selection = result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
treeCopy.SelectFromTypeTree(
@@ -3616,6 +3640,21 @@ trait Typers extends Modes {
case _ =>
result
}
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions
+ // like x.getClass().newInstance() are typed with the type of x.
+ val isRefinableGetClass = (
+ selection.symbol.name == nme.getClass_
+ && selection.tpe.params.isEmpty
+ // TODO: If the type of the qualifier is inaccessible, we can cause private types
+ // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
+ // so for now it requires the type symbol be public.
+ && qual.tpe.typeSymbol.isPublic
+ )
+ if (isRefinableGetClass)
+ selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe))
+ else
+ selection
}
}
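
The getClass special casing above refines the selection's type so that chained calls see the qualifier's type. A usage-level sketch of the intended effect, assuming the refined return type applies to an ordinary public class; Widget is a hypothetical example, not part of the patch:

    class Widget { def ping() = "pong" }
    object GetClassSketch {
      def main(args: Array[String]): Unit = {
        val w = new Widget
        // With the refinement, w.getClass is typed as Class[_ <: Widget] rather than
        // Class[_], so newInstance() can be used where a Widget is expected.
        val w2: Widget = w.getClass.newInstance()
        println(w2.ping())
      }
    }
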
@@ -4185,7 +4224,9 @@ trait Typers extends Modes {
* @param pt ...
* @return ...
*/
- def typed(tree: Tree, mode: Int, pt: Type): Tree = { indentTyping()
+ def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ indentTyping()
+
def dropExistential(tp: Type): Type = tp match {
case ExistentialType(tparams, tpe) =>
if (settings.debug.value)
@@ -4198,6 +4239,7 @@ trait Typers extends Modes {
case _ => tp
}
+ var alreadyTyped = false
try {
if (Statistics.enabled) {
val t = currentTime()
@@ -4212,15 +4254,34 @@ trait Typers extends Modes {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- printTyping("typing "+tree+", pt = "+pt+", undetparams = "+context.undetparams+", implicits-enabled = "+context.implicitsEnabled+", silent = "+context.reportGeneralErrors+", context.owner = "+context.owner) //DEBUG
- var tree1 = if (tree.tpe ne null) tree else typed1(tree, mode, dropExistential(pt))
- printTyping("typed "+tree1+":"+tree1.tpe+(if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")+", undetparams = "+context.undetparams+", pt = "+pt) //DEBUG
+ alreadyTyped = tree.tpe ne null
+ var tree1: Tree = if (alreadyTyped) tree else {
+ printTyping(
+ ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ "undetparams" -> context.undetparams,
+ "implicitsEnabled" -> context.implicitsEnabled,
+ "silent" -> !context.reportGeneralErrors,
+ "context.owner" -> context.owner
+ )
+ )
+ val tree1 = typed1(tree, mode, dropExistential(pt))
+ printTyping("typed %s: %s%s".format(
+ ptTree(tree1), tree1.tpe,
+ if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")
+ )
+ tree1
+ }
tree1.tpe = addAnnotations(tree1, tree1.tpe)
-
val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
- printTyping("adapted "+tree1+":"+tree1.tpe.widen+" to "+pt+", "+context.undetparams) //DEBUG
+
+ if (!alreadyTyped) {
+ printTyping("adapted %s: %s to %s, %s".format(
+ tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ ) //DEBUG
+ }
+
// for (t <- tree1.tpe) assert(t != WildcardType)
// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -4228,7 +4289,7 @@ trait Typers extends Modes {
} catch {
case ex: TypeError =>
tree.tpe = null
- printTyping("caught "+ex+" in typed: "+tree) //DEBUG
+ printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
reportTypeError(tree.pos, ex)
setError(tree)
case ex: Exception =>
@@ -4241,6 +4302,7 @@ trait Typers extends Modes {
}
finally {
deindentTyping()
+
if (Statistics.enabled) {
val t = currentTime()
microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index bdf33476cc..3499ab86fd 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -15,7 +15,7 @@ abstract class CharArrayReader { self =>
def decodeUni: Boolean = true
/** An error routine to call on bad unicode escapes \\uxxxx. */
- protected def error(offset: Int, msg: String)
+ protected def error(offset: Int, msg: String): Unit
/** the last read character */
var ch: Char = _
@@ -68,10 +68,19 @@ abstract class CharArrayReader { self =>
(charOffset - p) % 2 == 0
}
def udigit: Int = {
- val d = digit2int(buf(charOffset), 16)
- if (d >= 0) charOffset += 1
- else error(charOffset, "error in unicode escape")
- d
+ if (charOffset >= buf.length) {
+ // Since the positioning code is very insistent about throwing exceptions,
+ // we have to decrement the position so our error message can be seen, since
+ // we are one past EOF. This happens with e.g. val x = \ u 1 <EOF>
+ error(charOffset - 1, "incomplete unicode escape")
+ SU
+ }
+ else {
+ val d = digit2int(buf(charOffset), 16)
+ if (d >= 0) charOffset += 1
+ else error(charOffset, "error in unicode escape")
+ d
+ }
}
if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) {
do charOffset += 1
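
The udigit change above checks the buffer bound before reading each hex digit, so an escape cut off at end of file reports "incomplete unicode escape" instead of indexing past the array. A standalone sketch of the same guard; hexDigitAt is a hypothetical helper, not the compiler's reader:

    object UnicodeEscapeSketch {
      def hexDigitAt(buf: Array[Char], off: Int): Int =
        if (off >= buf.length) { println("incomplete unicode escape"); -1 }
        else Character.digit(buf(off), 16)

      def main(args: Array[String]): Unit = {
        val truncated = Array('\\', 'u', '0', '0', '4')      // \u004<EOF>
        val complete  = Array('\\', 'u', '0', '0', '4', '1') // \u0041
        println(hexDigitAt(truncated, 5)) // -1, one past EOF
        println(hexDigitAt(complete, 5))  // 1, the last hex digit of 'A'
      }
    }
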
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 40c3316e09..23b53fb29f 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -8,7 +8,7 @@ package scala.tools.nsc
package util
import java.net.URL
-import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
import io.{ File, Directory, Path, Jar, AbstractFile, ClassAndJarInfo }
import scala.tools.util.StringOps.splitWhere
import Jar.isJarOrZip
@@ -55,21 +55,21 @@ object ClassPath {
* (name, list of origins)
* in the order they occur on the path.
*/
- def findDuplicates(cp: ClassPath[_]) = {
- def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
- def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
-
- /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- val flattened = (
- for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
- (pkgName, pkg, clazz)
- )
- val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
-
- /** Extract results. */
- for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- (name, dups map { case (_, cp, _) => toOriginString(cp) })
- }
+ // def findDuplicates(cp: ClassPath[_]) = {
+ // def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
+ // def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
+ //
+ // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ // val flattened = (
+ // for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
+ // (pkgName, pkg, clazz)
+ // )
+ // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+ //
+ // /** Extract results. */
+ // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ // (name, dups map { case (_, cp, _) => toOriginString(cp) })
+ // }
/** Split classpath using platform-dependent path separator */
def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
@@ -103,8 +103,10 @@ object ClassPath {
/** Expand dir out to contents, a la extdir */
def expandDir(extdir: String): List[String] = {
- val dir = Option(AbstractFile getDirectory extdir) getOrElse (return Nil)
- dir filter (_.isClassContainer) map (dir.sfile.get / _.name path) toList
+ AbstractFile getDirectory extdir match {
+ case null => Nil
+ case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
+ }
}
/** A useful name filter. */
@@ -149,7 +151,9 @@ object ClassPath {
for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
newClassPath(location)
- def classesInExpandedPath(path: String) = classesInPathImpl(path, true)
+ def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
+ classesInPathImpl(path, true).toIndexedSeq
+
def classesInPath(path: String) = classesInPathImpl(path, false)
// Internal
@@ -160,8 +164,9 @@ object ClassPath {
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
- assert(rep.name endsWith ".class", rep.name)
- rep.name dropRight 6
+ val name = rep.name
+ assert(endsClass(name), name)
+ name.substring(0, name.length - 6)
}
def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
@@ -170,16 +175,18 @@ object ClassPath {
override def isValidName(name: String) = !isTraitImplementation(name)
}
+ @inline private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
+ @inline private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
+ @inline private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
+
/** From the source file to its identifier.
*/
def toSourceName(f: AbstractFile): String = {
- val nme = f.name
- if (nme.endsWith(".scala"))
- nme dropRight 6
- else if (nme.endsWith(".java"))
- nme dropRight 5
- else
- throw new FatalError("Unexpected source file ending: " + nme)
+ val name = f.name
+
+ if (endsScala(name)) name.substring(0, name.length - 6)
+ else if (endsJava(name)) name.substring(0, name.length - 5)
+ else throw new FatalError("Unexpected source file ending: " + name)
}
}
import ClassPath._
@@ -215,29 +222,29 @@ abstract class ClassPath[T] {
/** Lists of entities.
*/
- def classes: List[AnyClassRep]
- def packages: List[ClassPath[T]]
- def sourcepaths: List[AbstractFile]
+ def classes: IndexedSeq[AnyClassRep]
+ def packages: IndexedSeq[ClassPath[T]]
+ def sourcepaths: IndexedSeq[AbstractFile]
/** Information which entails walking the tree. This is probably only
* necessary for tracking down problems - it's normally not used.
*/
- def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
- def allPackageNames: List[String] = {
- def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
- (cp.packages map (prefix + _.name)) :::
- (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
- )
- subpackages("", this)
- }
- def allPackagesWithNames: List[(String, ClassPath[T])] = {
- val root = packages map (p => p.name -> p)
- val subs =
- for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
- (prefix + "." + k, v)
-
- root ::: subs
- }
+ // def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
+ // def allPackageNames: List[String] = {
+ // def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
+ // (cp.packages map (prefix + _.name)) :::
+ // (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
+ // )
+ // subpackages("", this)
+ // }
+ // def allPackagesWithNames: List[(String, ClassPath[T])] = {
+ // val root = packages map (p => p.name -> p)
+ // val subs =
+ // for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
+ // (prefix + "." + k, v)
+ //
+ // root ::: subs
+ // }
/**
* Represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
@@ -254,10 +261,9 @@ abstract class ClassPath[T] {
/** Filters for assessing validity of various entities.
*/
- def validClassFile(name: String) = (name endsWith ".class") && context.isValidName(name)
- def validPackage(name: String) = (name != "META-INF") && (name != "") && (name(0) != '.')
- def validSourceFile(name: String) = validSourceExtensions exists (name endsWith _)
- def validSourceExtensions = List(".scala", ".java")
+ def validClassFile(name: String) = endsClass(name) && context.isValidName(name)
+ def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
+ def validSourceFile(name: String) = endsScala(name) || endsJava(name)
/**
* Find a ClassRep given a class name of the form "package.subpackage.ClassName".
@@ -281,12 +287,12 @@ abstract class ClassPath[T] {
case _ => None
}
- def sortString = asURLs map (_.toString) sorted
+ def sortString = join(split(asClasspathString).sorted: _*)
override def equals(that: Any) = that match {
case x: ClassPath[_] => this.sortString == x.sortString
case _ => false
}
- override def hashCode = sortString.hashCode
+ override def hashCode = sortString.hashCode()
}
/**
@@ -295,20 +301,23 @@ abstract class ClassPath[T] {
class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.sfile.toList map (_.toURL)
+ def asURLs = if (dir.file == null) Nil else List(dir.toURL)
def asClasspathString = dir.path
- val sourcepaths: List[AbstractFile] = List(dir)
-
- lazy val classes: List[ClassRep] = dir flatMap { f =>
- if (f.isDirectory || !validSourceFile(f.name)) Nil
- else List(ClassRep(None, Some(f)))
- } toList
-
- lazy val packages: List[SourcePath[T]] = dir flatMap { f =>
- if (f.isDirectory && validPackage(f.name)) List(new SourcePath[T](f, context))
- else Nil
- } toList
+ val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
+
+ private def traverse() = {
+ val classBuf = immutable.Vector.newBuilder[ClassRep]
+ val packageBuf = immutable.Vector.newBuilder[SourcePath[T]]
+ dir foreach { f =>
+ if (!f.isDirectory && validSourceFile(f.name))
+ classBuf += ClassRep(None, Some(f))
+ else if (f.isDirectory && validPackage(f.name))
+ packageBuf += new SourcePath[T](f, context)
+ }
+ (packageBuf.result, classBuf.result)
+ }
+ lazy val (packages, classes) = traverse()
override def toString() = "sourcepath: "+ dir.toString()
}
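
Both SourcePath above and DirectoryClassPath below now compute (packages, classes) in a single pass over the directory, routing each entry into one of two Vector builders instead of filtering the children twice. A minimal standalone sketch of that pattern; partitionEntries and the sample names are illustrative:

    import scala.collection.immutable

    object SinglePassSketch {
      def partitionEntries(names: Seq[String]): (Vector[String], Vector[String]) = {
        val dirs  = immutable.Vector.newBuilder[String]
        val files = immutable.Vector.newBuilder[String]
        names foreach { n =>
          if (n endsWith "/") dirs += n
          else files += n
        }
        (dirs.result, files.result)
      }

      def main(args: Array[String]): Unit =
        println(partitionEntries(Seq("scala/", "Predef.class", "collection/")))
        // elements: (Vector(scala/, collection/), Vector(Predef.class))
    }
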
@@ -318,111 +327,131 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = dir.sfile.toList map (_.toURL)
+ def asURLs = if (dir.file == null) Nil else List(dir.toURL)
def asClasspathString = dir.path
- val sourcepaths: List[AbstractFile] = Nil
-
- lazy val classes: List[ClassRep] = dir flatMap { f =>
- if (f.isDirectory || !validClassFile(f.name)) Nil
- else List(ClassRep(Some(f), None))
- } toList
-
- lazy val packages: List[DirectoryClassPath] = dir flatMap { f =>
- if (f.isDirectory && validPackage(f.name)) List(new DirectoryClassPath(f, context))
- else Nil
- } toList
+ val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
+
+ // calculates (packages, classes) in one traversal.
+ private def traverse() = {
+ val classBuf = immutable.Vector.newBuilder[ClassRep]
+ val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
+ dir foreach { f =>
+ if (!f.isDirectory && validClassFile(f.name))
+ classBuf += ClassRep(Some(f), None)
+ else if (f.isDirectory && validPackage(f.name))
+ packageBuf += new DirectoryClassPath(f, context)
+ }
+ (packageBuf.result, classBuf.result)
+ }
- override def toString() = "directory classpath: "+ dir
+ lazy val (packages, classes) = traverse()
+ override def toString() = "directory classpath: "+ origin.getOrElse("?")
}
/**
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: List[ClassPath[T]],
+ val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
+ def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
+ this(entries.toIndexedSeq, context)
+
def name = entries.head.name
- def asURLs = entries flatMap (_.asURLs)
- lazy val sourcepaths: List[AbstractFile] = entries flatMap (_.sourcepaths)
+ def asURLs = entries flatMap (_.asURLs) toList
+ lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
- lazy val classes: List[AnyClassRep] = {
- val cls = new ListBuffer[AnyClassRep]
+ lazy val classes: IndexedSeq[AnyClassRep] = {
+ var count = 0
+ val indices = mutable.HashMap[String, Int]()
+ val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
+
for (e <- entries; c <- e.classes) {
val name = c.name
- val idx = cls.indexWhere(_.name == name)
- if (idx >= 0) {
+ if (indices contains name) {
+ val idx = indices(name)
val existing = cls(idx)
+
if (existing.binary.isEmpty && c.binary.isDefined)
cls(idx) = existing.copy(binary = c.binary)
if (existing.source.isEmpty && c.source.isDefined)
cls(idx) = existing.copy(source = c.source)
- } else {
+ }
+ else {
+ indices(name) = count
cls += c
+ count += 1
}
}
- cls.toList
+ cls.toIndexedSeq
}
- lazy val packages: List[ClassPath[T]] = {
- val pkg = new ListBuffer[ClassPath[T]]
+ lazy val packages: IndexedSeq[ClassPath[T]] = {
+ var count = 0
+ val indices = mutable.HashMap[String, Int]()
+ val pkg = new mutable.ArrayBuffer[ClassPath[T]](256)
+
for (e <- entries; p <- e.packages) {
val name = p.name
- val idx = pkg.indexWhere(_.name == name)
- if (idx >= 0) {
+ if (indices contains name) {
+ val idx = indices(name)
pkg(idx) = addPackage(pkg(idx), p)
- } else {
+ }
+ else {
+ indices(name) = count
pkg += p
+ count += 1
}
}
- pkg.toList
+ pkg.toIndexedSeq
}
private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = {
- val newEntries = to match {
+ val newEntries: IndexedSeq[ClassPath[T]] = to match {
case cp: MergedClassPath[_] => cp.entries :+ pkg
- case _ => List(to, pkg)
+ case _ => IndexedSeq(to, pkg)
}
new MergedClassPath[T](newEntries, context)
}
-
- override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
- override def allPackageNames = entries flatMap (_.allPackageNames)
- override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
-
- def duplicatedClasses = {
- def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
-
- /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- val flattened = (
- for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
- (pkgName, pkg, clazz)
- )
- val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
-
- /** Using original name list as reference point, return duplicated entries as
- * (name, list of origins)
- * in the order they occur on the path.
- */
- for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- (name, dups map {
- case (_, cp, _) if cp.origin.isDefined => cp.origin.get
- case (_, cp, _) => cp.asURLs.mkString
- })
- }
-
+ //
+ // override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
+ // override def allPackageNames = entries flatMap (_.allPackageNames)
+ // override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
+ //
+ // def duplicatedClasses = {
+ // def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
+ //
+ // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ // val flattened = (
+ // for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
+ // (pkgName, pkg, clazz)
+ // )
+ // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+ //
+ // /** Using original name list as reference point, return duplicated entries as
+ // * (name, list of origins)
+ // * in the order they occur on the path.
+ // */
+ // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ // (name, dups map {
+ // case (_, cp, _) if cp.origin.isDefined => cp.origin.get
+ // case (_, cp, _) => cp.asURLs.mkString
+ // })
+ // }
+ //
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
asClasspathString split ':' foreach (x => println(" " + x))
}
- def showDuplicates() =
- ClassPath findDuplicates this foreach {
- case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
- }
-
+ // def showDuplicates() =
+ // ClassPath findDuplicates this foreach {
+ // case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
+ // }
+ //
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
@@ -431,7 +460,7 @@ extends ClassPath[T] {
* as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
*/
class JavaClassPath(
- containers: List[ClassPath[AbstractFile]],
+ containers: IndexedSeq[ClassPath[AbstractFile]],
context: JavaContext)
extends MergedClassPath[AbstractFile](containers, context) {
}
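
MergedClassPath.classes and .packages above replace a linear indexWhere lookup with a name-to-index HashMap while appending into an ArrayBuffer, so merging entries by name stays roughly linear. A small standalone sketch of the same merge-by-name scheme; mergeByName and the sample data are illustrative:

    import scala.collection.mutable

    object MergeByNameSketch {
      def mergeByName[A](entries: Seq[(String, A)])(combine: (A, A) => A): IndexedSeq[(String, A)] = {
        val indices = mutable.HashMap[String, Int]()
        val buf     = new mutable.ArrayBuffer[(String, A)]()
        for ((name, value) <- entries) {
          if (indices contains name) {
            val idx = indices(name)
            buf(idx) = (name, combine(buf(idx)._2, value))
          }
          else {
            indices(name) = buf.length
            buf += ((name, value))
          }
        }
        buf.toIndexedSeq
      }

      def main(args: Array[String]): Unit =
        println(mergeByName(Seq("scala" -> 1, "java" -> 2, "scala" -> 3))(_ + _))
        // elements: (scala,4), (java,2)
    }
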
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
index 7a6f42c420..13fb3185ab 100644
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
@@ -135,7 +135,7 @@ class AssemblyClassPath(types: Array[MSILType], namespace: String, val context:
cls += ClassRep(Some(types(i)), None)
i += 1
}
- cls.toList
+ cls.toIndexedSeq
}
lazy val packages = {
@@ -152,11 +152,13 @@ class AssemblyClassPath(types: Array[MSILType], namespace: String, val context:
}
i += 1
}
- for (ns <- nsSet.toList)
+ val xs = for (ns <- nsSet.toList)
yield new AssemblyClassPath(types, ns, context)
+
+ xs.toIndexedSeq
}
- val sourcepaths: List[AbstractFile] = Nil
+ val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
override def toString() = "assembly classpath "+ namespace
}
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index 90a9057f01..bc0f706649 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -22,7 +22,7 @@ abstract class SourceFile {
def isSelfContained: Boolean
def length : Int
def position(offset: Int) : Position = {
- assert(offset < length)
+ assert(offset < length, file + ": " + offset + " >= " + length)
new OffsetPosition(this, offset)
}
def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index b1a5ba6d46..1055bfdef2 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -10,7 +10,7 @@ import java.net.{ URL, MalformedURLException }
import scala.util.Properties._
import nsc.{ Settings, GenericRunnerSettings }
import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
-import nsc.io.{ File, Directory, Path }
+import nsc.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
@@ -105,10 +105,18 @@ object PathResolver {
else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
else ""
- def scalaBootClassPath = scalaLibDirFound match {
- case Some(dir) if scalaHomeExists => join(ClassPath expandDir dir.path: _*)
- case _ => ""
- }
+ // XXX It must be time for someone to figure out what all these things
+ // are intended to do. This is disabled here because it was causing all
+ // the scala jars to end up on the classpath twice: once on the boot
+ // classpath as set up by the runner (or regular classpath under -nobootcp)
+ // and then again here.
+ def scalaBootClassPath = ""
+ // scalaLibDirFound match {
+ // case Some(dir) if scalaHomeExists =>
+ // val paths = ClassPath expandDir dir.path
+ // join(paths: _*)
+ // case _ => ""
+ // }
def scalaExtDirs = Environment.scalaExtDirs
@@ -193,7 +201,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
import context._
// Assemble the elements!
- def basis = List(
+ def basis = List[Traversable[ClassPath[AbstractFile]]](
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -228,7 +236,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
def containers = Calculated.containers
lazy val result = {
- val cp = new JavaClassPath(containers, context)
+ val cp = new JavaClassPath(containers.toIndexedSeq, context)
if (settings.Ylogcp.value) {
Console.println("Classpath built from " + settings.toConciseString)
Console.println("Defaults: " + PathResolver.Defaults)
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index 63cfc06702..65ff582011 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -16,7 +16,7 @@ package util
* @author Martin Odersky
* @version 1.0
*/
-object StringOps {
+trait StringOps {
def onull(s: String) = if (s == null) "" else s
def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
def ojoin(xs: Seq[String], sep: String) = oempty(xs: _*) mkString sep
@@ -53,4 +53,37 @@ object StringOps {
def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
if (idx == -1) None
else Some(str take idx, str drop (if (doDropIndex) idx + 1 else idx))
+
+ /** Returns a string meaning "n elements".
+ *
+ * @param n ...
+ * @param elements ...
+ * @return ...
+ */
+ def countElementsAsString(n: Int, elements: String): String =
+ n match {
+ case 0 => "no " + elements + "s"
+ case 1 => "one " + elements
+ case 2 => "two " + elements + "s"
+ case 3 => "three " + elements + "s"
+ case 4 => "four " + elements + "s"
+ case _ => "" + n + " " + elements + "s"
+ }
+
+ /** Turns a count into a friendly English description if n<=4.
+ *
+ * @param n ...
+ * @return ...
+ */
+ def countAsString(n: Int): String =
+ n match {
+ case 0 => "none"
+ case 1 => "one"
+ case 2 => "two"
+ case 3 => "three"
+ case 4 => "four"
+ case _ => "" + n
+ }
}
+
+object StringOps extends StringOps { }
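
Turning StringOps into a trait with a companion module means the helpers can now be mixed into other utility objects as well as called through StringOps directly. A short usage sketch; MyReporter is hypothetical:

    object MyReporter extends scala.tools.util.StringOps {
      def report(n: Int) = println("found " + countElementsAsString(n, "error"))
    }

    // MyReporter.report(0) prints "found no errors"
    // MyReporter.report(3) prints "found three errors"
    // scala.tools.util.StringOps.countAsString(2) == "two"
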
diff --git a/src/jline/TEST-NOTE.txt b/src/jline/TEST-NOTE.txt
new file mode 100644
index 0000000000..04f5de8dc1
--- /dev/null
+++ b/src/jline/TEST-NOTE.txt
@@ -0,0 +1,4 @@
+Apparently the jline bundled with sbt interferes with testing some changes: for instance, after changing the keybindings I kept seeing failures until I realized what was happening and bypassed sbt, e.g.
+
+% java -cp ./lib_managed/scala_2.9.0/compile/jansi-1.4.jar:./lib_managed/scala_2.9.0/test/'*':./target/scala_2.9.0/classes:./target/scala_2.9.0/test-classes:./target/scala_2.9.0/resources org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
+
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
index 89d3b4a498..0c2795bca5 100644
--- a/src/jline/project/build.properties
+++ b/src/jline/project/build.properties
@@ -2,7 +2,7 @@
#Wed Mar 23 21:05:24 PDT 2011
project.organization=org.improving
project.name=jline
-sbt.version=0.7.6.RC0
-project.version=0.98
-build.scala.versions=2.8.1
+sbt.version=0.7.7
+project.version=0.99-SNAPSHOT
+build.scala.versions=2.9.0
project.initialize=false
diff --git a/src/jline/project/plugins/project/build.properties b/src/jline/project/plugins/project/build.properties
index 218ed2577f..7a06683cda 100644
--- a/src/jline/project/plugins/project/build.properties
+++ b/src/jline/project/plugins/project/build.properties
@@ -1,3 +1,3 @@
#Project properties
-#Wed Mar 23 21:05:33 PDT 2011
+#Wed May 25 15:08:22 PDT 2011
plugin.uptodate=true
diff --git a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
index 71659c5a42..94a1b98c0d 100644
--- a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
+++ b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
@@ -175,6 +175,9 @@ public class UnixTerminal
else if (key == DEL) { // alt-backspace: delete previous word
return CTRL_W.code; // DELETE_PREV_WORD
}
+ else if (c == 'd') { // alt-d: delete next word
+ return CTRL_X.code; // DELETE_NEXT_WORD
+ }
}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index f493619bcd..7882fcc1db 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -754,11 +754,11 @@ public class ConsoleReader
}
private boolean previousWord() throws IOException {
- while (isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
+ while (isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) {
// nothing
}
- while (!isDelimiter(buf.current()) && (moveCursor(-1) != 0)) {
+ while (!isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) {
// nothing
}
@@ -766,11 +766,11 @@ public class ConsoleReader
}
private boolean nextWord() throws IOException {
- while (isDelimiter(buf.current()) && (moveCursor(1) != 0)) {
+ while (isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) {
// nothing
}
- while (!isDelimiter(buf.current()) && (moveCursor(1) != 0)) {
+ while (!isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) {
// nothing
}
@@ -778,11 +778,23 @@ public class ConsoleReader
}
private boolean deletePreviousWord() throws IOException {
- while (isDelimiter(buf.current()) && backspace()) {
+ while (isDelimiter(buf.charLeftOfCursor()) && backspace()) {
// nothing
}
- while (!isDelimiter(buf.current()) && backspace()) {
+ while (!isDelimiter(buf.charLeftOfCursor()) && backspace()) {
+ // nothing
+ }
+
+ return true;
+ }
+
+ private boolean deleteNextWord() throws IOException {
+ while (isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) {
+ // nothing
+ }
+
+ while (!isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) {
// nothing
}
@@ -1283,7 +1295,7 @@ public class ConsoleReader
if (buf.buffer.length() == 0) {
return null;
} else {
- deleteCurrentCharacter();
+ success = deleteCurrentCharacter();
}
break;
@@ -1353,6 +1365,10 @@ public class ConsoleReader
success = deletePreviousWord();
break;
+ case DELETE_NEXT_WORD:
+ success = deleteNextWord();
+ break;
+
case PREV_WORD:
success = previousWord();
break;
diff --git a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java b/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java
index d9cdbec124..7993def002 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java
@@ -34,7 +34,10 @@ public class CursorBuffer
return buffer.length();
}
- public char current() {
+ /**
+ * Gets the character to the left of the cursor.
+ */
+ public char charLeftOfCursor() {
if (cursor <= 0) {
return 0;
}
@@ -43,6 +46,16 @@ public class CursorBuffer
}
/**
+ * Gets the character at the cursor.
+ */
+ public char charAtCursor() {
+ if (cursor < 0 || cursor >= buffer.length()) {
+ return 0;
+ }
+ return buffer.charAt(cursor);
+ }
+
+ /**
* Write the specific character into the buffer, setting the cursor position
* ahead one. The text may overwrite or insert based on the current setting
* of {@link #isOverTyping}.
diff --git a/src/jline/src/main/java/scala/tools/jline/console/Key.java b/src/jline/src/main/java/scala/tools/jline/console/Key.java
index 26528555df..2e713a7da2 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/Key.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/Key.java
@@ -48,6 +48,8 @@ public enum Key
CTRL_W(23),
+ CTRL_X(24),
+
CTRL_OB(27),
CTRL_QM(127),
diff --git a/src/jline/src/main/java/scala/tools/jline/console/Operation.java b/src/jline/src/main/java/scala/tools/jline/console/Operation.java
index 17b216be57..59ee878d45 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/Operation.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/Operation.java
@@ -259,6 +259,12 @@ public enum Operation
* Cancel search
*/
ABORT(-64),
+
+ /**
+ * Delete next word
+ */
+ DELETE_NEXT_WORD(-65),
+
;
public final short code;
diff --git a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
index ebb5c8818a..ad932d2a80 100644
--- a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
+++ b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
@@ -48,7 +48,7 @@
# CTRL-P: scroll to the previous element in the history buffer
16=PREV_HISTORY
-# CTRL-R: redraw the current line
+# CTRL-R: search history
18=SEARCH_PREV
# CTRL-T: move to next word
@@ -63,6 +63,9 @@
# CTRL-W: delete the word directly before the cursor
23=DELETE_PREV_WORD
+# CTRL-X: delete the word directly after the cursor
+24=DELETE_NEXT_WORD
+
# DELETE, CTRL-?: delete the next character
# 127 is the ASCII code for delete
127=DELETE_NEXT_CHAR
diff --git a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java b/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java
index be70979563..6f5d46121e 100644
--- a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java
+++ b/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java
@@ -8,9 +8,7 @@ package scala.tools.jline.console;
import org.junit.Test;
-import static scala.tools.jline.console.Operation.DELETE_PREV_WORD;
-import static scala.tools.jline.console.Operation.MOVE_TO_END;
-import static scala.tools.jline.console.Operation.PREV_WORD;
+import static scala.tools.jline.console.Operation.*;
/**
* Tests various features of editing lines.
@@ -29,7 +27,19 @@ public class EditLineTest
assertBuffer("This ", b = b.op(DELETE_PREV_WORD));
assertBuffer("", b = b.op(DELETE_PREV_WORD));
assertBuffer("", b = b.op(DELETE_PREV_WORD));
- assertBuffer("", b = b.op(DELETE_PREV_WORD));
+ assertBuffer("", b.op(DELETE_PREV_WORD));
+ }
+
+ @Test
+ public void testDeleteNextWord() throws Exception {
+ Buffer b = new Buffer("This is a test ");
+
+ assertBuffer(" is a test ", b = b.op(MOVE_TO_BEG).op(DELETE_NEXT_WORD));
+ assertBuffer(" a test ", b = b.op(DELETE_NEXT_WORD));
+ assertBuffer(" test ", b = b.op(DELETE_NEXT_WORD));
+ assertBuffer(" ", b = b.op(DELETE_NEXT_WORD));
+ assertBuffer("", b = b.op(DELETE_NEXT_WORD));
+ assertBuffer("", b.op(DELETE_NEXT_WORD));
}
@Test
@@ -96,6 +106,32 @@ public class EditLineTest
}
@Test
+ public void testNextWord() throws Exception {
+ assertBuffer("ThisX is a test",
+ new Buffer("This is a test").op(MOVE_TO_BEG)
+ .op(NEXT_WORD)
+ .append('X'));
+ assertBuffer("This isX a test",
+ new Buffer("This is a test").op(MOVE_TO_BEG)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .append('X'));
+ assertBuffer("This is aX test",
+ new Buffer("This is a test").op(MOVE_TO_BEG)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .append('X'));
+ assertBuffer("This is a testX ",
+ new Buffer("This is a test ").op(MOVE_TO_BEG)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .op(NEXT_WORD)
+ .append('X'));
+ }
+
+ @Test
public void testLineStart() throws Exception {
assertBuffer("XThis is a test",
new Buffer("This is a test").ctrlA().append('X'));
@@ -139,7 +175,7 @@ public class EditLineTest
assertBuffer("est", b = b.back());
assertBuffer("est", b = b.back());
assertBuffer("est", b = b.back());
- assertBuffer("est", b = b.back());
+ assertBuffer("est", b.back());
}
@Test
@@ -162,7 +198,7 @@ public class EditLineTest
assertBuffer("", b = b.back());
assertBuffer("", b = b.back());
assertBuffer("", b = b.back());
- assertBuffer("", b = b.back());
+ assertBuffer("", b.back());
}
@Test
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index a97e5f050b..a22a823da7 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -54,6 +54,12 @@ abstract class Any {
*/
def toString: String
+ /** Returns the runtime class representation of the object.
+ *
+ * @return a class object corresponding to the static type of the receiver
+ */
+ def getClass(): Class[_]
+
/** Test two objects for equality.
*
* @param that the object to compare against this object for equality.
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 7719065ae7..f77bdd2ea0 100755
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -29,6 +29,8 @@ final class Boolean extends AnyVal {
def |(x: Boolean): Boolean = sys.error("stub")
def &(x: Boolean): Boolean = sys.error("stub")
def ^(x: Boolean): Boolean = sys.error("stub")
+
+ def getClass(): Class[Boolean] = sys.error("stub")
}
object Boolean extends AnyValCompanion {
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 540e05c19f..8c598e044a 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -144,6 +144,7 @@ final class Byte extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Byte] = sys.error("stub")
}
object Byte extends AnyValCompanion {
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index a5d1cb019b..a8f15125bf 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -144,6 +144,7 @@ final class Char extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Char] = sys.error("stub")
}
object Char extends AnyValCompanion {
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 5f2e01063f..108c6207bb 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -118,6 +118,7 @@ final class Double extends AnyVal {
def %(x: Float): Double = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Double] = sys.error("stub")
}
object Double extends AnyValCompanion {
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 051bdfef67..4fac27bd87 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -157,10 +157,14 @@ abstract class Enumeration(initial: Int, names: String*) extends Serializable {
protected final def Value(i: Int, name: String): Value = new Val(i, name)
private def populateNameMap() {
+ val fields = getClass.getDeclaredFields
+ def isValDef(m: JMethod) = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
+
// The list of possible Value methods: 0-args which return a conforming type
val methods = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
classOf[Value].isAssignableFrom(m.getReturnType) &&
- m.getDeclaringClass != classOf[Enumeration])
+ m.getDeclaringClass != classOf[Enumeration] &&
+ isValDef(m))
methods foreach { m =>
val name = m.getName
// invoke method to obtain actual `Value` instance
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index f403e5ccab..9ef7181806 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -118,6 +118,7 @@ final class Float extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Float] = sys.error("stub")
}
object Float extends AnyValCompanion {
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index c53be1241d..4546934149 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -144,6 +144,7 @@ final class Int extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Int] = sys.error("stub")
}
object Int extends AnyValCompanion {
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 3124130663..12b8a25b8a 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -144,6 +144,7 @@ final class Long extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Long] = sys.error("stub")
}
object Long extends AnyValCompanion {
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index bab3a1c759..8e938aaeec 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -30,10 +30,9 @@ object Option {
/** Represents optional values. Instances of `Option`
* are either an instance of $some or the object $none.
*
- * The most idiomatic way to use an $option instance
- * is to treat it as a collection or monad and
- * use `map`,`flatMap`, `filter`,
- * or `foreach`:
+ * The most idiomatic way to use an $option instance is to treat it
+ * as a collection or monad and use `map`,`flatMap`, `filter`, or
+ * `foreach`:
*
* {{{
* val name:Option[String] = request.getParameter("name")
@@ -105,7 +104,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
*
* @param default the default expression.
*/
- def getOrElse[B >: A](default: => B): B =
+ @inline final def getOrElse[B >: A](default: => B): B =
if (isEmpty) default else this.get
/** Returns the option's value if it is nonempty,
@@ -117,7 +116,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* val textField = new JComponent(initalText.orNull,20)
* }}}
*/
- def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse null
+ @inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse null
/** Returns a $some containing the result of applying $f to this $option's
* value if this $option is nonempty.
@@ -130,7 +129,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @see flatMap
* @see foreach
*/
- def map[B](f: A => B): Option[B] =
+ @inline final def map[B](f: A => B): Option[B] =
if (isEmpty) None else Some(f(this.get))
/** Returns the result of applying $f to this $option's value if
@@ -143,7 +142,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @see map
* @see foreach
*/
- def flatMap[B](f: A => Option[B]): Option[B] =
+ @inline final def flatMap[B](f: A => Option[B]): Option[B] =
if (isEmpty) None else f(this.get)
/** Returns this $option if it is nonempty '''and''' applying the predicate $p to
@@ -151,7 +150,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
*
* @param p the predicate used for testing.
*/
- def filter(p: A => Boolean): Option[A] =
+ @inline final def filter(p: A => Boolean): Option[A] =
if (isEmpty || p(this.get)) this else None
/** Returns this $option if it is nonempty '''and''' applying the predicate $p to
@@ -159,7 +158,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
*
* @param p the predicate used for testing.
*/
- def filterNot(p: A => Boolean): Option[A] =
+ @inline final def filterNot(p: A => Boolean): Option[A] =
if (isEmpty || !p(this.get)) this else None
/** Necessary to keep $option from being implicitly converted to
@@ -184,7 +183,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
*
* @param p the predicate to test
*/
- def exists(p: A => Boolean): Boolean =
+ @inline final def exists(p: A => Boolean): Boolean =
!isEmpty && p(this.get)
/** Apply the given procedure $f to the option's value,
@@ -194,7 +193,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @see map
* @see flatMap
*/
- def foreach[U](f: A => U) {
+ @inline final def foreach[U](f: A => U) {
if (!isEmpty) f(this.get)
}
@@ -215,7 +214,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* otherwise return the result of evaluating `alternative`.
* @param alternative the alternative expression.
*/
- def orElse[B >: A](alternative: => Option[B]): Option[B] =
+ @inline final def orElse[B >: A](alternative: => Option[B]): Option[B] =
if (isEmpty) alternative else this
/** Returns a singleton iterator returning the $option's value
@@ -238,7 +237,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @param left the expression to evaluate and return if this is empty
* @see toLeft
*/
- def toRight[X](left: => X) =
+ @inline final def toRight[X](left: => X) =
if (isEmpty) Left(left) else Right(this.get)
/** Returns a [[scala.Right]] containing the given
@@ -249,7 +248,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @param right the expression to evaluate and return if this is empty
* @see toRight
*/
- def toLeft[X](right: => X) =
+ @inline final def toLeft[X](right: => X) =
if (isEmpty) Right(right) else Left(this.get)
}
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 68fdee8411..acc9e2b3ad 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -97,8 +97,8 @@ object Predef extends LowPriorityImplicits {
* @param p the expression to test
* @param msg a String to include in the failure message
*/
- @elidable(ASSERTION)
- def assert(assertion: Boolean, message: => Any) {
+ @elidable(ASSERTION) @inline
+ final def assert(assertion: Boolean, message: => Any) {
if (!assertion)
throw new java.lang.AssertionError("assertion failed: "+ message)
}
@@ -128,8 +128,8 @@ object Predef extends LowPriorityImplicits {
* @param p the expression to test
* @param msg a String to include in the failure message
*/
- @elidable(ASSERTION)
- def assume(assumption: Boolean, message: => Any) {
+ @elidable(ASSERTION) @inline
+ final def assume(assumption: Boolean, message: => Any) {
if (!assumption)
throw new java.lang.AssertionError("assumption failed: "+ message)
}
@@ -152,7 +152,7 @@ object Predef extends LowPriorityImplicits {
* @param p the expression to test
* @param msg a String to include in the failure message
*/
- def require(requirement: Boolean, message: => Any) {
+ @inline final def require(requirement: Boolean, message: => Any) {
if (!requirement)
throw new IllegalArgumentException("requirement failed: "+ message)
}
@@ -334,21 +334,29 @@ object Predef extends LowPriorityImplicits {
// Type Constraints --------------------------------------------------------------
- /** An instance of `A <:< B` witnesses that `A` is a subtype of `B`.
+ /**
+ * An instance of `A <:< B` witnesses that `A` is a subtype of `B`.
+ * Requiring an implicit argument of the type `A <:< B` encodes
+ * the generalized constraint `A <: B`.
+ *
+ * @note we need a new type constructor `<:<` and evidence `conforms`,
+ * as reusing `Function1` and `identity` leads to ambiguities in
+ * case of type errors (any2stringadd is inferred)
*
- * Requiring an implicit argument of the type `A <:< B` encodes the generalized constraint `A <: B`.
+ * To constrain any abstract type T that's in scope in a method's
+ * argument list (not just the method's own type parameters) simply
+ * add an implicit argument of type `T <:< U`, where U is the required
+ * upper bound; or for lower-bounds, use: `L <:< T`, where L is the
+ * required lower bound.
*
- * @note we need a new type constructor `<:<` and evidence `conforms`, as
- * reusing `Function2` and `identity` leads to ambiguities in case of type errors (any2stringadd is inferred)
- * to constrain any abstract type T that's in scope in a method's argument list (not just the method's own type parameters)
- * simply add an implicit argument of type `T <:< U`, where U is the required upper bound (for lower-bounds, use: `L <:< T`,
- * where L is the required lower bound).
- * in part contributed by Jason Zaugg
+ * In part contributed by Jason Zaugg.
*/
@implicitNotFound(msg = "Cannot prove that ${From} <:< ${To}.")
sealed abstract class <:<[-From, +To] extends (From => To) with Serializable
- implicit def conforms[A]: A <:< A = new (A <:< A) { def apply(x: A) = x }
- // not in the <:< companion object because it is also intended to subsume identity (which is no longer implicit)
+ private[this] final val singleton_<:< = new <:<[Any,Any] { def apply(x: Any): Any = x }
+ // not in the <:< companion object because it is also
+ // intended to subsume identity (which is no longer implicit)
+ implicit def conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A]
/** An instance of `A =:= B` witnesses that the types `A` and `B` are equal.
*
@@ -356,8 +364,9 @@ object Predef extends LowPriorityImplicits {
*/
@implicitNotFound(msg = "Cannot prove that ${From} =:= ${To}.")
sealed abstract class =:=[From, To] extends (From => To) with Serializable
+ private[this] final val singleton_=:= = new =:=[Any,Any] { def apply(x: Any): Any = x }
object =:= {
- implicit def tpEquals[A]: A =:= A = new (A =:= A) {def apply(x: A) = x}
+ implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A]
}
// less useful due to #2781
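
The rewritten note on `<:<` above describes how requiring an implicit `T <:< U` argument constrains a type already in scope. A short usage sketch of that generalized constraint; firstOrElse is an illustrative method, not part of Predef:

    object ConformsSketch {
      // A is only required to be a Traversable[B] at the call site, via the evidence.
      def firstOrElse[A, B](xs: A, alt: B)(implicit ev: A <:< Traversable[B]): B = {
        val t = ev(xs)
        if (t.isEmpty) alt else t.head
      }

      def main(args: Array[String]): Unit = {
        println(firstOrElse(List(1, 2, 3), 0))   // 1
        println(firstOrElse(Nil: List[Int], 0))  // 0
      }
    }
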
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 8bf6f09154..81953505b7 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -144,6 +144,7 @@ final class Short extends AnyVal {
def %(x: Float): Float = sys.error("stub")
def %(x: Double): Double = sys.error("stub")
+ def getClass(): Class[Short] = sys.error("stub")
}
object Short extends AnyValCompanion {
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 58b024273b..c5d12afeba 100755
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -15,8 +15,9 @@ package scala
* not represented as objects by the underlying host system. There is
* only one value of type Unit: `()`.
*/
-final class Unit extends AnyVal { }
-
+final class Unit extends AnyVal {
+ def getClass(): Class[Unit] = sys.error("stub")
+}
object Unit extends AnyValCompanion {
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index d7b4b2ee5e..18132f0a7b 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -34,7 +34,7 @@ import generic.{ CanBuildFrom => CBF, _ }
* This is a base trait for all Scala collections that define an `iterator`
* method to step through one-by-one the collection's elements.
*/
-private[collection] trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
+trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
def iterator: Iterator[A]
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index c3f0adc310..9e3927eaf4 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -15,9 +15,9 @@ import TraversableView.NoBuilder
-private[collection] trait GenIterableViewLike[+A,
- +Coll,
- +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
+trait GenIterableViewLike[+A,
+ +Coll,
+ +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
extends GenIterable[A] with GenIterableLike[A, This] with GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This] {
self =>
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 9ae388afb4..2bbcc8f4f5 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -21,7 +21,7 @@ package scala.collection
* A map is a collection of bindings from keys to values, where there are
* no duplicate keys.
*/
-private[collection] trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] {
+trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] {
def default(key: A): B
def get(key: A): Option[B]
def apply(key: A): B
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index f5e8113d28..a5983e9ac2 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -30,7 +30,7 @@ import annotation.bridge
* Sequences are special cases of iterable collections of class `Iterable`.
* Unlike iterables, sequences always have a defined order of elements.
*/
-private[collection] trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] {
+trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] {
/** Selects an element by its index in the $coll.
*
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
index 74d558342f..2f06a52cd3 100644
--- a/src/library/scala/collection/GenSeqViewLike.scala
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -11,9 +11,9 @@ package scala.collection
-private[collection] trait GenSeqViewLike[+A,
- +Coll,
- +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+trait GenSeqViewLike[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
self =>
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 1c998351b4..2fc94c2e87 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -22,11 +22,11 @@ import annotation.bridge
*
* A set is a collection that contains no duplicate elements.
*/
-private[collection] trait GenSetLike[A, +Repr]
- extends GenIterableLike[A, Repr]
- with (A => Boolean)
- with Equals
- with Parallelizable[A, parallel.ParSet[A]] {
+trait GenSetLike[A, +Repr]
+extends GenIterableLike[A, Repr]
+ with (A => Boolean)
+ with Equals
+ with Parallelizable[A, parallel.ParSet[A]] {
def iterator: Iterator[A]
def contains(elem: A): Boolean
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 44aae3053f..2a613a4645 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -50,7 +50,7 @@ import annotation.migration
* @author Aleksandar Prokopec
* @since 2.9
*/
-private[collection] trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] {
+trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] {
def repr: Repr
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 3d2ebf3a22..9f7bbadfb2 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -16,9 +16,9 @@ import annotation.migration
-private[collection] trait GenTraversableViewLike[+A,
- +Coll,
- +This <: GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This]]
+trait GenTraversableViewLike[+A,
+ +Coll,
+ +This <: GenTraversableView[A, Coll] with GenTraversableViewLike[A, Coll, This]]
extends GenTraversable[A] with GenTraversableLike[A, This] {
self =>
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 2a208e0e3f..395e253430 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -125,7 +125,7 @@ self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (elems: GenTraversableOnce[A]): This = newBuilder ++= seq ++= elems.seq result
+ def ++ (elems: GenTraversableOnce[A]): This = (repr /: elems.seq)(_ + _)
@bridge
def ++ (elems: TraversableOnce[A]): This = ++ (elems: GenTraversableOnce[A])
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 23e967aea0..03286fef67 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -82,6 +82,27 @@ trait TraversableViewLike[+A,
trait Transformed[+B] extends TraversableView[B, Coll] with super.Transformed[B] {
def foreach[U](f: B => U): Unit
+ // Methods whose standard implementations use "isEmpty" need to be rewritten
+ // for views, else they will end up traversing twice in a situation like:
+ // xs.view.flatMap(f).headOption
+ override def headOption: Option[B] = {
+ for (x <- this)
+ return Some(x)
+
+ None
+ }
+ override def lastOption: Option[B] = {
+ // (Should be) better than allocating a Some for every element.
+ var empty = true
+ var result: B = null.asInstanceOf[B]
+ for (x <- this) {
+ empty = false
+ result = x
+ }
+ if (empty) None else Some(result)
+ }
+
+ // XXX: As yet not dealt with, tail and init both call isEmpty.
override def stringPrefix = self.stringPrefix
override def toString = viewToString
}
@@ -169,7 +190,7 @@ trait TraversableViewLike[+A,
newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: A => K): immutable.Map[K, This] =
- thisSeq.groupBy(f).mapValues(xs => newForced(thisSeq))
+ thisSeq groupBy f mapValues (xs => newForced(xs))
override def toString = viewToString
}
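
The one-character groupBy fix above matters: previously every key was associated with a forced copy of the whole sequence instead of its own group. A hypothetical before/after, assuming a plain sequence view:

    val grouped = List(1, 2, 3, 4).view.groupBy(_ % 2)
    grouped(1).force   // elements 1, 3 after the fix
    grouped(0).force   // elements 2, 4; with newForced(thisSeq) both groups
                       // would have forced to the full 1, 2, 3, 4
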
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index e4b539f962..0e32118696 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -192,24 +192,22 @@ extends IndexedSeq[T] with Serializable {
/** A companion object for numeric ranges.
*/
object NumericRange {
- import Ordering.Implicits._
- import math.Integral.Implicits._
-
/** Calculates the number of elements in a range given start, end, step, and
* whether or not it is inclusive. Throws an exception if step == 0 or
* the number of elements exceeds the maximum Int.
*/
- def count[T: Integral](start: T, end: T, step: T, isInclusive: Boolean): Int = {
- val zero = implicitly[Integral[T]].zero
- val upward = start < end
- val posStep = step > zero
+ def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = {
+ val zero = num.zero
+ val upward = num.lt(start, end)
+ val posStep = num.gt(step, zero)
if (step == zero) throw new IllegalArgumentException("step cannot be 0.")
else if (start == end) if (isInclusive) 1 else 0
else if (upward != posStep) 0
else {
- val jumps = ((end - start) / step).toLong
- val remainder = ((end - start) % step).toLong
+ val diff = num.minus(end, start)
+ val jumps = num.toLong(num.quot(diff, step))
+ val remainder = num.toLong(num.rem(diff, step))
val longCount = jumps + (
if (!isInclusive && zero == remainder) 0 else 1
)
@@ -220,7 +218,7 @@ object NumericRange {
* overflow turn up as an empty range.
*/
// The second condition contradicts an empty result.
- val isOverflow = longCount == 0 && (start + step < end) == upward
+ val isOverflow = longCount == 0 && num.lt(num.plus(start, step), end) == upward
if (longCount > scala.Int.MaxValue || longCount < 0L || isOverflow) {
val word = if (isInclusive) "to" else "until"
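
The count rewrite above trades the operator-syntax imports for direct calls on the Integral evidence, presumably to avoid the per-call ops wrappers those imports introduce. A sketch of the correspondence, using the standard Long instance and invented values:

    val num = implicitly[math.Integral[Long]]
    num.lt(1L, 5L)                   // 1L < 5L
    val diff = num.minus(9L, 1L)     // 9L - 1L
    num.quot(diff, 2L)               // diff / 2 == 4
    num.rem(diff, 2L)                // diff % 2 == 0
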
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index b22998fb4b..804c67527a 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -128,7 +128,7 @@ extends IndexedSeq[Int]
if (isEmpty)
Nil.init
- dropRight(length - 1)
+ dropRight(1)
}
/** Creates a new range containing all the elements of this range except the first one.
@@ -243,8 +243,13 @@ object Range {
def count(start: Int, end: Int, step: Int): Int =
count(start, end, step, false)
- def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int =
- NumericRange.count[Long](start, end, step, isInclusive)
+ def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
+ // faster path for the common counting range
+ if (start >= 0 && end > start && end < scala.Int.MaxValue && step == 1)
+ (end - start) + ( if (isInclusive) 1 else 0 )
+ else
+ NumericRange.count[Long](start, end, step, isInclusive)
+ }
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
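
The effect of the fast path added above, with invented arguments: the common ascending, step-1 case becomes a subtraction, and everything else still goes through the overflow-safe Long arithmetic.

    Range.count(0, 1000, 1, false)   // 1000, computed as end - start
    Range.count(0, 1000, 1, true)    // 1001, (end - start) + 1
    Range.count(0, 10, 3, false)     // 4, still via NumericRange.count[Long]
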
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index b6145ecaf0..2c90f8eb6b 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -146,6 +146,10 @@ self =>
)
else super.++(that)(bf)
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ if (isStreamBuilder(bf)) asThat(cons(elem, this))
+ else super.+:(elem)(bf)
+
/**
* Create a new stream which contains all intermediate results of applying the operator
* to subsequent elements left to right.
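
The new +: above keeps the stream lazy: when the builder is a stream builder, prepending is just a cons, so the tail is never forced. Roughly, for the default case:

    val nats = Stream.from(0)
    val s = (-1) #:: nats            // what +: amounts to for streams
    s.take(3).toList                 // List(-1, 0, 1); the rest of nats stays unevaluated
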
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 63d5984b11..5fc71c7259 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -36,7 +36,17 @@ final class StringOps(override val repr: String) extends StringLike[String] {
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = StringBuilder.newBuilder
+ override def apply(index: Int): Char = repr charAt index
+ override def slice(from: Int, until: Int): String = {
+ val start = if (from < 0) 0 else from
+ if (until <= start || start >= repr.length)
+ return ""
+
+ val end = if (until > length) length else until
+ repr.substring(start, end)
+ }
override def toString = repr
+ override def length = repr.length
def seq = this.iterator
}
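
The slice override above clamps both bounds, so out-of-range arguments yield a truncated or empty result instead of the StringIndexOutOfBoundsException a raw substring call would throw. Expected behaviour, assuming the code above:

    "abcdef".slice(2, 4)     // "cd"
    "abcdef".slice(-3, 2)    // "ab"   (negative from clamped to 0)
    "abcdef".slice(4, 100)   // "ef"   (until clamped to length)
    "abcdef".slice(5, 3)     // ""     (empty when until <= start)
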
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index 6c90d0c3ca..512944f041 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -37,8 +37,15 @@ class WrappedString(val self: String) extends IndexedSeq[Char] with StringLike[W
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = WrappedString.newBuilder
- override def slice(from: Int, until: Int): WrappedString =
- new WrappedString(self.substring(from max 0, until min self.length))
+ override def slice(from: Int, until: Int): WrappedString = {
+ val start = if (from < 0) 0 else from
+ if (until <= start || start >= repr.length)
+ return new WrappedString("")
+
+ val end = if (until > length) length else until
+ new WrappedString(repr.substring(start, end))
+ }
+ override def length = self.length
override def toString = self
}
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 012105d7c4..e24f0cfa00 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -25,9 +25,10 @@ object ArrayStack extends SeqFactory[ArrayStack] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, ArrayStack[A]] = new ArrayStack[A]
def empty: ArrayStack[Nothing] = new ArrayStack()
- def apply[A: ClassManifest](elems: A*): ArrayStack[A]= {
+ def apply[A: ClassManifest](elems: A*): ArrayStack[A] = {
val els: Array[AnyRef] = elems.reverse.map{_.asInstanceOf[AnyRef]}(breakOut)
- new ArrayStack[A](els, els.length)
+ if (els.length == 0) new ArrayStack()
+ else new ArrayStack[A](els, els.length)
}
private[mutable] def growArray(x: Array[AnyRef]) = {
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index f2e42f2ca1..3118d6aa31 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -279,7 +279,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
// improve(hcode) & (table.length - 1)
val improved = improve(hcode)
val ones = table.length - 1
- (improved >> (32 - java.lang.Integer.bitCount(ones))) & ones
+ (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
}
protected def clearTable() {
@@ -323,7 +323,7 @@ private[collection] object FlatHashTable {
*/
private[collection] def initialSize: Int = 16
- private[collection] def sizeForThreshold(size: Int, _loadFactor: Int) = size * loadFactorDenum / _loadFactor
+ private[collection] def sizeForThreshold(size: Int, _loadFactor: Int) = (size.toLong * loadFactorDenum / _loadFactor).toInt
private[collection] def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
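
The .toLong in sizeForThreshold above guards against Int overflow of the intermediate product. Assuming loadFactorDenum is 1000 (its value in this version of FlatHashTable), a large table size would otherwise wrap:

    val size = 3000000
    size * 1000 / 500                   // intermediate 3000000000 wraps negative
    (size.toLong * 1000 / 500).toInt    // 6000000, the intended threshold
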
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 6a75108dfb..f1271627c1 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -245,9 +245,14 @@ final class ListBuffer[A]
*
* @param xs the list to which elements are prepended
*/
- def prependToList(xs: List[A]): List[A] =
+ def prependToList(xs: List[A]): List[A] = {
if (start.isEmpty) xs
- else { last0.tl = xs; toList }
+ else {
+ if (exported) copy()
+ last0.tl = xs
+ toList
+ }
+ }
// Overrides of methods in Buffer
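
The added exported check above mirrors what the other destructive ListBuffer operations already do: once toList has handed out the internal cons cells, splicing must happen on a private copy. A hypothetical sequence of calls showing what it protects:

    import scala.collection.mutable.ListBuffer
    val buf = ListBuffer(1, 2, 3)
    val snapshot = buf.toList         // shares the buffer's cells, marks it exported
    buf.prependToList(List(4, 5))     // List(1, 2, 3, 4, 5), built on a copy
    snapshot                          // still List(1, 2, 3); without the copy the
                                      // tail of its last cell would have been rewired
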
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index edd59a8221..8d7ed8bcd2 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -171,6 +171,17 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*/
def +=(x: Char): this.type = { append(x); this }
+ /** Optimization.
+ */
+ def ++=(s: String): this.type = {
+ underlying append s
+ this
+ }
+ def appendAll(xs: String): StringBuilder = {
+ underlying append xs
+ this
+ }
+
/** !!! This should create a new sequence.
*/
def +(x: Char): this.type = { +=(x); this }
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 0e48995cbe..3e22e3cdd8 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -178,6 +178,8 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
threshold = FlatHashTable.newThreshold(_loadFactor, table.length)
sizeMapInit(table.length)
+ override def toString = "AFHT(%s)".format(table.length)
+
def tableLength = table.length
def setSize(sz: Int) = tableSize = sz
@@ -194,7 +196,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
* the table will try to add the element in such a position if possible. Collisions are resolved
* using linear hashing, so the element may actually have to be added to a position
* that follows the specified one. In the case that the first unoccupied position
- * comes after `comesBefore`, the element is not added and the method simply returns `-1`,
+ * comes after `comesBefore`, the element is not added and the method simply returns -1,
* indicating that it couldn't add the element in a position that comes before the
* specified one.
* If the element is already present in the hash table, it is not added, and this method
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index decae62dd2..7c83d43487 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -121,8 +121,10 @@ package object parallel {
/* classes */
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
- final class CompositeThrowable(val throwables: Set[Throwable])
- extends Throwable("Multiple exceptions thrown during a parallel computation: " + throwables.map(t => (t, t.getStackTrace.toList)).mkString(", "))
+ final case class CompositeThrowable(val throwables: Set[Throwable])
+ extends Throwable("Multiple exceptions thrown during a parallel computation: " + throwables.map(
+ t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")
+ ).mkString("\n\n"))
/** A helper iterator for iterating very small array buffers.
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index fafb57da55..60d67a0b58 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -31,8 +31,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
val bufReader = BufferedSource.this.bufferedReader()
var nextLine = bufReader.readLine
- override def hasNext() = nextLine != null
-
+ override def hasNext = nextLine != null
override def next(): String = {
val result = nextLine
nextLine = bufReader.readLine
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index caffbe6133..f98ef819d8 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -253,9 +253,12 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def signum: Int = this.bigInteger.signum()
- /** Returns the bitwise complement of this BigNum
+ @deprecated("Use ~bigInt (the unary_~ method) instead", "2.10.0")
+ def ~ : BigInt = ~this
+
+ /** Returns the bitwise complement of this BigInt
*/
- def ~ : BigInt = new BigInt(this.bigInteger.not())
+ def unary_~ : BigInt = new BigInt(this.bigInteger.not())
/** Returns true if and only if the designated bit is set.
*/
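
At use sites the rename above means the prefix form now resolves as intended, while the old postfix spelling keeps working under a deprecation warning:

    val x = BigInt(10)
    ~x      // prefix form, resolves to unary_~, == BigInt(-11)
    x.~     // still compiles, but deprecated per the annotation above
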
diff --git a/src/library/scala/reflect/generic/AnnotationInfos.scala b/src/library/scala/reflect/generic/AnnotationInfos.scala
index 6239ca189c..f995a2b340 100755
--- a/src/library/scala/reflect/generic/AnnotationInfos.scala
+++ b/src/library/scala/reflect/generic/AnnotationInfos.scala
@@ -1,7 +1,7 @@
package scala.reflect
package generic
-trait AnnotationInfos { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait AnnotationInfos { self: Universe =>
type AnnotationInfo <: AnyRef
val AnnotationInfo: AnnotationInfoExtractor
diff --git a/src/library/scala/reflect/generic/ByteCodecs.scala b/src/library/scala/reflect/generic/ByteCodecs.scala
index ae15e2941b..8993e068d9 100644
--- a/src/library/scala/reflect/generic/ByteCodecs.scala
+++ b/src/library/scala/reflect/generic/ByteCodecs.scala
@@ -214,11 +214,3 @@ object ByteCodecs {
decode7to8(xs, len)
}
}
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/reflect/generic/Constants.scala b/src/library/scala/reflect/generic/Constants.scala
index 88a213481d..ca04fdac9e 100755
--- a/src/library/scala/reflect/generic/Constants.scala
+++ b/src/library/scala/reflect/generic/Constants.scala
@@ -9,7 +9,7 @@ package generic
import java.lang.Integer.toOctalString
import annotation.switch
-trait Constants {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Constants {
self: Universe =>
import definitions._
diff --git a/src/library/scala/reflect/generic/Flags.scala b/src/library/scala/reflect/generic/Flags.scala
index d7c8ff9296..b56faf8934 100755
--- a/src/library/scala/reflect/generic/Flags.scala
+++ b/src/library/scala/reflect/generic/Flags.scala
@@ -3,7 +3,7 @@ package generic
/** Flags set on Modifiers instances in the parsing stage.
*/
-class ModifierFlags {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") class ModifierFlags {
final val IMPLICIT = 0x00000200
final val FINAL = 0x00000020
final val PRIVATE = 0x00000004
@@ -43,7 +43,7 @@ class ModifierFlags {
}
object ModifierFlags extends ModifierFlags
-class Flags extends ModifierFlags {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") class Flags extends ModifierFlags {
final val METHOD = 0x00000040 // a method
final val MODULE = 0x00000100 // symbol is module or class implementing a module
final val PACKAGE = 0x00004000 // symbol is a java package
@@ -102,7 +102,7 @@ class Flags extends ModifierFlags {
final val PickledFlags: Long = 0xFFFFFFFFL
- private val rawPickledCorrespondence = List(
+ private def rawPickledCorrespondence = Array(
(IMPLICIT, IMPLICIT_PKL),
(FINAL, FINAL_PKL),
(PRIVATE, PRIVATE_PKL),
@@ -116,20 +116,31 @@ class Flags extends ModifierFlags {
(MODULE, MODULE_PKL),
(INTERFACE, INTERFACE_PKL)
)
- private def mkCorrespondenceArray(correspondence: List[(Int, Int)]) = {
- def f(flags: Int): Int = {
- correspondence.foldLeft(0) {
- case (result, (oldFlag, newFlag)) =>
- if ((flags & oldFlag) != 0) result | newFlag
- else result
- }
+ private val rawFlags: Array[Int] = rawPickledCorrespondence map (_._1)
+ private val pickledFlags: Array[Int] = rawPickledCorrespondence map (_._2)
+
+ private def r2p(flags: Int): Int = {
+ var result = 0
+ var i = 0
+ while (i < rawFlags.length) {
+ if ((flags & rawFlags(i)) != 0)
+ result |= pickledFlags(i)
+
+ i += 1
+ }
+ result
+ }
+ private def p2r(flags: Int): Int = {
+ var result = 0
+ var i = 0
+ while (i < rawFlags.length) {
+ if ((flags & pickledFlags(i)) != 0)
+ result |= rawFlags(i)
+
+ i += 1
}
- 0 to PKL_MASK map f toArray
+ result
}
- /** A map from the raw to pickled flags, and vice versa.
- */
- private val r2p = mkCorrespondenceArray(rawPickledCorrespondence)
- private val p2r = mkCorrespondenceArray(rawPickledCorrespondence map (_.swap))
// Generated by mkFlagToStringMethod() at Mon Oct 11 10:07:29 PDT 2010
@annotation.switch override def flagToString(flag: Long): String = flag match {
@@ -232,7 +243,7 @@ class Flags extends ModifierFlags {
// List of the raw flags, in pickled order
protected val pickledListOrder: List[Long] = {
val all = 0 to 62 map (1L << _)
- val front = rawPickledCorrespondence map (_._1.toLong)
+ val front = rawFlags map (_.toLong)
front.toList ++ (all filterNot (front contains _))
}
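
The rewritten correspondence above drops the precomputed 0-to-PKL_MASK lookup table in favour of two parallel flag arrays scanned with a while loop. A self-contained miniature of the same translation, with made-up flag values rather than the real compiler constants:

    val rawFlags     = Array(0x1, 0x2, 0x4)
    val pickledFlags = Array(0x10, 0x20, 0x40)
    def r2p(flags: Int): Int = {
      var result, i = 0
      while (i < rawFlags.length) {
        if ((flags & rawFlags(i)) != 0) result |= pickledFlags(i)
        i += 1
      }
      result
    }
    r2p(0x1 | 0x4)   // == 0x50
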
diff --git a/src/library/scala/reflect/generic/HasFlags.scala b/src/library/scala/reflect/generic/HasFlags.scala
index ca8e2ede86..3d9d121adf 100644
--- a/src/library/scala/reflect/generic/HasFlags.scala
+++ b/src/library/scala/reflect/generic/HasFlags.scala
@@ -76,7 +76,7 @@ import Flags._
/** Common code utilized by Modifiers (which carry the flags associated
* with Trees) and Symbol.
*/
-trait HasFlags {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait HasFlags {
type FlagsType
type AccessBoundaryType
type AnnotationType
diff --git a/src/library/scala/reflect/generic/Names.scala b/src/library/scala/reflect/generic/Names.scala
index 90c38c04b3..1906a99b8b 100755
--- a/src/library/scala/reflect/generic/Names.scala
+++ b/src/library/scala/reflect/generic/Names.scala
@@ -1,7 +1,7 @@
package scala.reflect
package generic
-trait Names {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Names {
type Name >: Null <: AnyRef
type TypeName <: Name
type TermName <: Name
diff --git a/src/library/scala/reflect/generic/PickleBuffer.scala b/src/library/scala/reflect/generic/PickleBuffer.scala
index c56d55fa1d..f52a248f3c 100755
--- a/src/library/scala/reflect/generic/PickleBuffer.scala
+++ b/src/library/scala/reflect/generic/PickleBuffer.scala
@@ -12,7 +12,7 @@ package generic
* @param from The first index where defined data are found
* @param to The first index where new data can be written
*/
-class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
var bytes = data
var readIndex = from
diff --git a/src/library/scala/reflect/generic/PickleFormat.scala b/src/library/scala/reflect/generic/PickleFormat.scala
index 1667e8c24f..c6308e7db8 100755
--- a/src/library/scala/reflect/generic/PickleFormat.scala
+++ b/src/library/scala/reflect/generic/PickleFormat.scala
@@ -9,7 +9,7 @@ package generic
* @author Martin Odersky
* @version 1.0
*/
-object PickleFormat {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") object PickleFormat {
/***************************************************
* Symbol table attribute format:
diff --git a/src/library/scala/reflect/generic/Scopes.scala b/src/library/scala/reflect/generic/Scopes.scala
index 9aff63d958..5ca7c95467 100755
--- a/src/library/scala/reflect/generic/Scopes.scala
+++ b/src/library/scala/reflect/generic/Scopes.scala
@@ -1,7 +1,7 @@
package scala.reflect
package generic
-trait Scopes { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Scopes { self: Universe =>
abstract class AbsScope extends Iterable[Symbol] {
private[reflect] def enter(sym: Symbol): Symbol
diff --git a/src/library/scala/reflect/generic/StandardDefinitions.scala b/src/library/scala/reflect/generic/StandardDefinitions.scala
index 75fe2b9200..49ac1d37a9 100755
--- a/src/library/scala/reflect/generic/StandardDefinitions.scala
+++ b/src/library/scala/reflect/generic/StandardDefinitions.scala
@@ -6,7 +6,7 @@
package scala.reflect
package generic
-trait StandardDefinitions { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait StandardDefinitions { self: Universe =>
val definitions: AbsDefinitions
diff --git a/src/library/scala/reflect/generic/StdNames.scala b/src/library/scala/reflect/generic/StdNames.scala
index 3d4cdc46ce..8f8d695016 100755
--- a/src/library/scala/reflect/generic/StdNames.scala
+++ b/src/library/scala/reflect/generic/StdNames.scala
@@ -3,7 +3,7 @@ package generic
import scala.reflect.NameTransformer
-trait StdNames {
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait StdNames {
self: Universe =>
val nme: LibraryTermNames
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
index 3614bfdeb4..a58711663e 100755
--- a/src/library/scala/reflect/generic/Symbols.scala
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -3,7 +3,7 @@ package generic
import Flags._
-trait Symbols { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Symbols { self: Universe =>
type Symbol >: Null <: AbsSymbol
diff --git a/src/library/scala/reflect/generic/Trees.scala b/src/library/scala/reflect/generic/Trees.scala
index baf36c9749..d44f6b5b8f 100755
--- a/src/library/scala/reflect/generic/Trees.scala
+++ b/src/library/scala/reflect/generic/Trees.scala
@@ -4,7 +4,7 @@ package generic
import java.io.{ PrintWriter, StringWriter }
import Flags._
-trait Trees { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Trees { self: Universe =>
abstract class AbsTreePrinter(out: PrintWriter) {
def print(tree: Tree)
diff --git a/src/library/scala/reflect/generic/Types.scala b/src/library/scala/reflect/generic/Types.scala
index c0eded6ab7..837f5484db 100755
--- a/src/library/scala/reflect/generic/Types.scala
+++ b/src/library/scala/reflect/generic/Types.scala
@@ -1,7 +1,7 @@
package scala.reflect
package generic
-trait Types { self: Universe =>
+@deprecated("scala.reflect.generic will be removed", "2.9.1") trait Types { self: Universe =>
abstract class AbsType {
def typeSymbol: Symbol
diff --git a/src/library/scala/reflect/generic/UnPickler.scala b/src/library/scala/reflect/generic/UnPickler.scala
index 6fca886e64..1a8e0ae0f5 100755
--- a/src/library/scala/reflect/generic/UnPickler.scala
+++ b/src/library/scala/reflect/generic/UnPickler.scala
@@ -18,6 +18,7 @@ import annotation.switch
/** @author Martin Odersky
* @version 1.0
*/
+@deprecated("scala.reflect.generic will be removed", "2.9.1")
abstract class UnPickler {
val global: Universe
@@ -65,24 +66,37 @@ abstract class UnPickler {
//println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+ // Laboriously unrolled for performance.
def run() {
- // read children last, fix for #3951
- val queue = new collection.mutable.ListBuffer[() => Unit]()
- def delay(i: Int, action: => Unit) {
- queue += (() => at(i, {() => action; null}))
+ var i = 0
+ while (i < index.length) {
+ if (entries(i) == null && isSymbolEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ entries(i) = readSymbol()
+ readIndex = savedIndex
+ }
+ i += 1
}
-
- for (i <- 0 until index.length) {
- if (isSymbolEntry(i))
- at(i, readSymbol)
- else if (isSymbolAnnotationEntry(i))
- delay(i, readSymbolAnnotation())
- else if (isChildrenEntry(i))
- delay(i, readChildren())
+ // read children last, fix for #3951
+ i = 0
+ while (i < index.length) {
+ if (entries(i) == null) {
+ if (isSymbolAnnotationEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readSymbolAnnotation()
+ readIndex = savedIndex
+ }
+ else if (isChildrenEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readChildren()
+ readIndex = savedIndex
+ }
+ }
+ i += 1
}
-
- for (action <- queue)
- action()
}
private def checkVersion() {
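
Both unrolled loops above repeat the jump/decode/restore dance that the old at(i, ...) helper and closure queue expressed indirectly; symbols are decoded in the first pass and annotations and children are deferred to the second (the #3951 ordering). Schematically, with stand-in fields for readIndex and the entry offsets:

    object Cursor {
      var readIndex = 0
      val index = Array(10, 42, 77)       // stand-in for the pickle entry offsets
      def at(i: Int)(decode: => Unit) {
        val savedIndex = readIndex
        readIndex = index(i)
        decode                            // readSymbol(), readSymbolAnnotation(), ...
        readIndex = savedIndex
      }
    }
    Cursor.at(1) { println("decoding entry at offset " + Cursor.readIndex) }
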
diff --git a/src/library/scala/reflect/generic/Universe.scala b/src/library/scala/reflect/generic/Universe.scala
index 101295ae79..4bc70044ae 100755
--- a/src/library/scala/reflect/generic/Universe.scala
+++ b/src/library/scala/reflect/generic/Universe.scala
@@ -1,6 +1,7 @@
package scala.reflect
package generic
+@deprecated("scala.reflect.generic will be removed", "2.9.1")
abstract class Universe extends Symbols
with Types
with Constants
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index 5be2aeeb84..8be2745086 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -12,4 +12,6 @@ package scala.runtime
import scala.util.control.ControlThrowable
-class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable
+class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable {
+ final override def fillInStackTrace(): Throwable = this
+}
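
Skipping fillInStackTrace matters because every non-local return from a closure is compiled into throwing this exception. A typical pattern that now no longer pays for a stack-trace capture:

    def firstNegative(xs: List[Int]): Option[Int] = {
      xs foreach { x => if (x < 0) return Some(x) }   // throws NonLocalReturnControl under the hood
      None
    }
    firstNegative(List(3, -1, 5))   // Some(-1)
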
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 41a63777c5..0a42492443 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -32,6 +32,22 @@ object ScalaRunTime {
def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ /** Return the class object representing an unboxed value type,
+ * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
+ * rewrites expressions like 5.getClass to come here.
+ */
+ def anyValClass[T <: AnyVal](value: T): Class[T] = (value match {
+ case x: Byte => java.lang.Byte.TYPE
+ case x: Short => java.lang.Short.TYPE
+ case x: Char => java.lang.Character.TYPE
+ case x: Int => java.lang.Integer.TYPE
+ case x: Long => java.lang.Long.TYPE
+ case x: Float => java.lang.Float.TYPE
+ case x: Double => java.lang.Double.TYPE
+ case x: Boolean => java.lang.Boolean.TYPE
+ case x: Unit => java.lang.Void.TYPE
+ }).asInstanceOf[Class[T]]
+
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = xs match {
case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
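
anyValClass above returns the primitive class token; the compiler rewrites value-typed getClass calls to go through it, but it can also be called directly, for example:

    scala.runtime.ScalaRunTime.anyValClass(5)      // classOf[Int], i.e. java.lang.Integer.TYPE
    scala.runtime.ScalaRunTime.anyValClass(true)   // java.lang.Boolean.TYPE
    (5: Any).getClass                              // classOf[java.lang.Integer], the boxed class
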
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index f5a844d2c9..dff29c86b4 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -18,6 +18,10 @@ package scala.util.control
*/
trait NoStackTrace extends Throwable {
override def fillInStackTrace(): Throwable =
- if (sys.SystemProperties.noTraceSupression) super.fillInStackTrace()
+ if (NoStackTrace.noSuppression) super.fillInStackTrace()
else this
}
+
+object NoStackTrace {
+ final val noSuppression = sys.SystemProperties.noTraceSupression.value
+}
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index 51a2fda6aa..8b7137eed1 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -132,7 +132,7 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
// consumer/iterator interface - we need not synchronize access to buffer
// because we required there to be only one consumer.
- def hasNext() = !eos && (buffer != null || fillBuffer)
+ def hasNext = !eos && (buffer != null || fillBuffer)
def next() = {
if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
if (buffer == null) fillBuffer
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index 3957192f70..953823ef77 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -143,7 +143,7 @@ class ConsoleRunner extends DirectRunner {
val dir =
if (fileManager.testClasses.isDefined) fileManager.testClassesDir
else fileManager.testBuildFile getOrElse {
- fileManager.latestCompFile.getParentFile.getParentFile.getCanonicalFile
+ fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
}
val vmBin = javaHome + File.separator + "bin"
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index e23063640f..a9bf186a44 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -27,7 +27,7 @@ trait FileManager {
*/
def compareFiles(f1: File, f2: File): String = {
val diffWriter = new StringWriter
- val args = Array(f1.getCanonicalPath(), f2.getCanonicalPath())
+ val args = Array(f1.getAbsolutePath(), f2.getAbsolutePath())
DiffPrint.doDiff(args, diffWriter)
val res = diffWriter.toString
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index bfa6427d21..88ea62353e 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -525,7 +525,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
val succFn: (File, File) => Boolean = { (logFile, outDir) =>
NestUI.verbose("compilation of "+file+" succeeded\n")
- val outURL = outDir.getCanonicalFile.toURI.toURL
+ val outURL = outDir.getAbsoluteFile.toURI.toURL
val logWriter = new PrintStream(new FileOutputStream(logFile), true)
Output.withRedirected(logWriter) {
@@ -616,7 +616,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
// create proper settings for the compiler
val settings = new Settings(workerError)
- settings.outdir.value = outDir.getCanonicalFile.getAbsolutePath
+ settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
settings.sourcepath.value = sourcepath
settings.classpath.value = fileManager.CLASSPATH
settings.Ybuildmanagerdebug.value = true
@@ -723,12 +723,12 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
// run compiler in resident mode
// $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getCanonicalFile
+ val sourcedir = logFile.getParentFile.getAbsoluteFile
val sourcepath = sourcedir.getAbsolutePath+File.separator
NestUI.verbose("sourcepath: "+sourcepath)
val argString =
- "-d "+outDir.getCanonicalFile.getAbsolutePath+
+ "-d "+outDir.getAbsoluteFile.getPath+
" -Xresident"+
" -sourcepath "+sourcepath
val argList = argString split ' ' toList
@@ -976,7 +976,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
react {
case Timeout(file) =>
- updateStatus(file.getCanonicalPath, TestState.Timeout)
+ updateStatus(file.getAbsolutePath, TestState.Timeout)
val swr = new StringWriter
val wr = new PrintWriter(swr, true)
printInfoStart(file, wr)
@@ -988,7 +988,7 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
case Result(file, logs) =>
val state = if (succeeded) TestState.Ok else TestState.Fail
- updateStatus(file.getCanonicalPath, state)
+ updateStatus(file.getAbsolutePath, state)
reportResult(
state,
logs.file,
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 9ddb521a10..24f26e2b60 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -126,13 +126,14 @@ class Main {
/**
* The short name of the package (without prefix)
*/
- def name: String = ""
- def asURLs = Nil
+ def name = ""
+ def asURLs = Nil
def asClasspathString = ""
- val context = DefaultJavaContext
- val classes: List[ClassRep] = Nil
- val packages: List[ClassPath[AbstractFile]] = Nil
- val sourcepaths: List[AbstractFile] = Nil
+
+ val context = DefaultJavaContext
+ val classes = IndexedSeq[ClassRep]()
+ val packages = IndexedSeq[ClassPath[AbstractFile]]()
+ val sourcepaths = IndexedSeq[AbstractFile]()
}
}
diff --git a/src/swing/scala/swing/RichWindow.scala b/src/swing/scala/swing/RichWindow.scala
index 5cb6dfdf33..721172f333 100644
--- a/src/swing/scala/swing/RichWindow.scala
+++ b/src/swing/scala/swing/RichWindow.scala
@@ -36,13 +36,13 @@ sealed trait RichWindow extends Window {
def peer: AWTWindow with InterfaceMixin
trait InterfaceMixin extends super.InterfaceMixin {
- def getJMenuBar: JMenuBar
+ def getJMenuBar(): JMenuBar
def setJMenuBar(b: JMenuBar)
def setUndecorated(b: Boolean)
def setTitle(s: String)
- def getTitle: String
+ def getTitle(): String
def setResizable(b: Boolean)
- def isResizable: Boolean
+ def isResizable(): Boolean
}
def title: String = peer.getTitle
diff --git a/test/files/pos/spec-List.scala b/test/disabled/pos/spec-List.scala
index 04ab7d1543..04ab7d1543 100644
--- a/test/files/pos/spec-List.scala
+++ b/test/disabled/pos/spec-List.scala
diff --git a/test/files/jvm/actor-sync-send-timeout.scala b/test/files/jvm/actor-sync-send-timeout.scala
new file mode 100644
index 0000000000..ed330900b2
--- /dev/null
+++ b/test/files/jvm/actor-sync-send-timeout.scala
@@ -0,0 +1,47 @@
+import scala.actors.Actor
+
+/* This test is a regression test for SI-4759.
+ */
+object Test {
+ val Runs = 5
+
+ def main(args: Array[String]) = {
+ var i = 0
+ while (i < Runs) {
+ i += 1
+ A1 ! 1
+ Thread.sleep(500)
+ }
+ //println("done sending to A1")
+ }
+}
+
+object A2 extends Actor {
+ this.start()
+ def act() {
+ loop {
+ react {
+ case 'stop =>
+ //println("A2 exiting")
+ exit()
+ case _ =>
+ }
+ }
+ }
+}
+
+object A1 extends Actor {
+ this.start()
+ def act() {
+ var i = 0
+ loopWhile(i < Test.Runs) {
+ i += 1
+ react {
+ case any =>
+ A2 !? (500, any)
+ if (i == Test.Runs)
+ A2 ! 'stop
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/manifests.check b/test/files/jvm/manifests.check
index 3b8ca5b5b1..54f504b929 100644
--- a/test/files/jvm/manifests.check
+++ b/test/files/jvm/manifests.check
@@ -29,7 +29,7 @@ x=Foo, m=Foo[scala.collection.immutable.List[Int]]
x=Foo, m=Foo[Foo[Int]]
x=Foo, m=Foo[scala.collection.immutable.List[Foo[Int]]]
-x=Test1$$anon$1, m=Test1$$anon$1
+x=Test1$$anon$1, m=Object with Bar[java.lang.String]
()=()
true=true
diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check
new file mode 100644
index 0000000000..42b98ac026
--- /dev/null
+++ b/test/files/neg/abstract-inaccessible.check
@@ -0,0 +1,13 @@
+abstract-inaccessible.scala:5: error: method implementMe in trait YourTrait references private[foo] trait Bippy.
+Classes which cannot access Bippy may be unable to provide a concrete implementation of implementMe.
+ def implementMe(f: Int => (String, Bippy)): Unit
+ ^
+abstract-inaccessible.scala:6: error: method overrideMe in trait YourTrait references private[foo] trait Bippy.
+Classes which cannot access Bippy may be unable to override overrideMe.
+ def overrideMe[T <: Bippy](x: T): T = x
+ ^
+abstract-inaccessible.scala:7: error: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
+Classes which cannot access Bippy may be unable to override overrideMeAlso.
+ def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
+ ^
+three errors found
diff --git a/test/files/neg/abstract-inaccessible.flags b/test/files/neg/abstract-inaccessible.flags
new file mode 100644
index 0000000000..6c1dd108ae
--- /dev/null
+++ b/test/files/neg/abstract-inaccessible.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint \ No newline at end of file
diff --git a/test/files/neg/abstract-inaccessible.scala b/test/files/neg/abstract-inaccessible.scala
new file mode 100644
index 0000000000..3c80f30522
--- /dev/null
+++ b/test/files/neg/abstract-inaccessible.scala
@@ -0,0 +1,9 @@
+package foo {
+ private[foo] trait Bippy { }
+
+ trait YourTrait {
+ def implementMe(f: Int => (String, Bippy)): Unit
+ def overrideMe[T <: Bippy](x: T): T = x
+ def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
+ }
+}
diff --git a/test/files/neg/bug278.check b/test/files/neg/bug278.check
index ad0a97371e..b1041b7895 100644
--- a/test/files/neg/bug278.check
+++ b/test/files/neg/bug278.check
@@ -1,5 +1,5 @@
bug278.scala:5: error: overloaded method value a with alternatives:
- => (C.this.A) => Unit <and>
+ => C.this.A => Unit <and>
=> () => Unit
does not take type parameters
a[A]
diff --git a/test/files/neg/bug4419.check b/test/files/neg/bug4419.check
new file mode 100644
index 0000000000..488be9a591
--- /dev/null
+++ b/test/files/neg/bug4419.check
@@ -0,0 +1,4 @@
+bug4419.scala:2: error: forward reference extends over definition of value b
+ { val b = a; val a = 1 }
+ ^
+one error found
diff --git a/test/files/neg/bug4419.scala b/test/files/neg/bug4419.scala
new file mode 100644
index 0000000000..38a34be489
--- /dev/null
+++ b/test/files/neg/bug4419.scala
@@ -0,0 +1,3 @@
+class A {
+ { val b = a; val a = 1 }
+} \ No newline at end of file
diff --git a/test/files/neg/bug4533.check b/test/files/neg/bug4533.check
deleted file mode 100644
index b405619831..0000000000
--- a/test/files/neg/bug4533.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4533.scala:6: error: trait GenSetLike in package collection cannot be accessed in package collection
- def statusByAlarms(alarms: GenSetLike[FooAlarm]) = println("hello")
- ^
-one error found
diff --git a/test/files/neg/bug4533.scala b/test/files/neg/bug4533.scala
deleted file mode 100644
index 425c958328..0000000000
--- a/test/files/neg/bug4533.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package demo
-
-import scala.collection._
-
-class CrashDemo {
- def statusByAlarms(alarms: GenSetLike[FooAlarm]) = println("hello")
-}
-class FooAlarm { }
diff --git a/test/files/neg/bug4584.check b/test/files/neg/bug4584.check
new file mode 100644
index 0000000000..0008d9efa5
--- /dev/null
+++ b/test/files/neg/bug4584.check
@@ -0,0 +1,4 @@
+bug4584.scala:1: error: incomplete unicode escape
+class A { val \u2
+ ^
+one error found
diff --git a/test/files/neg/bug4584.scala b/test/files/neg/bug4584.scala
new file mode 100644
index 0000000000..b34aba91a2
--- /dev/null
+++ b/test/files/neg/bug4584.scala
@@ -0,0 +1 @@
+class A { val \u2 \ No newline at end of file
diff --git a/test/files/neg/bug4727.check b/test/files/neg/bug4727.check
new file mode 100644
index 0000000000..cac35b1548
--- /dev/null
+++ b/test/files/neg/bug4727.check
@@ -0,0 +1,11 @@
+bug4727.scala:5: error: type mismatch;
+ found : Null
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
+ and method Integer2int in object Predef of type (x: java.lang.Integer)Int
+ are possible conversion functions from Null to Int
+Error occurred in an application involving default arguments.
+ new C[Int]
+ ^
+one error found
diff --git a/test/files/neg/bug4727.scala b/test/files/neg/bug4727.scala
new file mode 100644
index 0000000000..40c06713ca
--- /dev/null
+++ b/test/files/neg/bug4727.scala
@@ -0,0 +1,7 @@
+class C[T](x : T = null)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new C[Int]
+ }
+}
diff --git a/test/files/neg/bug588.check b/test/files/neg/bug588.check
index 5f55c95619..8c01ac5b09 100644
--- a/test/files/neg/bug588.check
+++ b/test/files/neg/bug588.check
@@ -1,6 +1,6 @@
bug588.scala:3: error: double definition:
-method visit:(f: (Int) => String)Boolean and
-method visit:(f: (Int) => Unit)Boolean at line 2
+method visit:(f: Int => String)Boolean and
+method visit:(f: Int => Unit)Boolean at line 2
have same type after erasure: (f: Function1)Boolean
def visit(f: Int => String): Boolean
^
diff --git a/test/files/neg/bug752.check b/test/files/neg/bug752.check
index dddab530e4..9262f38f01 100644
--- a/test/files/neg/bug752.check
+++ b/test/files/neg/bug752.check
@@ -1,6 +1,6 @@
bug752.scala:6: error: type mismatch;
- found : (String) => Unit
- required: (Int) => Unit
+ found : String => Unit
+ required: Int => Unit
f(g _)
^
one error found
diff --git a/test/files/neg/divergent-implicit.check b/test/files/neg/divergent-implicit.check
index 07af8510d9..5f20df1b91 100644
--- a/test/files/neg/divergent-implicit.check
+++ b/test/files/neg/divergent-implicit.check
@@ -3,15 +3,15 @@ divergent-implicit.scala:4: error: type mismatch;
required: String
val x1: String = 1
^
-divergent-implicit.scala:5: error: diverging implicit expansion for type (Int) => String
+divergent-implicit.scala:5: error: diverging implicit expansion for type Int => String
starting with method cast in object Test1
val x2: String = cast[Int, String](1)
^
-divergent-implicit.scala:14: error: diverging implicit expansion for type (Test2.Baz) => Test2.Bar
+divergent-implicit.scala:14: error: diverging implicit expansion for type Test2.Baz => Test2.Bar
starting with method baz2bar in object Test2
val x: Bar = new Foo
^
-divergent-implicit.scala:15: error: diverging implicit expansion for type (Test2.Foo) => Test2.Bar
+divergent-implicit.scala:15: error: diverging implicit expansion for type Test2.Foo => Test2.Bar
starting with method foo2bar in object Test2
val y: Bar = new Baz
^
diff --git a/test/files/neg/nullary-override.check b/test/files/neg/nullary-override.check
new file mode 100644
index 0000000000..6b2ded2d4a
--- /dev/null
+++ b/test/files/neg/nullary-override.check
@@ -0,0 +1,4 @@
+nullary-override.scala:2: error: non-nullary method overrides nullary method
+class B extends A { override def x(): Int = 4 }
+ ^
+one error found
diff --git a/test/files/neg/nullary-override.flags b/test/files/neg/nullary-override.flags
new file mode 100644
index 0000000000..6c1dd108ae
--- /dev/null
+++ b/test/files/neg/nullary-override.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint \ No newline at end of file
diff --git a/test/files/neg/nullary-override.scala b/test/files/neg/nullary-override.scala
new file mode 100644
index 0000000000..3eb4784a0c
--- /dev/null
+++ b/test/files/neg/nullary-override.scala
@@ -0,0 +1,3 @@
+class A { def x: Int = 3 }
+class B extends A { override def x(): Int = 4 }
+
diff --git a/test/files/neg/t0003.check b/test/files/neg/t0003.check
index fb5bb5671d..1913dde9dd 100644
--- a/test/files/neg/t0003.check
+++ b/test/files/neg/t0003.check
@@ -1,6 +1,6 @@
t0003.scala:2: error: type mismatch;
- found : (A) => (B) => B
- required: (A) => B
+ found : A => B => B
+ required: A => B
def foo[A, B, C](l: List[A], f: A => B=>B, g: B=>B=>C): List[C] = l map (g compose f)
^
one error found
diff --git a/test/files/neg/t0015.check b/test/files/neg/t0015.check
index eb25fc46c8..43adc22f72 100644
--- a/test/files/neg/t0015.check
+++ b/test/files/neg/t0015.check
@@ -1,6 +1,6 @@
t0015.scala:5: error: type mismatch;
found : () => Nothing
- required: (Nothing) => ?
+ required: Nothing => ?
Nil.map(f _)
^
one error found
diff --git a/test/files/neg/t2179.check b/test/files/neg/t2179.check
deleted file mode 100644
index aa94fabe1f..0000000000
--- a/test/files/neg/t2179.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t2179.scala:2: error: inferred type arguments [scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}] do not conform to method reduceLeft's type parameter bounds [B >: List[Double]]
- (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
- ^
-t2179.scala:2: error: type mismatch;
- found : (Any, Any) => scala.collection.immutable.IndexedSeq[Double]
- required: (scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}, List[Double]) => scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}
- (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
- ^
-two errors found
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
index f19a901859..771624e8d9 100644
--- a/test/files/neg/t2641.check
+++ b/test/files/neg/t2641.check
@@ -19,7 +19,17 @@ t2641.scala:25: error: something is wrong (wrong class file?): trait ManagedSeq
t2641.scala:27: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = namer
trait Sliced extends Transformed[A] with super.Sliced {
^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass ManagedSeqStrict
+ of the mixin trait Transformed
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass Object
+ of the mixin trait Sliced
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
t2641.scala:28: error: value managedIterator is not a member of ManagedSeq
override def managedIterator = self.managedIterator slice (from, until)
^
-7 errors found
+9 errors found
diff --git a/test/files/neg/t3507.check b/test/files/neg/t3507.check
index ab38280c1f..8e538e4a8b 100644
--- a/test/files/neg/t3507.check
+++ b/test/files/neg/t3507.check
@@ -1,4 +1,4 @@
-t3507.scala:13: error: No Manifest available for object _1.b.c.
+t3507.scala:13: error: No Manifest available for _1.b.c.type.
mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
^
one error found
diff --git a/test/files/neg/t3774.check b/test/files/neg/t3774.check
deleted file mode 100644
index cce2d7076c..0000000000
--- a/test/files/neg/t3774.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t3774.scala:4: error: overloaded method value ++ with alternatives:
- [B1 >: List[Int]](xs: scala.collection.GenTraversableOnce[((Int, Int), B1)])scala.collection.immutable.Map[(Int, Int),B1] <and>
- [B >: ((Int, Int), List[Int]), That](that: scala.collection.GenTraversableOnce[B])(implicit bf: scala.collection.generic.CanBuildFrom[scala.collection.immutable.Map[(Int, Int),List[Int]],B,That])That
- cannot be applied to (scala.collection.immutable.IndexedSeq[((Int, Int), scala.collection.immutable.Range.Inclusive)])
- Map[(Int,Int),List[Int]]() ++ (for(x <- 0 to 1 ; y <- 0 to 1) yield {(x,y)-> (0 to 1)})
- ^
-one error found
diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check
index 2616fd35f9..10fe1861f5 100644
--- a/test/files/neg/unit2anyref.check
+++ b/test/files/neg/unit2anyref.check
@@ -1,8 +1,8 @@
unit2anyref.scala:2: error: type mismatch;
found : Unit
required: AnyRef
-Note: primitive types are not implicitly converted to AnyRef.
-You can safely force boxing by casting x.asInstanceOf[AnyRef].
+Note: Unit is not implicitly converted to AnyRef. You can safely
+pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so.
val x: AnyRef = () // this should not succeed.
^
one error found
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index d395e45e4e..4eaab56cef 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -7,7 +7,7 @@ variances.scala:14: error: covariant type A occurs in contravariant position in
variances.scala:16: error: covariant type A occurs in invariant position in supertype test.C[A] with ScalaObject of object Baz
object Baz extends C[A]
^
-variances.scala:63: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: (A) => A} of value x
+variances.scala:63: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x
val x: T[A] {
^
variances.scala:79: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
diff --git a/test/files/pos/bug1439.flags b/test/files/pos/bug1439.flags
new file mode 100644
index 0000000000..779916d58f
--- /dev/null
+++ b/test/files/pos/bug1439.flags
@@ -0,0 +1 @@
+-unchecked -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t1439.scala b/test/files/pos/bug1439.scala
index ae1fdd10b5..68a7332b2a 100644
--- a/test/files/pos/t1439.scala
+++ b/test/files/pos/bug1439.scala
@@ -1,4 +1,5 @@
-class View[C[A]] {}
+// no unchecked warnings
+class View[C[A]] { }
object Test {
null match {
diff --git a/test/files/pos/bug2094.scala b/test/files/pos/bug2094.scala
new file mode 100644
index 0000000000..ff142117b2
--- /dev/null
+++ b/test/files/pos/bug2094.scala
@@ -0,0 +1,31 @@
+object Test extends App {
+ // compiles:
+ Map[Int, Value](
+ 0 -> KnownType(classOf[Object]),
+ 1 -> UnknownValue())
+
+ // does not compile:
+ Map(
+ 0 -> KnownType(classOf[Object]),
+ 1 -> UnknownValue())
+
+ // Experiment.scala:10: error: type mismatch;
+ // found : (Int, KnownType)
+ // required: (Int, Product with Value{def getType: Option[java.lang.Class[_$$2]]}) where type _$$2
+ // 0 -> KnownType(classOf[Object]),
+ // ^
+ // one error found
+}
+sealed trait Value {
+ def getType: Option[Class[_]]
+}
+
+case class UnknownValue() extends Value {
+ def getType = None
+ // compiles if changed to:
+ // def getType: Option[Class[_]] = None
+}
+
+case class KnownType(typ: Class[_]) extends Value {
+ def getType = Some(typ)
+} \ No newline at end of file
diff --git a/test/files/pos/bug3048.scala b/test/files/pos/bug3048.scala
new file mode 100644
index 0000000000..dc056ecba2
--- /dev/null
+++ b/test/files/pos/bug3048.scala
@@ -0,0 +1,8 @@
+class B
+object C extends B
+
+class F[T <: B](cons: => T)
+class F2[T <: B](cons: => T) extends F(cons)
+
+object D extends F2(C) // works
+object E extends F2(new B {})
diff --git a/test/files/pos/bug3343.scala b/test/files/pos/bug3343.scala
new file mode 100644
index 0000000000..6c34cdff00
--- /dev/null
+++ b/test/files/pos/bug3343.scala
@@ -0,0 +1,15 @@
+import scala.collection.mutable.{ Builder, ListBuffer }
+
+object Test {
+ class Converter[T]
+ object SimpleIntConverter extends Converter[Int]
+
+ class TraversableConverter[T, Coll[X] <: Traversable[X]](converter: Converter[T], builder: Builder[T, Coll[T]]) extends Converter[Coll[T]] {
+ def convert(x: T): List[T] = List(x)
+ }
+ val tc: Converter[List[Int]] = new TraversableConverter(SimpleIntConverter, new ListBuffer[Int])
+ val tc2 = new TraversableConverter(SimpleIntConverter, new ListBuffer[Int])
+
+ def main(args: Array[String]): Unit = {
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/bug3528.scala b/test/files/pos/bug3528.scala
new file mode 100644
index 0000000000..ff49b3e929
--- /dev/null
+++ b/test/files/pos/bug3528.scala
@@ -0,0 +1,8 @@
+class A {
+ // 3528 - not fixed
+ // def f1 = List(List(1), Stream(1))
+ // 3528 comments
+ def f2 = List(Set(1,2,3), List(1,2,3))
+ // 2322
+ def f3 = List(null: Range, null: List[Int])
+}
diff --git a/test/files/pos/bug4018.scala b/test/files/pos/bug4018.scala
new file mode 100644
index 0000000000..2b265c5717
--- /dev/null
+++ b/test/files/pos/bug4018.scala
@@ -0,0 +1,15 @@
+trait M[V[_]]
+
+class Cls[V[_]](c: M[V])
+
+object Cls{
+ def apply[V[_]](c: M[V]): Cls[V] = new Cls[V](c)
+}
+
+object test {
+ val c: M[Option] = new M[Option] {}
+ new Cls(c) // does not infer.
+ new Cls[Option](c) // okay
+ Cls(c) // okay
+}
+
diff --git a/test/files/pos/bug4237.scala b/test/files/pos/bug4237.scala
new file mode 100644
index 0000000000..fcf6eb8bf1
--- /dev/null
+++ b/test/files/pos/bug4237.scala
@@ -0,0 +1,6 @@
+class A {
+ (new { def field = 0; def field_=(i: Int) = () }).field = 5 // compiles as expected
+ (new { def field(implicit i: Int) = 0; def field_=(i: Int) = () }).field = 5 // compiles even with implicit params on getter
+ (new { def field = 0; def field_=[T](i: Int) = () }).field = 5 // compiles with type param on setter
+ (new { def field[T] = 0; def field_=(i: Int) = () }).field = 5 // DOESN'T COMPILE
+} \ No newline at end of file
diff --git a/test/files/pos/bug4501.scala b/test/files/pos/bug4501.scala
new file mode 100644
index 0000000000..40628f1a4b
--- /dev/null
+++ b/test/files/pos/bug4501.scala
@@ -0,0 +1,14 @@
+// After lub modification
+import scala.collection.mutable.ListBuffer
+
+class A {
+ def foo[T](a:T, b:T):T = a
+ def f1 = foo(ListBuffer(), List())
+ def f2 = foo(ListBuffer(), ListBuffer())
+ def f3 = foo(List(), List())
+
+ // scalap
+ // def f1 : scala.collection.Seq[scala.Nothing] = { /* compiled code */ }
+ // def f2 : scala.collection.mutable.ListBuffer[scala.Nothing] = { /* compiled code */ }
+ // def f3 : scala.collection.immutable.List[scala.Nothing] = { /* compiled code */ }
+} \ No newline at end of file
diff --git a/test/files/pos/bug4553.scala b/test/files/pos/bug4553.scala
new file mode 100755
index 0000000000..4eefe57b2b
--- /dev/null
+++ b/test/files/pos/bug4553.scala
@@ -0,0 +1,11 @@
+trait VectorLike[+T, +V[A] <: Vector[A]] {
+ def +[S, VResult[S] >: V[S]](v: VResult[S])
+}
+
+trait Vector[+T] extends VectorLike[T, Vector]
+trait ImmutableVector[T] extends Vector[T] with VectorLike[T, ImmutableVector]
+trait MutableVector[T] extends Vector[T] with VectorLike[T, MutableVector]
+
+object Test {
+ def f = (null: MutableVector[Int]) + (null: ImmutableVector[Int])
+}
diff --git a/test/files/pos/bug4731.scala b/test/files/pos/bug4731.scala
new file mode 100644
index 0000000000..d457543c1f
--- /dev/null
+++ b/test/files/pos/bug4731.scala
@@ -0,0 +1,14 @@
+import java.util.Comparator
+
+trait Trait1[T] { def foo(arg: Comparator[T]): Unit }
+
+trait Trait2[T] extends Trait1[T] { def foo(arg: Comparator[String]): Int = 0 }
+
+class Class1 extends Trait2[String] { }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val c = new Class1
+ c.foo(Ordering[String])
+ }
+}
diff --git a/test/files/pos/hk-infer.scala b/test/files/pos/hk-infer.scala
new file mode 100644
index 0000000000..30e3476404
--- /dev/null
+++ b/test/files/pos/hk-infer.scala
@@ -0,0 +1,37 @@
+object Basis {
+ final case class X[T](t: T)
+ val x = Seq(X(32))
+ val y = Seq(X(true))
+ val x1 = Seq(X("asdf"))
+ val x2 = Seq(X('d'))
+}
+import Basis._
+
+object DoesWork {
+ // Doesn'tWork
+ // def f1 = x ++ y ++ x1 ++ x2
+
+ def f2 = List(x, y, x1, x2).flatten
+}
+
+// Testing the not giving of explicit Booper[M] arguments.
+object ShouldWorkHK {
+ class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] {
+ def underlying = xs
+ def BOOP(ys: Seq[M[_]]) = new Booper(xs ++ ys)
+ }
+ implicit def mkBoop[M[_]](xs: Seq[M[_]]) = new Booper(xs)
+
+ def f1 = x BOOP y BOOP x1 BOOP x2
+}
+
+object DoesWorkHK {
+ class Booper[M[_]](xs: Seq[M[_]]) extends collection.generic.SeqForwarder[M[_]] {
+ def underlying = xs
+ def BOOP(ys: Seq[M[_]]) = new Booper[M](xs ++ ys)
+ }
+ implicit def mkBoop[M[_]](xs: Seq[M[_]]) = new Booper[M](xs)
+
+ def f1 = x BOOP y BOOP x1 BOOP x2
+}
+
diff --git a/test/files/neg/t2179.scala b/test/files/pos/t2179.scala
index 89e22b6e2a..89e22b6e2a 100755
--- a/test/files/neg/t2179.scala
+++ b/test/files/pos/t2179.scala
diff --git a/test/files/neg/t3774.scala b/test/files/pos/t3774.scala
index 2869925b01..2869925b01 100644
--- a/test/files/neg/t3774.scala
+++ b/test/files/pos/t3774.scala
diff --git a/test/files/pos/t4547.scala b/test/files/pos/t4547.scala
new file mode 100644
index 0000000000..01f229bbf6
--- /dev/null
+++ b/test/files/pos/t4547.scala
@@ -0,0 +1,4 @@
+object Main {
+ def g: BigInt = 5 + BigInt(4) // since we're looking for an implicit that converts an int into something that has a + method that takes a BigInt, BigInt should be in the implicit scope
+ def g2 = 5 + BigInt(4)
+} \ No newline at end of file
diff --git a/test/files/pos/t4593.scala b/test/files/pos/t4593.scala
new file mode 100644
index 0000000000..250f68216a
--- /dev/null
+++ b/test/files/pos/t4593.scala
@@ -0,0 +1,20 @@
+// ticket #4593
+trait A {
+
+ class B
+ case object D extends B
+
+ class C {
+
+ var x: B = D
+
+ def y = synchronized {
+ x match {
+ case D => {}
+ }
+ }
+
+ }
+
+}
+
diff --git a/test/files/pos/t4692.scala b/test/files/pos/t4692.scala
new file mode 100644
index 0000000000..409daf2257
--- /dev/null
+++ b/test/files/pos/t4692.scala
@@ -0,0 +1,27 @@
+class TypeAliasVsImplicitTest {
+
+ class For[m[_], a](x: m[a]) {
+ def map[b](y: a => b): m[b] = throw new Error
+ }
+ implicit def toFor[m[_], a](x: m[a]): For[m, a] = new For[m, a](x)
+
+ trait MyList[A]
+
+ def foo(xs: MyList[Int]) = xs.map(x => x) // compiles fine.
+
+ type MyListOfInt = MyList[Int]
+ def bar(xs: MyListOfInt) = xs.map(x => x) // doesn't compile: value map is not a member of TypeAliasVsImplicitTest.this.MyListOfInt
+}
+
+// minimal case -- the bug was in type constructor inference where `xs.type` needed to be widened *and* dealiased
+// in 2.8.1 implicit conversion search started with a widened type, so that combo never came up
+// object Test {
+// class For[m[_], a](x: m[a])
+// def toFor[m[_], a](x: m[a]): For[m, a] = new For[m, a](x)
+//
+// trait MyList[A]
+// type MyListOfInt = MyList[Int]
+//
+// val xs: MyListOfInt = error("")
+// toFor(xs : xs.type)
+// } \ No newline at end of file
diff --git a/test/files/pos/widen-existential.scala b/test/files/pos/widen-existential.scala
new file mode 100644
index 0000000000..d7fa3cc1d8
--- /dev/null
+++ b/test/files/pos/widen-existential.scala
@@ -0,0 +1,7 @@
+class A {
+ { val x = classOf[List[_]] }
+ def f = {
+ val g = classOf[List[_]]
+ List(g, g)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/TestFlatMap.scala b/test/files/run/TestFlatMap.scala
new file mode 100644
index 0000000000..e6fb696aa2
--- /dev/null
+++ b/test/files/run/TestFlatMap.scala
@@ -0,0 +1,29 @@
+import scala.collection.parallel.{ ParMap => PMap }
+import scala.collection.parallel.mutable.{ ParHashSet => PMHashSet, ParHashMap => PMHashMap, ParArray }
+import scala.util.Random
+import scala.collection.parallel.CompositeThrowable
+
+object Test {
+
+ def main(args: Array[String]) {
+ val N = 1500
+ val M = 1500
+ var unmatchedLeft = new PMHashSet[Int]
+ var unmatchedRight = new PMHashSet[Int]
+ Range(0, N).foreach{ x => unmatchedLeft += x}
+ Range(0, M).foreach{ x => unmatchedRight += x}
+
+ try {
+ val matches = unmatchedLeft.flatMap{ lind: Int =>
+ val dists = unmatchedRight.seq.map{ rind: Int =>
+ val dist = Random.nextInt
+ (rind, dist)
+ }
+ dists
+ }
+ } catch {
+ case c: CompositeThrowable => for (t <- c.throwables) println("\n%s\n%s".format(t, t.getStackTrace.mkString("\n")))
+ }
+ }
+
+}
diff --git a/test/files/run/bug2308a.check b/test/files/run/bug2308a.check
new file mode 100644
index 0000000000..888240c702
--- /dev/null
+++ b/test/files/run/bug2308a.check
@@ -0,0 +1 @@
+interface Test$T
diff --git a/test/files/run/bug2308a.scala b/test/files/run/bug2308a.scala
new file mode 100644
index 0000000000..fff158c772
--- /dev/null
+++ b/test/files/run/bug2308a.scala
@@ -0,0 +1,7 @@
+object Test {
+ trait T[M[_]]
+
+ def f1 = classOf[T[X] forSome { type X[_] } ]
+
+ def main(args: Array[String]): Unit = println(f1)
+}
diff --git a/test/files/run/bug4110.check b/test/files/run/bug4110.check
new file mode 100644
index 0000000000..8b005989de
--- /dev/null
+++ b/test/files/run/bug4110.check
@@ -0,0 +1,2 @@
+Object with Test$A with Test$B
+Object with Test$A with Test$B
diff --git a/test/files/run/bug4110.scala b/test/files/run/bug4110.scala
new file mode 100644
index 0000000000..a42646ce52
--- /dev/null
+++ b/test/files/run/bug4110.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ def inferredType[T : Manifest](v : T) = println(manifest[T])
+
+ trait A
+ trait B
+
+ inferredType(new A with B)
+
+ val name = new A with B
+ inferredType(name)
+} \ No newline at end of file
diff --git a/test/files/run/bug4570.check b/test/files/run/bug4570.check
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/test/files/run/bug4570.check
@@ -0,0 +1 @@
+foo
diff --git a/test/files/run/bug4570.scala b/test/files/run/bug4570.scala
new file mode 100644
index 0000000000..5e1a20c52d
--- /dev/null
+++ b/test/files/run/bug4570.scala
@@ -0,0 +1,8 @@
+object Test extends Enumeration {
+ val foo = Value
+ def bar = withName("foo")
+
+ def main(args: Array[String]): Unit = {
+ values foreach println
+ }
+}
diff --git a/test/files/run/bug4656.check b/test/files/run/bug4656.check
new file mode 100644
index 0000000000..15a62794a9
--- /dev/null
+++ b/test/files/run/bug4656.check
@@ -0,0 +1 @@
+List(1, 2, 3)
diff --git a/test/files/run/bug4656.scala b/test/files/run/bug4656.scala
new file mode 100644
index 0000000000..4f3d189c8f
--- /dev/null
+++ b/test/files/run/bug4656.scala
@@ -0,0 +1,13 @@
+object Test {
+ def f = {
+ val buf = new collection.mutable.ListBuffer[Int]
+ buf ++= List(1, 2, 3)
+ val l = buf.toList
+ buf prependToList List(4, 5, 6)
+ l
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f)
+ }
+}
diff --git a/test/files/run/bug4660.scala b/test/files/run/bug4660.scala
new file mode 100644
index 0000000000..e57bb4bf25
--- /dev/null
+++ b/test/files/run/bug4660.scala
@@ -0,0 +1,11 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val traversable = 1 to 20 map (_.toString)
+ def normalize(m: Map[Char, Traversable[String]]) = m.map { case (k,v) => (k, v.toList) }
+
+ val groupedFromView = (traversable view).groupBy(_(0))
+ val groupedFromStrict = traversable.groupBy(_(0))
+
+ assert(normalize(groupedFromView) == normalize(groupedFromStrict))
+ }
+}
diff --git a/test/files/run/bug4697.check b/test/files/run/bug4697.check
new file mode 100644
index 0000000000..b9d569380c
--- /dev/null
+++ b/test/files/run/bug4697.check
@@ -0,0 +1 @@
+50005000
diff --git a/test/files/run/bug4697.scala b/test/files/run/bug4697.scala
new file mode 100644
index 0000000000..95592172e0
--- /dev/null
+++ b/test/files/run/bug4697.scala
@@ -0,0 +1,8 @@
+object Test {
+ var st = Stream(0)
+ for (i <- 1 to 10000) st = i +: st
+
+ def main(args: Array[String]): Unit = {
+ println(st.take(10000).sum)
+ }
+}
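Note (not part of the patch): a short sketch of the arithmetic behind bug4697.check. The stream is built as 10000 +: 9999 +: ... +: 1 +: Stream(0), so take(10000) covers 10000 down to 1 and the sum is n(n+1)/2:

object SumSketch {
  def main(args: Array[String]): Unit = {
    val n = 10000
    println(n * (n + 1) / 2) // 50005000, matching the expected output
  }
}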
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 6919eca9bc..66580f063a 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -100,7 +100,7 @@ scala> def m = {
val y : String @Annot(x) = x
y
} // x should not escape the local scope with a narrow type
-m: String @Annot("three")
+m: java.lang.String @Annot(x) forSome { val x: java.lang.String }
scala>
diff --git a/test/files/run/getClassTest.check b/test/files/run/getClassTest.check
new file mode 100644
index 0000000000..94e86c3889
--- /dev/null
+++ b/test/files/run/getClassTest.check
@@ -0,0 +1,18 @@
+f1: java.lang.Class<?>
+f2: java.lang.Class<?>
+f3: java.lang.Class<java.lang.Object>
+f4: java.lang.Class<? extends java.lang.Integer>
+f5: java.lang.Class<?>
+f0: T
+f1: class java.lang.Object
+f2: class java.lang.Object
+f3: class AnyRefs$A
+f4: class AnyRefs$B
+f5: class java.lang.Object
+f6: class java.lang.Object
+f7: class AnyRefs$A
+f8: class AnyRefs$B
+f1: java.lang.Class<? extends MoreAnyRefs$A>
+f2: java.lang.Class<? extends MoreAnyRefs$B>
+f3: java.lang.Class<?>
+f4: java.lang.Class<? extends MoreAnyRefs$A>
diff --git a/test/files/run/getClassTest.scala b/test/files/run/getClassTest.scala
new file mode 100644
index 0000000000..951cc8d931
--- /dev/null
+++ b/test/files/run/getClassTest.scala
@@ -0,0 +1,66 @@
+class AnyVals {
+ def f1 = (5: Any).getClass
+ def f2 = (5: AnyVal).getClass
+ def f3 = 5.getClass
+ def f4 = (5: java.lang.Integer).getClass
+ def f5 = (5.asInstanceOf[AnyRef]).getClass
+
+ // scalap says:
+ //
+ // def f1 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f2 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f3 : java.lang.Class[scala.Int] = { /* compiled code */ }
+ // def f4 : java.lang.Class[?0] forSome {type ?0 <: java.lang.Integer} = { /* compiled code */ }
+ // def f5 : java.lang.Class[?0] forSome {type ?0 <: scala.AnyRef} = { /* compiled code */ }
+ //
+ // java generic signature says:
+ //
+ // f1: java.lang.Class<?>
+ // f2: java.lang.Class<?>
+ // f3: java.lang.Class<java.lang.Object>
+ // f4: java.lang.Class<? extends java.lang.Integer>
+ // f5: java.lang.Class<?>
+}
+
+class AnyRefs {
+ class A
+ class B extends A
+
+ def f1 = (new B: Any).getClass().newInstance()
+ def f2 = (new B: AnyRef).getClass().newInstance()
+ def f3 = (new B: A).getClass().newInstance()
+ def f4 = (new B: B).getClass().newInstance()
+
+ def f0[T >: B] = (new B: T).getClass().newInstance()
+
+ def f5 = f0[Any]
+ def f6 = f0[AnyRef]
+ def f7 = f0[A]
+ def f8 = f0[B]
+}
+
+class MoreAnyRefs {
+ trait A
+ trait B
+
+ // don't leak anon/refinements
+ def f1 = (new A with B { }).getClass()
+ def f2 = (new B with A { }).getClass()
+ def f3 = (new { def bippy() = 5 }).getClass()
+ def f4 = (new A { def bippy() = 5 }).getClass()
+}
+
+object Test {
+ def returnTypes[T: Manifest] = (
+ manifest[T].erasure.getMethods.toList
+ filter (_.getName startsWith "f")
+ sortBy (_.getName)
+ map (m => m.getName + ": " + m.getGenericReturnType.toString)
+ )
+
+ def main(args: Array[String]): Unit = {
+ returnTypes[AnyVals] foreach println
+ returnTypes[AnyRefs] foreach println
+ returnTypes[MoreAnyRefs] foreach println
+ }
+}
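Note (not part of the patch): a small sketch of how one of the generic signatures listed in getClassTest.check can be inspected directly, assuming the AnyVals class defined in the test above:

object SignatureSketch {
  def main(args: Array[String]): Unit = {
    val m = classOf[AnyVals].getMethod("f4")
    // Prints the same string the test collects via getGenericReturnType:
    // java.lang.Class<? extends java.lang.Integer>
    println(m.getGenericReturnType)
  }
}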
diff --git a/test/files/run/null-and-intersect.check b/test/files/run/null-and-intersect.check
new file mode 100644
index 0000000000..81890cfeff
--- /dev/null
+++ b/test/files/run/null-and-intersect.check
@@ -0,0 +1,9 @@
+1
+2
+3
+4
+1
+2
+1
+2
+2
diff --git a/test/files/run/null-and-intersect.scala b/test/files/run/null-and-intersect.scala
new file mode 100644
index 0000000000..7266dabe6d
--- /dev/null
+++ b/test/files/run/null-and-intersect.scala
@@ -0,0 +1,34 @@
+object Test {
+ trait Immortal
+ class Bippy extends Immutable with Immortal
+ class Boppy extends Immutable
+
+ def f[T](x: Traversable[T]) = x match {
+ case _: Map[_, _] => 3
+ case _: Seq[_] => 2
+ case _: Iterable[_] => 1
+ case _ => 4
+ }
+ def g(x: Bippy) = x match {
+ case _: Immutable with Immortal => 1
+ case _ => 2
+ }
+ def h(x: Immutable) = x match {
+ case _: Immortal => 1
+ case _ => 2
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f(Set(1)))
+ println(f(Seq(1)))
+ println(f(Map(1 -> 2)))
+ println(f(null))
+
+ println(g(new Bippy))
+ println(g(null))
+
+ println(h(new Bippy))
+ println(h(new Boppy))
+ println(h(null))
+ }
+}
diff --git a/test/files/run/view-headoption.check b/test/files/run/view-headoption.check
new file mode 100644
index 0000000000..5c98b54b46
--- /dev/null
+++ b/test/files/run/view-headoption.check
@@ -0,0 +1,28 @@
+fail
+success
+f1: Some(5)
+fail
+success
+f2: 5
+fail
+success
+fail
+fail
+success
+fail
+fail
+fail
+success
+f3: Some(5)
+fail
+success
+fail
+success
+fail
+fail
+success
+fail
+fail
+fail
+success
+f4: 5
diff --git a/test/files/run/view-headoption.scala b/test/files/run/view-headoption.scala
new file mode 100644
index 0000000000..659c7e6b82
--- /dev/null
+++ b/test/files/run/view-headoption.scala
@@ -0,0 +1,18 @@
+object Test {
+ val failer = () => { println("fail") ; None }
+ val succeeder = () => { println("success") ; Some(5) }
+ val fs = List(failer, succeeder, failer, failer, succeeder, failer, failer, failer, succeeder)
+
+ def f0 = fs.view flatMap (f => f())
+ def f1 = f0.headOption
+ def f2 = f0.head
+ def f3 = f0.lastOption
+ def f4 = f0.last
+
+ def main(args: Array[String]): Unit = {
+ println("f1: " + f1)
+ println("f2: " + f2)
+ println("f3: " + f3)
+ println("f4: " + f4)
+ }
+}
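Note (not part of the patch): view-headoption.check records which thunks in `fs` are forced -- headOption and head stop at the first succeeder, while lastOption and last walk the whole view. A sketch of the same laziness, with illustrative names, using a counting map:

object ViewLazinessSketch {
  def main(args: Array[String]): Unit = {
    var forced = 0
    val xs = (1 to 9).view map { i => forced += 1; i }
    xs.headOption          // forces only the first element
    println(forced)        // 1, whereas a strict map would have run all nine
  }
}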
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 6a0e83a47d..56295f204c 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -130,6 +130,13 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
(t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
}
+ property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+ (r.size == 0) || {
+ val t = r.init
+ (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
+ }
+ }
+
property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
if (r.size == 0) {
diff --git a/test/files/specialized/fft.check b/test/files/specialized/fft.check
index 69a3a61f36..845729084d 100644
--- a/test/files/specialized/fft.check
+++ b/test/files/specialized/fft.check
@@ -1,4 +1,4 @@
Processing 65536 items
Boxed doubles: 0
Boxed ints: 2
-Boxed longs: 1310921
+Boxed longs: 1245366
diff --git a/test/files/pos/no-widen-locals.scala b/test/pending/pos/no-widen-locals.scala
index 32579404b2..ba568f64eb 100644
--- a/test/files/pos/no-widen-locals.scala
+++ b/test/pending/pos/no-widen-locals.scala
@@ -1,3 +1,5 @@
+// Worked from r23262 until that was reverted somewhere
+// around r25016.
import annotation.switch
object Test {
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
new file mode 100644
index 0000000000..d3ee2bf308
--- /dev/null
+++ b/test/pending/pos/those-kinds-are-high.scala
@@ -0,0 +1,37 @@
+class A {
+ trait Container[+T]
+ trait Template[+CC[X] <: Container[X]]
+
+ class C1[T] extends Template[C1] with Container[T]
+ class C2[T] extends Template[C2] with Container[T]
+
+ /** Target expression:
+ * List(new C1[String], new C2[String])
+ */
+
+ // Here's what would ideally be inferred.
+ //
+ // scala> :type List[Template[Container] with Container[String]](new C1[String], new C2[String])
+ // List[Template[Container] with Container[java.lang.String]]
+ //
+ // Here's what it does infer.
+ //
+ // scala> :type List(new C1[String], new C2[String])
+ // <console>:8: error: type mismatch;
+ // found : C1[String]
+ // required: Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any] with ScalaObject] with ScalaObject] with ScalaObject
+ // List(new C1[String], new C2[String])
+ // ^
+ //
+ // Simplified, the inferred type is:
+ //
+ // List[Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any]]]
+ //
+
+ /** Working version explicitly typed.
+ */
+ def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
+
+ // nope
+ // def fFail = List(new C1[String], new C2[String])
+}
diff --git a/test/scaladoc/scala/IndexScriptTest.scala b/test/scaladoc/scala/IndexScriptTest.scala
new file mode 100644
index 0000000000..991491c376
--- /dev/null
+++ b/test/scaladoc/scala/IndexScriptTest.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.IndexScript
+import java.net.URLClassLoader
+
+object Test extends Properties("IndexScript") {
+
+ def getClasspath = {
+ val loader = Thread.currentThread.getContextClassLoader
+ val paths = loader.asInstanceOf[URLClassLoader].getURLs
+ val morepaths = loader.getParent.asInstanceOf[URLClassLoader].getURLs
+ (paths ++ morepaths).map(_.getPath).mkString(java.io.File.pathSeparator)
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createIndexScript(path: String) =
+ docFactory.makeUniverse(List(path)) match {
+ case Some(universe) => {
+ val index = new IndexScript(universe,
+ indexModelFactory.makeIndex(universe))
+ Some(index)
+ }
+ case _ =>
+ None
+ }
+
+ property("allPackages") = {
+ createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ case Some(index) =>
+ index.allPackages.map(_.toString) == List(
+ "scala",
+ "scala.tools",
+ "scala.tools.nsc",
+ "scala.tools.nsc.doc",
+ "scala.tools.nsc.doc.html",
+ "scala.tools.nsc.doc.html.page"
+ )
+ case None =>
+ false
+ }
+ }
+}
diff --git a/test/scaladoc/scala/IndexTest.scala b/test/scaladoc/scala/IndexTest.scala
index 5e3d02e045..7679bab0c6 100644
--- a/test/scaladoc/scala/IndexTest.scala
+++ b/test/scaladoc/scala/IndexTest.scala
@@ -79,24 +79,4 @@ object Test extends Properties("Index") {
case None => false
}
}
-
- property("allPackages") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
-
- case Some(index) =>
- index.allPackages.map(_.toString) == List(
- "scala",
- "scala.tools",
- "scala.tools.nsc",
- "scala.tools.nsc.doc",
- "scala.tools.nsc.doc.html",
- "scala.tools.nsc.doc.html.page"
- )
-
- case None =>
- false
-
- }
- }
-
}