-rw-r--r--  build.xml | 411
-rw-r--r--  src/compiler/scala/tools/ant/Same.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/antlib.xml | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 184
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala | 106
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaScanners.scala | 53
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala) | 18
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 28
-rw-r--r--  src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala) | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/ShowPickled.scala | 4
-rw-r--r--  src/compiler/scala/tools/reflect/StdTags.scala | 4
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 4
-rw-r--r--  src/compiler/scala/tools/util/Javap.scala | 694
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/CompilerControl.scala (renamed from src/compiler/scala/tools/nsc/interactive/CompilerControl.scala) | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ContextTrees.scala (renamed from src/compiler/scala/tools/nsc/interactive/ContextTrees.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala (renamed from src/compiler/scala/tools/nsc/interactive/Global.scala) | 95
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Main.scala | 34
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Picklers.scala (renamed from src/compiler/scala/tools/nsc/interactive/Picklers.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala (renamed from src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/REPL.scala (renamed from src/compiler/scala/tools/nsc/interactive/REPL.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/RangePositions.scala (renamed from src/compiler/scala/tools/nsc/interactive/RangePositions.scala) | 1
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Response.scala (renamed from src/compiler/scala/tools/nsc/interactive/Response.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala (renamed from src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala (renamed from src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/Tester.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/Tester.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala) | 11
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala) | 0
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala) | 0
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectRunner.scala | 26
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 14
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Required.scala | 7
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 35
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 2667
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/CommonOwners.scala | 50
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/GlbLubs.scala | 592
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeComparers.scala | 617
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala | 282
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 1144
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala | 29
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/Interpreter.scala (renamed from src/compiler/scala/tools/nsc/Interpreter.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/InterpreterLoop.scala (renamed from src/compiler/scala/tools/nsc/InterpreterLoop.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/MainGenericRunner.scala (renamed from src/compiler/scala/tools/nsc/MainGenericRunner.scala) | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala | 7
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ByteCode.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ByteCode.scala) | 10
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/CommandLine.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CommandLine.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Completion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Completion.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/CompletionAware.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Delimited.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Delimited.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ExprTyper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Formatting.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Formatting.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ILoop.scala) | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala (renamed from src/compiler/scala/tools/nsc/interpreter/IMain.scala) | 13
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ISettings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ISettings.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Imports.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Imports.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineReader.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JavapClass.scala | 693
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Logger.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Logger.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/LoopCommands.scala (renamed from src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala (renamed from src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/NamedParam.scala (renamed from src/compiler/scala/tools/nsc/interpreter/NamedParam.scala) | 1
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Naming.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Naming.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Parsed.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Parsed.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Pasted.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Pasted.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Phased.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Phased.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Power.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Power.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplConfig.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplDir.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplDir.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala) | 8
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplProps.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplProps.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplReporter.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplVals.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplVals.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Results.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Results.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/RichClass.scala (renamed from src/compiler/scala/tools/nsc/interpreter/RichClass.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/SimpleReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/StdReplTags.scala | 15
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/package.scala (renamed from src/compiler/scala/tools/nsc/interpreter/package.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/History.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/History.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala) | 0
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/package.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/package.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/ant/Scaladoc.scala (renamed from src/compiler/scala/tools/ant/Scaladoc.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/ScalaDoc.scala (renamed from src/compiler/scala/tools/nsc/ScalaDoc.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/DocFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocParser.scala (renamed from src/compiler/scala/tools/nsc/doc/DocParser.scala) | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/Index.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala | 234
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala | 49
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Settings.scala (renamed from src/compiler/scala/tools/nsc/doc/Settings.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala (renamed from src/compiler/scala/tools/nsc/doc/Uncompilable.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/Universe.scala (renamed from src/compiler/scala/tools/nsc/doc/Universe.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala (renamed from src/compiler/scala/tools/nsc/doc/base/LinkTo.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Body.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Generator.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Universer.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Doclet.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/Page.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Page.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala (renamed from src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Index.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Source.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Template.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png) | bin 6232 -> 6232 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png) | bin 6220 -> 6220 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png) | bin 3357 -> 3357 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png) | bin 7516 -> 7516 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png) | bin 3910 -> 3910 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png) | bin 9006 -> 9006 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif) | bin 1206 -> 1206 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif) | bin 167 -> 167 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif) | bin 1544 -> 1544 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif) | bin 1341 -> 1341 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png) | bin 1692 -> 1692 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd) | bin 30823 -> 30823 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif) | bin 1462 -> 1462 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png) | bin 1803 -> 1803 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd) | bin 31295 -> 31295 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif) | bin 1324 -> 1324 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif) | bin 1104 -> 1104 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png) | bin 965 -> 965 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif) | bin 1366 -> 1366 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif) | bin 1115 -> 1115 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png) | bin 1198 -> 1198 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png) | bin 2441 -> 2441 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png) | bin 3356 -> 3356 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png) | bin 7653 -> 7653 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png) | bin 3903 -> 3903 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png) | bin 9158 -> 9158 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png) | bin 9200 -> 9200 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png) | bin 9158 -> 9158 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif) | bin 1145 -> 1145 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif) | bin 1118 -> 1118 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif) | bin 1145 -> 1145 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png) | bin 3335 -> 3335 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png) | bin 7312 -> 7312 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif) | bin 1201 -> 1201 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png) | bin 3186 -> 3186 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd) | bin 28904 -> 28904 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png) | bin 1150 -> 1150 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png) | bin 646 -> 646 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png) | bin 1380 -> 1380 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png) | bin 1864 -> 1864 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png) | bin 1434 -> 1434 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png) | bin 1965 -> 1965 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif) | bin 1214 -> 1214 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif) | bin 1209 -> 1209 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png) | bin 3374 -> 3374 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png) | bin 7410 -> 7410 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png) | bin 3882 -> 3882 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png) | bin 8967 -> 8967 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png) | bin 1445 -> 1445 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png) | bin 4236 -> 4236 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png) | bin 1841 -> 1841 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png) | bin 4969 -> 4969 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif) | bin 1206 -> 1206 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png) | bin 1879 -> 1879 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif) | bin 1206 -> 1206 bytes
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/Entity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Entity.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala (renamed from src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala) | 0
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Visibility.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala) | 0
-rw-r--r--  src/scaladoc/scala/tools/partest/ScaladocModelTest.scala (renamed from src/partest/scala/tools/partest/ScaladocModelTest.scala) | 8
-rw-r--r--  test/files/neg/macro-basic-mamdmi.check | 3
-rw-r--r--  test/files/neg/t5753.check | 3
-rwxr-xr-x  test/files/presentation/doc/doc.scala | 16
-rw-r--r--  test/scaladoc/run/t5527.check (renamed from test/files/run/t5527.check) | 9
-rw-r--r--  test/scaladoc/run/t5527.scala (renamed from test/files/run/t5527.scala) | 0
-rw-r--r--  test/scaladoc/scalacheck/IndexScriptTest.scala | 2
-rw-r--r--  test/scaladoc/scalacheck/IndexTest.scala | 6
-rwxr-xr-x  tools/stability-test.sh | 29
238 files changed, 4396 insertions, 4147 deletions
diff --git a/build.xml b/build.xml
index 6198a6d68e..7e4948c938 100644
--- a/build.xml
+++ b/build.xml
@@ -433,6 +433,7 @@ INITIALISATION
<property name="scalac.args.quickonly" value=""/>
<property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
+ <property name="scalac.args.strap" value="${scalac.args.quick}"/>
<!-- Setting-up Ant contrib tasks -->
<taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
<!-- This is the start time for the distribution -->
@@ -926,7 +927,6 @@ QUICK BUILD (QUICK)
<uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
<srcfiles dir="${src.dir}">
<include name="library/**"/>
- <include name="continuations/**"/>
<include name="swing/**"/>
<include name="actors/**"/>
</srcfiles>
@@ -989,28 +989,14 @@ QUICK BUILD (QUICK)
<include name="**/*.xml"/>
<include name="**/*.js"/>
<include name="**/*.css"/>
+ <include name="rootdoc.txt"/>
</fileset>
</copy>
- </target>
-
- <target name="quick.swing" depends="quick.lib" if="has.java6" unless="quick.lib.available">
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- </target>
-
- <target name="quick.lib.done" depends="quick.swing, quick.lib">
- <stopwatch name="quick.lib.timer" action="total"/>
<touch file="${build-quick.dir}/library.complete" verbose="no"/>
+ <stopwatch name="quick.lib.timer" action="total"/>
</target>
- <target name="quick.pre-reflect" depends="quick.lib.done">
+ <target name="quick.pre-reflect" depends="quick.lib">
<uptodate property="quick.reflect.available" targetfile="${build-quick.dir}/reflect.complete">
<srcfiles dir="${src.dir}">
<include name="reflect/**"/>
@@ -1031,11 +1017,11 @@ QUICK BUILD (QUICK)
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
+ <path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
- <!-- TODO - needed? -->
<propertyfile file="${build-quick.dir}/classes/reflect/reflect.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1109,7 +1095,45 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.comp.timer" action="total"/>
</target>
- <target name="quick.pre-plugins" depends="quick.comp">
+ <target name="quick.pre-repl" depends="quick.comp">
+ <uptodate property="quick.repl.available" targetfile="${build-quick.dir}/repl.complete">
+ <srcfiles dir="${src.dir}/repl" />
+ </uptodate>
+ </target>
+
+ <target name="quick.repl" depends="quick.pre-repl" unless="quick.repl.available">
+ <mkdir dir="${build-quick.dir}/classes/repl"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/repl"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/repl"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
+ <pathelement location="${jline.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/repl.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.swing" depends="quick.comp" if="has.java6" unless="quick.comp.available">
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="quick.compilation.path"/>
+ </scalacfork>
+ </target>
+
+ <target name="quick.pre-plugins" depends="quick.repl" unless="quick.repl.available">
<uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
<srcfiles dir="${src.dir}/continuations"/>
</uptodate>
@@ -1118,6 +1142,7 @@ QUICK BUILD (QUICK)
<target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
<stopwatch name="quick.plugins.timer"/>
<mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <mkdir dir="${build-quick.dir}/classes/continuations-library"/>
<scalacfork
destdir="${build-quick.dir}/classes/continuations-plugin"
compilerpathref="quick.classpath"
@@ -1144,7 +1169,7 @@ QUICK BUILD (QUICK)
</jar>
<!-- might split off library part into its own ant target -->
<scalacfork
- destdir="${build-quick.dir}/classes/library"
+ destdir="${build-quick.dir}/classes/continuations-library"
compilerpathref="quick.classpath"
params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
srcdir="${src.dir}/continuations/library"
@@ -1230,6 +1255,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<path refid="asm.classpath"/>
@@ -1248,6 +1274,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
@@ -1269,7 +1296,86 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.partest.timer" action="total"/>
</target>
- <target name="quick.pre-bin" depends="quick.partest">
+ <target name="quick.pre-scaladoc" depends="quick.partest">
+ <uptodate property="quick.scaladoc.available" targetfile="${build-quick.dir}/scaladoc.complete">
+ <srcfiles dir="${src.dir}/scaladoc" />
+ </uptodate>
+ </target>
+
+ <target name="quick.scaladoc" depends="quick.pre-scaladoc" unless="quick.scaladoc.available">
+ <mkdir dir="${build-quick.dir}/classes/scaladoc"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/scaladoc"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/scaladoc"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${ant.jar}"/>
+ <path refid="forkjoin.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <propertyfile file="${build-quick.dir}/classes/scaladoc/scaladoc.properties">
+ <entry key="version.number" value="${scaladoc.version.number}"/>
+ <entry key="copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ <copy todir="${build-quick.dir}/classes/scaladoc">
+ <fileset dir="${src.dir}/scaladoc">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.txt"/>
+ </fileset>
+ </copy>
+ <touch file="${build-quick.dir}/scaladoc.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.pre-interactive" depends="quick.scaladoc">
+ <uptodate property="quick.interactive.available" targetfile="${build-quick.dir}/interactive.complete">
+ <srcfiles dir="${src.dir}/interactive" />
+ </uptodate>
+ </target>
+
+ <target name="quick.interactive" depends="quick.pre-interactive" unless="quick.interactive.available">
+ <mkdir dir="${build-quick.dir}/classes/interactive"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/interactive"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/interactive"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${build-quick.dir}/classes/interactive"/>
+ <pathelement location="${ant.jar}"/>
+ <path refid="forkjoin.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/interactive.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.pre-bin" depends="quick.interactive">
<condition property="quick.bin.available">
<isset property="quick.comp.available"/>
</condition>
@@ -1280,11 +1386,12 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${jline.jar}"/>
+ <path refid="asm.classpath"/>
<path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
- <path refid="asm.classpath"/>
- <pathelement location="${jline.jar}"/>
</path>
<taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
<mkdir dir="${build-quick.dir}/bin"/>
@@ -1360,6 +1467,7 @@ PACKED QUICK BUILD (PACK)
<exclude name="scala/swing/**"/>
<exclude name="scala/actors/**"/>
</fileset>
+ <fileset dir="${build-quick.dir}/classes/continuations-library"/>
<fileset dir="${build-libs.dir}/classes/forkjoin"/>
</jar>
<jar destfile="${build-pack.dir}/lib/scala-actors.jar">
@@ -1407,6 +1515,9 @@ PACKED QUICK BUILD (PACK)
<mkdir dir="${build-pack.dir}/lib"/>
<jar destfile="${build-pack.dir}/lib/scala-compiler.jar" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
<fileset dir="${build-quick.dir}/classes/compiler"/>
+ <fileset dir="${build-quick.dir}/classes/scaladoc"/>
+ <fileset dir="${build-quick.dir}/classes/interactive"/>
+ <fileset dir="${build-quick.dir}/classes/repl"/>
<fileset dir="${build-asm.dir}/classes"/>
</jar>
<copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
@@ -1530,6 +1641,11 @@ PACKED QUICK BUILD (PACK)
<pathelement location="${jline.jar}"/>
<path refid="lib.extra"/>
</path>
+ <path id="partest.classpath">
+ <path refid="pack.classpath"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
+ </path>
<taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
<taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
</target>
@@ -1714,7 +1830,7 @@ BOOTSTRAPPING BUILD (STRAP)
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
srcpath="${src.dir}/library"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1723,7 +1839,7 @@ BOOTSTRAPPING BUILD (STRAP)
<scalacfork
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1743,28 +1859,15 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.css"/>
</fileset>
</copy>
- </target>
-
- <target name="strap.swing" if="has.java6" unless="strap.lib.available" depends="strap.lib">
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- </target>
-
- <target name="strap.lib.done" depends="strap.swing, strap.lib">
<touch file="${build-strap.dir}/library.complete" verbose="no"/>
<stopwatch name="strap.lib.timer" action="total"/>
</target>
- <target name="strap.pre-reflect" depends="strap.lib.done">
+ <target name="strap.pre-reflect" depends="strap.lib">
<uptodate property="strap.reflect.available" targetfile="${build-strap.dir}/reflect.complete">
- <srcfiles dir="${src.dir}/reflect"/>
+ <srcfiles dir="${src.dir}">
+ <include name="reflect/**"/>
+ </srcfiles>
</uptodate>
</target>
@@ -1807,7 +1910,7 @@ BOOTSTRAPPING BUILD (STRAP)
</fileset>
</copy>
<touch file="${build-strap.dir}/reflect.complete" verbose="no"/>
- <stopwatch name="strap.comp.timer" action="total"/>
+ <stopwatch name="strap.reflect.timer" action="total"/>
</target>
<target name="strap.pre-comp" depends="strap.reflect">
@@ -1822,7 +1925,7 @@ BOOTSTRAPPING BUILD (STRAP)
<scalacfork
destdir="${build-strap.dir}/classes/compiler"
compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/compiler"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1860,153 +1963,7 @@ BOOTSTRAPPING BUILD (STRAP)
<stopwatch name="strap.comp.timer" action="total"/>
</target>
- <target name="strap.pre-plugins" depends="strap.comp">
- <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
-
- <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
- <stopwatch name="strap.plugins.timer"/>
- <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/continuations-plugin"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-strap.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-strap.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-strap.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-strap.dir}/misc/scala-devel/plugins"/>
- </scalacfork>
- <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="strap.plugins.timer" action="total"/>
- </target>
-
- <target name="strap.scalacheck" depends="strap.plugins">
- <mkdir dir="${build-strap.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalacheck"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -nowarn"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
- </target>
-
- <target name="strap.pre-scalap" depends="strap.scalacheck">
- <uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
-
- <target name="strap.scalap" depends="strap.pre-scalap" unless="strap.scalap.available">
- <stopwatch name="strap.scalap.timer"/>
- <mkdir dir="${build-strap.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalap"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="strap.scalap.timer" action="total"/>
- </target>
-
- <target name="strap.pre-partest" depends="strap.scalap, asm.done">
- <uptodate property="strap.partest.available" targetfile="${build-strap.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
-
- <target name="strap.partest" depends="strap.pre-partest" unless="strap.partest.available">
- <stopwatch name="strap.partest.timer"/>
- <mkdir dir="${build-strap.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-strap.dir}/classes/partest"
- target="1.6" source="1.5">
- <classpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <path refid="asm.classpath"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/partest"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <copy todir="${build-strap.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/partest.complete" verbose="no"/>
- <stopwatch name="strap.partest.timer" action="total"/>
- </target>
-
- <target name="strap.done" depends="strap.partest"/>
+ <target name="strap.done" depends="strap.comp"/>
<target name="strap.clean">
<delete dir="${build-strap.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
@@ -2074,6 +2031,8 @@ SBT Compiler Interface
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${sbt.interface.jar}"/>
<path refid="forkjoin.classpath"/>
</compilationpath>
@@ -2370,13 +2329,10 @@ DOCUMENTATION
BOOTRAPING TEST AND TEST SUITE
============================================================================ -->
- <target name="test.stability" depends="strap.done, init">
- <same dir="${build-quick.dir}" todir="${build-strap.dir}" failondifferent="yes">
- <exclude name="**/*.properties"/>
- <exclude name="bin/**"/>
- <exclude name="*.complete"/>
- <exclude name="misc/scala-devel/plugins/*.jar"/>
- </same>
+ <target name="test.stability" depends="pack.done, strap.done">
+ <exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
+ <!-- I think doing it this way means it will auto-pass on windows... that's the idea. If not, something like this. -->
+ <!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
</target>
<target name="test.classload" depends="pack.done">
@@ -2395,47 +2351,39 @@ BOOTRAPING TEST AND TEST SUITE
</antcall>
</target>
- <target name="test.run" depends="pack.done">
+ <target name="test.pre-run" depends="pack.done">
<property name="partest.srcdir" value="files" />
+ </target>
+
+ <target name="test.run" depends="test.pre-run">
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="1200000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+
+ <compilationpath refid="partest.classpath"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
<jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
</partest>
</target>
- <target name="test.ant" depends="pack.done">
+ <target name="test.ant" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<anttests dir="${partest.dir}/${partest.srcdir}/ant" includes="*build.xml"/>
</partest>
</target>
- <target name="test.suite" depends="pack.done">
+ <target name="test.suite" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
<negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
@@ -2451,14 +2399,10 @@ BOOTRAPING TEST AND TEST SUITE
<instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
<include name="*.scala"/>
</instrumentedtests>
- <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
- <include name="*/*.scala"/>
- </presentationtests>
- <!-- <scripttests dir="${partest.dir}/${partest.srcdir}/script" includes="*.scala"/> -->
</partest>
</target>
- <target name="test.continuations.suite" depends="pack.done">
+ <target name="test.continuations.suite" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
@@ -2466,27 +2410,30 @@ BOOTSTRAPPING TEST AND TEST SUITE
scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
<compilerarg value="-Xpluginsdir"/>
<compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
</partest>
</target>
- <target name="test.scaladoc" depends="pack.done">
+ <target name="test.scaladoc" depends="test.pre-run">
<partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
<scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
</partest>
</target>
- <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.stability, test.sbt"/>
+ <target name="test.interactive" depends="test.pre-run">
+ <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
+ <compilationpath refid="partest.classpath"/>
+ <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
+ <include name="*/*.scala"/>
+ </presentationtests>
+ </partest>
+ </target>
+
+ <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.interactive, test.stability, test.sbt"/>
<!-- ===========================================================================
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index a1f0cda662..6362d28580 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -32,7 +32,7 @@ import org.apache.tools.ant.types.Mapper
*
* @author Gilles Dubochet
* @version 1.0 */
-class Same extends ScalaMatchingTask {
+@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask {
/*============================================================================*\
** Ant user-properties **
\*============================================================================*/
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 78159e6d10..7885534689 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -11,8 +11,6 @@
classname="scala.tools.ant.Scaladoc"/>
<taskdef name="scalatool"
classname="scala.tools.ant.ScalaTool"/>
- <taskdef name="same"
- classname="scala.tools.ant.Same"/>
<taskdef name="pack200"
classname="scala.tools.ant.Pack200Task"/>
</antlib>
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 51fa8f0ab9..7ee3ee551f 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -425,12 +425,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val printInfers = settings.Yinferdebug.value
// phaseName = "parser"
- object syntaxAnalyzer extends {
+ lazy val syntaxAnalyzer = new {
val global: Global.this.type = Global.this
val runsAfter = List[String]()
val runsRightAfter = None
} with SyntaxAnalyzer
+ import syntaxAnalyzer.{ UnitScanner, UnitParser }
+
// !!! I think we're overdue for all these phase objects being lazy vals.
// There's no way for a Global subclass to provide a custom typer
// despite the existence of a "def newTyper(context: Context): Typer"
@@ -997,7 +999,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
object typeDeconstruct extends {
val global: Global.this.type = Global.this
- } with interpreter.StructuredTypeStrings
+ } with typechecker.StructuredTypeStrings
/** There are common error conditions where when the exception hits
* here, currentRun.currentUnit is null. This robs us of the knowledge
@@ -1120,9 +1122,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
}
- def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
- def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
- def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
+ def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newUnitScanner(unit: CompilationUnit): UnitScanner = new UnitScanner(unit)
+ def newUnitParser(unit: CompilationUnit): UnitParser = new UnitParser(unit)
+ def newUnitParser(code: String): UnitParser = newUnitParser(newCompilationUnit(code))
/** A Run is a single execution of the compiler on a sets of units
*/
@@ -1692,8 +1696,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
})
}
- def forInteractive = false
- def forScaladoc = false
def createJavadoc = false
}
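
With syntaxAnalyzer now a lazy val and parser construction routed through the newUnitScanner/newUnitParser factories, a Global subclass (scaladoc, the presentation compiler, or anything else) can substitute its own parser without clients naming syntaxAnalyzer.UnitParser directly; SyntaxAnalyzer and ToolBoxFactory below already switch to the factory. A minimal, hypothetical subclass using the hook (the class name and the logging are illustrative, not part of this change):

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.Reporter

    // Hypothetical subclass: intercept unit parsing via the new factory method.
    class TracingGlobal(settings0: Settings, reporter0: Reporter) extends Global(settings0, reporter0) {
      override def newUnitParser(unit: CompilationUnit) = {
        println("about to parse " + unit.source.file.name)  // illustrative only
        super.newUnitParser(unit)
      }
    }
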
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 08a6adfded..9218ad3330 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -142,9 +142,9 @@ self =>
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- def newScanner = new SourceFileScanner(source)
+ def newScanner(): Scanner = new SourceFileScanner(source)
- val in = newScanner
+ val in = newScanner()
in.init()
private val globalFresh = new FreshNameCreator.Default
@@ -196,10 +196,9 @@ self =>
}
class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+ def this(unit: global.CompilationUnit) = this(unit, Nil)
- def this(unit: global.CompilationUnit) = this(unit, List())
-
- override def newScanner = new UnitScanner(unit, patches)
+ override def newScanner() = new UnitScanner(unit, patches)
override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
@@ -219,6 +218,7 @@ self =>
try body
finally smartParsing = saved
}
+ def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
@@ -244,7 +244,7 @@ self =>
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
- case patches => new UnitParser(unit, patches).parse()
+ case patches => (this withPatches patches).parse()
}
}
}
@@ -650,31 +650,10 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition. */
- def joinComment(trees: => List[Tree]): List[Tree] = {
- val doc = in.flushDoc
- if ((doc ne null) && doc.raw.length > 0) {
- val joined = trees map {
- t =>
- DocDef(doc, t) setPos {
- if (t.pos.isDefined) {
- val pos = doc.pos.withEnd(t.pos.endOrPoint)
- // always make the position transparent
- pos.makeTransparent
- } else {
- t.pos
- }
- }
- }
- joined.find(_.pos.isOpaqueRange) foreach {
- main =>
- val mains = List(main)
- joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
- }
- joined
- }
- else trees
- }
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
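
joinComment is reduced to a hook here; the DocDef-wrapping behaviour removed above is evidently meant to live in a scaladoc-specific parser. A hedged sketch of what such an override could look like (the class names and the simplified position handling are assumptions; only joinComment, flushDoc and DocDef come from the surrounding code, and a doc-buffering scanner, sketched after the Scanners.scala hunks below, is assumed to be installed for it to have any effect):

    import scala.tools.nsc.ast.parser.SyntaxAnalyzer

    // Hypothetical scaladoc-side parser; compiles against the hooks added in
    // this change but is not part of it.
    abstract class ScaladocSyntaxAnalyzerSketch extends SyntaxAnalyzer {
      import global._

      class ScaladocUnitParser(unit: CompilationUnit) extends UnitParser(unit) {
        override def joinComment(trees: => List[Tree]): List[Tree] = {
          val doc = in.flushDoc()
          if (doc eq null) trees
          else trees map (t => DocDef(doc, t) setPos t.pos)  // position handling simplified
        }
      }
    }
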
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 19cf1b5093..6ad1c50075 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -9,7 +9,7 @@ import scala.tools.nsc.util.CharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, ArrayBuffer }
import scala.xml.Utility.{ isNameStart }
@@ -83,6 +83,69 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
+ private var openComments = 0
+ protected def putCommentChar(): Unit = nextChar()
+
+ @tailrec private def skipLineComment(): Unit = ch match {
+ case SU | CR | LF =>
+ case _ => nextChar() ; skipLineComment()
+ }
+ private def maybeOpen() {
+ putCommentChar()
+ if (ch == '*') {
+ putCommentChar()
+ openComments += 1
+ }
+ }
+ private def maybeClose(): Boolean = {
+ putCommentChar()
+ (ch == '/') && {
+ putCommentChar()
+ openComments -= 1
+ openComments == 0
+ }
+ }
+ @tailrec final def skipNestedComments(): Unit = ch match {
+ case '/' => maybeOpen() ; skipNestedComments()
+ case '*' => if (!maybeClose()) skipNestedComments()
+ case SU => incompleteInputError("unclosed comment")
+ case _ => putCommentChar() ; skipNestedComments()
+ }
+ def skipDocComment(): Unit = skipNestedComments()
+ def skipBlockComment(): Unit = skipNestedComments()
+
+ private def skipToCommentEnd(isLineComment: Boolean) {
+ nextChar()
+ if (isLineComment) skipLineComment()
+ else {
+ openComments = 1
+ val isDocComment = (ch == '*') && { nextChar(); true }
+ if (isDocComment) {
+ // Check for the amazing corner case of /**/
+ if (ch == '/')
+ nextChar()
+ else
+ skipDocComment()
+ }
+ else skipBlockComment()
+ }
+ }
+
+ /** @pre ch == '/'
+ * Returns true if a comment was skipped.
+ */
+ def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ case _ => false
+ }
+ def flushDoc(): DocComment = null
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = ()
+
def isAtEnd = charOffset >= buf.length
def resume(lastCode: Int) = {
@@ -130,22 +193,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- /** Should doc comments be built? */
- def buildDocs: Boolean = forScaladoc
-
- /** holder for the documentation comment
- */
- var docComment: DocComment = null
-
- def flushDoc: DocComment = {
- val ret = docComment
- docComment = null
- ret
- }
-
- protected def foundComment(value: String, start: Int, end: Int) = ()
- protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
private class TokenData0 extends TokenData
/** we need one token lookahead and one token history
@@ -218,12 +265,15 @@ trait Scanners extends ScannersCommon {
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
- if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docComment = null
+ if (!sepRegions.isEmpty)
+ sepRegions = sepRegions.tail
+
+ discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docComment = null
+
+ discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -516,62 +566,6 @@ trait Scanners extends ScannersCommon {
}
}
- private def skipComment(): Boolean = {
-
- if (ch == '/' || ch == '*') {
-
- val comment = new StringBuilder("/")
- def appendToComment() = comment.append(ch)
-
- if (ch == '/') {
- do {
- appendToComment()
- nextChar()
- } while ((ch != CR) && (ch != LF) && (ch != SU))
- } else {
- docComment = null
- var openComments = 1
- appendToComment()
- nextChar()
- appendToComment()
- var buildingDocComment = false
- if (ch == '*' && buildDocs) {
- buildingDocComment = true
- }
- while (openComments > 0) {
- do {
- do {
- if (ch == '/') {
- nextChar(); appendToComment()
- if (ch == '*') {
- nextChar(); appendToComment()
- openComments += 1
- }
- }
- if (ch != '*' && ch != SU) {
- nextChar(); appendToComment()
- }
- } while (ch != '*' && ch != SU)
- while (ch == '*') {
- nextChar(); appendToComment()
- }
- } while (ch != '/' && ch != SU)
- if (ch == '/') nextChar()
- else incompleteInputError("unclosed comment")
- openComments -= 1
- }
-
- if (buildingDocComment)
- foundDocComment(comment.toString, offset, charOffset - 2)
- }
-
- foundComment(comment.toString, offset, charOffset - 2)
- true
- } else {
- false
- }
- }
-
/** Can token start a statement? */
def inFirstOfStat(token: Int) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
@@ -1233,7 +1227,7 @@ trait Scanners extends ScannersCommon {
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+ class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
@@ -1281,17 +1275,6 @@ trait Scanners extends ScannersCommon {
}
}
}
-
- override def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
-
- override def foundDocComment(value: String, start: Int, end: Int) {
- val docPos = new RangePosition(unit.source, start, start, end)
- docComment = new DocComment(value, docPos)
- unit.comment(docPos, value)
- }
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
@@ -1382,17 +1365,24 @@ trait Scanners extends ScannersCommon {
bpbuf += current
}
}
+ def bracePairString(bp: BracePair, indent: Int): String = {
+ val rangeString = {
+ import bp._
+ val lline = line(loff)
+ val rline = line(roff)
+ val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+ "%s:%s to %s:%s".format(tokens: _*)
+ }
+ val outer = (" " * indent) + rangeString
+ val inners = bp.nested map (bracePairString(_, indent + 2))
- def printBP(bp: BracePair, indent: Int) {
- println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
- if (bp.nested.nonEmpty)
- for (bp1 <- bp.nested) {
- printBP(bp1, indent + 2)
- }
+ if (inners.isEmpty) outer
+ else inners.mkString(outer + "\n", "\n", "")
}
-// println("lineStart = "+lineStart)//DEBUG
-// println("bracepairs = ")
-// for (bp <- bpbuf.toList) printBP(bp, 0)
+ def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+ def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+ log(s"\n$startString\n$bpString")
bpbuf.toList
}
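
The new protected hooks (putCommentChar, skipDocComment, flushDoc, discardDocBuffer) let the default scanner merely skip comments while a doc-aware subclass buffers them. A standalone toy illustration of that division of labour, not compiler code (MiniScanner and every name in it are invented for the example):

    // Base scanner consumes comment characters; the doc-building variant
    // overrides putCommentChar/flushDoc to buffer them, mirroring the hook
    // layout introduced above.
    object CommentHookDemo {
      final val SU = '\u001A'  // same end-of-input sentinel the real scanner uses

      class MiniScanner(input: String) {
        private var pos = 0
        def ch: Char = if (pos < input.length) input(pos) else SU
        def nextChar(): Unit = pos += 1
        protected def putCommentChar(): Unit = nextChar()   // default: just skip
        def flushDoc(): String = null                       // default: no doc comments

        // Skip a (non-nested) block comment; `ch` is assumed to be just past the opening "/*".
        def skipBlockComment(): Unit = ch match {
          case SU  => sys.error("unclosed comment")
          case '*' => putCommentChar(); if (ch == '/') nextChar() else skipBlockComment()
          case _   => putCommentChar(); skipBlockComment()
        }
      }

      class DocScanner(input: String) extends MiniScanner(input) {
        private val buf = new StringBuilder("/*")
        override protected def putCommentChar(): Unit = { buf append ch; nextChar() }
        override def flushDoc(): String = { val s = buf.toString + "/"; buf.clear(); s }
      }

      def main(args: Array[String]): Unit = {
        val s = new DocScanner("* doc text */ rest")  // input starts just past "/*"
        s.skipBlockComment()
        println(s.flushDoc())                          // prints "/** doc text */"
      }
    }
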
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index f1bf590ebf..7cf5a07291 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -28,8 +28,8 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
if (unit.body == EmptyTree) {
unit.body =
if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) new UnitParser(unit).parse()
- else new UnitParser(unit).smartParse()
+ else if (reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
}
if (settings.Yrangepos.value && !reporter.hasErrors)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 941ccd9a2d..c1cd3204e0 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -9,7 +9,6 @@ package backend.jvm
import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
import scala.tools.nsc.io._
import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.{ Javap, JavapClass }
import java.util.jar.Attributes.Name
import scala.language.postfixOps
@@ -59,35 +58,6 @@ trait BytecodeWriters {
override def close() = writer.close()
}
- /** To be mixed-in with the BytecodeWriter that generates
- * the class file to be disassembled.
- */
- trait JavapBytecodeWriter extends BytecodeWriter {
- val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
- val cl = ScalaClassLoader.appLoader
-
- def emitJavap(classFile: AbstractFile, javapFile: File) {
- val pw = javapFile.printWriter()
- try {
- val javap = new JavapClass(cl, pw) {
- override def findBytes(path: String): Array[Byte] = classFile.toByteArray
- }
- javap(Seq("-verbose", "-protected", classFile.name)) foreach (_.show())
- } finally pw.close()
- }
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
-
- val classFile = getFile(sym, jclassName, ".class")
- val segments = jclassName.split("[./]")
- val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
- javapFile.parent.createDirectory()
-
- if (Javap.isAvailable(cl)) emitJavap(classFile, javapFile)
- else warning("No javap on classpath, skipping javap output.")
- }
- }
-
trait ClassBytecodeWriter extends BytecodeWriter {
def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
val outfile = getFile(sym, jclassName, ".class")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 388efb4625..4a3d1805d9 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -72,19 +72,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
new DirectToJarfileWriter(f.file)
case _ =>
- import scala.tools.util.Javap
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else if (Javap.isAvailable()) new ClassBytecodeWriter with JavapBytecodeWriter { }
- else {
- warning("No javap on classpath, skipping javap output.")
+ if (settings.Ydumpclasses.isDefault)
new ClassBytecodeWriter { }
- }
-
+ else
+ new ClassBytecodeWriter with DumpBytecodeWriter { }
// TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
// Three areas where javap output is less than ideal (e.g. when comparing versions of the same classfile) are:
// (a) unreadable pickle;
@@ -2519,7 +2510,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (nextBlock != whereto)
jcode goTo labels(whereto)
// SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
- // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
+ // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
debugwarn("Had a jump only block that wasn't collapsed")
emit(asm.Opcodes.NOP)
@@ -3084,7 +3075,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
assert(nonICode.hasNext, "empty block")
nonICode.next.isInstanceOf[JUMP]
}
-
+
/**
* Returns the list of instructions in a block that follow all ICode only instructions,
* where an ICode only instruction is one that won't make it to the JVM
@@ -3101,7 +3092,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* Returns the target of a block that is "jump only" which is defined
* as being a block that consists only of 0 or more instructions that
* won't make it to the JVM followed by a JUMP.
- *
+ *
* @param b The basic block to examine
* @return Some(target) if b is a "jump only" block or None if it's not
*/
@@ -3150,12 +3141,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
def lookup(b: BasicBlock) = detour.getOrElse(b, b)
-
+
m.code.startBlock = lookup(m.code.startBlock)
-
+
for(eh <- m.exh)
eh.setStartBlock(lookup(eh.startBlock))
-
+
for (b <- m.blocks) {
def replaceLastInstruction(i: Instruction) = {
if (b.lastInstruction != i) {
@@ -3164,18 +3155,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
b.replaceInstruction(idxLast, i)
}
}
-
+
b.lastInstruction match {
case JUMP(whereto) =>
replaceLastInstruction(JUMP(lookup(whereto)))
case CJUMP(succ, fail, cond, kind) =>
replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
- case CZJUMP(succ, fail, cond, kind) =>
+ case CZJUMP(succ, fail, cond, kind) =>
replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
case SWITCH(tags, labels) =>
val newLabels = (labels map lookup)
replaceLastInstruction(SWITCH(tags, newLabels))
- case _ => ()
+ case _ => ()
}
}
}
@@ -3203,7 +3194,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// blocks
for (key <- detour.keySet) {
// we use the Robert Floyd's classic Tortoise and Hare algorithm
- @tailrec
+ @tailrec
def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
if (tortoise == hare)
// cycle detected, map key to key
@@ -3227,7 +3218,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
detour
}
-
+
val detour = computeDetour
rephraseGotos(detour)
@@ -3235,33 +3226,33 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val (remappings, cycles) = detour partition {case (source, target) => source != target}
for ((source, target) <- remappings) {
debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
- if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+ if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
}
val sources = remappings.keySet
val targets = remappings.values.toSet
val intersection = sources intersect targets
-
+
if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
-
+
for ((source, _) <- cycles) {
debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
}
}
}
-
+
/**
* Removes all blocks that are unreachable in a method using a standard reachability analysis.
*/
def elimUnreachableBlocks(m: IMethod) {
- assert(m.hasCode, "code-less method")
-
+ assert(m.hasCode, "code-less method")
+
// assume nothing is reachable until we prove it can be reached
val reachable = mutable.Set[BasicBlock]()
-
+
// the set of blocks that we know are reachable but have
// yet to be marked reachable, initially only the start block
val worklist = mutable.Set(m.startBlock)
-
+
while (worklist.nonEmpty) {
val block = worklist.head
worklist remove block
@@ -3271,7 +3262,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// think are unreachable
worklist ++= (block.successors filterNot reachable)
}
-
+
// exception handlers need to be told not to cover unreachable blocks
// and exception handlers that no longer cover any blocks need to be
// removed entirely
@@ -3282,9 +3273,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
unusedExceptionHandlers += exh
}
}
-
+
// remove the unused exception handler references
- if (settings.debug.value)
+ if (settings.debug.value)
for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
m.exh = m.exh filterNot unusedExceptionHandlers
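
With the JavapBytecodeWriter gone, -Ygenjavap disassembly is no longer wired into the backend; the TODO near the top of this file's hunks points at asm.util.Textifier as an eventual replacement. A hedged sketch of that route, assuming the compiler's bundled ASM (scala.tools.asm), including its util package, is on the classpath; it is not part of this change:

    import java.io.PrintWriter
    import scala.tools.asm.ClassReader
    import scala.tools.asm.util.{ Textifier, TraceClassVisitor }

    // Illustration of the Textifier-based disassembly the TODO mentions.
    object TextifySketch {
      def textify(classBytes: Array[Byte], out: PrintWriter): Unit = {
        val printer = new TraceClassVisitor(null, new Textifier, out)
        new ClassReader(classBytes).accept(printer, 0)
        out.flush()
      }
    }
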
diff --git a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala
deleted file mode 100644
index 021e59a879..0000000000
--- a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package doc
-
-import scala.util.control.ControlThrowable
-import reporters.Reporter
-import typechecker.Analyzer
-import scala.reflect.internal.util.BatchSourceFile
-
-trait ScaladocAnalyzer extends Analyzer {
- val global : Global // generally, a ScaladocGlobal
- import global._
-
- override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context)
-
- class ScaladocTyper(context0: Context) extends Typer(context0) {
- private def unit = context.unit
-
- override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
- val sym = docDef.symbol
-
- if ((sym ne null) && (sym ne NoSymbol)) {
- val comment = docDef.comment
- fillDocComment(sym, comment)
- val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_ => typer1 defineUseCases useCase) match {
- case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
- }
- }
-
- super.typedDocDef(docDef, mode, pt)
- }
-
- def defineUseCases(useCase: UseCase): List[Symbol] = {
- def stringParser(str: String): syntaxAnalyzer.Parser = {
- val file = new BatchSourceFile(context.unit.source.file, str) {
- override def positionInUltimateSource(pos: Position) = {
- pos.withSource(context.unit.source, useCase.pos.start)
- }
- }
- val unit = new CompilationUnit(file)
- new syntaxAnalyzer.UnitParser(unit)
- }
-
- val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
- val enclClass = context.enclClass.owner
-
- def defineAlias(name: Name) = (
- if (context.scope.lookup(name) == NoSymbol) {
- lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
- silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
- val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
- val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
- val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
- alias setInfo newInfo
- context.scope.enter(alias)
- }
- }
- }
- )
-
- for (tree <- trees; t <- tree)
- t match {
- case Ident(name) if name startsWith '$' => defineAlias(name)
- case _ =>
- }
-
- useCase.aliases = context.scope.toList
- namer.enterSyms(trees)
- typedStats(trees, NoSymbol)
- useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
-
- if (settings.debug.value)
- useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
-
- useCase.defined
- }
- }
-}
-
-class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends {
- override val useOffsetPositions = false
-} with Global(settings, reporter) {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.packageObjects
- phasesSet += analyzer.typerFactory
- }
- override def forScaladoc = true
- override lazy val analyzer = new {
- val global: ScaladocGlobal.this.type = ScaladocGlobal.this
- } with ScaladocAnalyzer
-}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 1d1469f87d..f9b1e57e66 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
import scala.language.implicitConversions
// Todo merge these better with Scanners
@@ -235,16 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
cbuf.setLength(0)
}
- /** buffer for the documentation comment
- */
- var docBuffer: StringBuilder = null
-
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
private class JavaTokenData0 extends JavaTokenData
/** we need one token lookahead
@@ -587,33 +577,20 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- private def skipComment(): Boolean = {
- if (in.ch == '/') {
- do {
- in.next()
- } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
- true
- } else if (in.ch == '*') {
- docBuffer = null
- in.next()
- val scalaDoc = ("/**", "*/")
- if (in.ch == '*' && forScaladoc)
- docBuffer = new StringBuilder(scalaDoc._1)
- do {
- do {
- if (in.ch != '*' && in.ch != SU) {
- in.next(); putDocChar(in.ch)
- }
- } while (in.ch != '*' && in.ch != SU)
- while (in.ch == '*') {
- in.next(); putDocChar(in.ch)
- }
- } while (in.ch != '/' && in.ch != SU)
- if (in.ch == '/') in.next()
- else incompleteInputError("unclosed comment")
- true
- } else {
- false
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = in.ch match {
+ case CR | LF | SU =>
+ case _ => in.next; skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = in.ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
+ case _ => in.next; skipJavaComment()
+ }
+ in.ch match {
+ case '/' => in.next ; skipLineComment() ; true
+ case '*' => in.next ; skipJavaComment() ; true
+ case _ => false
}
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 2aee9bd4bc..9469113238 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -188,7 +188,6 @@ trait ScalaSettings extends AbsScalaSettings
val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
- val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 5b5118a94f..61ac07d18f 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -30,6 +30,14 @@ abstract class SymbolLoaders {
member
}
+ protected def signalError(root: Symbol, ex: Throwable) {
+ if (settings.debug.value) ex.printStackTrace()
+ globalError(ex.getMessage() match {
+ case null => "i/o error while loading " + root.name
+ case msg => "error while loading " + root.name + ", " + msg
+ })
+ }
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -168,18 +176,6 @@ abstract class SymbolLoaders {
}
override def complete(root: Symbol) {
- def signalError(ex: Exception) {
- ok = false
- if (settings.debug.value) ex.printStackTrace()
- val msg = ex.getMessage()
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg)
- }
try {
val start = currentTime
val currentphase = phase
@@ -189,11 +185,11 @@ abstract class SymbolLoaders {
ok = true
setSource(root)
setSource(root.companionSymbol) // module -> class, class -> module
- } catch {
- case ex: IOException =>
- signalError(ex)
- case ex: MissingRequirementError =>
- signalError(ex)
+ }
+ catch {
+ case ex @ (_: IOException | _: MissingRequirementError) =>
+ ok = false
+ signalError(root, ex)
}
initRoot(root)
if (!root.isPackageClass) initRoot(root.companionSymbol)
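
Extracting signalError into a protected method lets scaladoc keep the behaviour described in the removed SI-5593 comment (swallowing classpath errors while it rummages through packages) by overriding the hook instead of special-casing settings.isScaladoc here. A hypothetical sketch of such an override (the class name is assumed):

    import scala.tools.nsc.symtab.SymbolLoaders

    // Hypothetical scaladoc-side loaders: never report classpath errors globally,
    // only trace them under -Ydebug (cf. the removed SI-5593 comment).
    abstract class ScaladocSymbolLoadersSketch extends SymbolLoaders {
      import global._
      override protected def signalError(root: Symbol, ex: Throwable) {
        if (settings.debug.value) ex.printStackTrace()
      }
    }
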
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 580f024b40..0af75a2aad 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -683,7 +683,7 @@ trait ContextErrors {
// same reason as for MacroBodyTypecheckException
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
- private def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
+ protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
@@ -772,15 +772,15 @@ trait ContextErrors {
))
}
- def MacroImplementationNotFoundError(expandee: Tree) = {
- val message =
- "macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
- (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
- else "")
- macroExpansionError(expandee, message)
- }
+ def MacroImplementationNotFoundError(expandee: Tree) =
+ macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name))
}
+
+ /** This file will be the death of me. */
+ protected def macroImplementationNotFoundMessage(name: Name): String = (
+ s"""|macro implementation not found: $name
+ |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin
+ )
}
trait InferencerContextErrors {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index d4f402b747..9f16f65a6a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1694,7 +1694,7 @@ trait Infer extends Checkable {
}
else if (sym.isOverloaded) {
val xs = sym.alternatives
- val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
+ val tparams = newAsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 1693bdbc8c..91ebd798e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -43,8 +43,15 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import definitions._
import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
+
def globalSettings = global.settings
+ protected def findMacroClassLoader(): ClassLoader = {
+ val classpath = global.classPath.asURLs
+ macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+ ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ }
+
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -474,21 +481,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Loads classes from -cp (aka the library classpath).
* Is also capable of detecting REPL and reusing its classloader.
*/
- lazy val macroClassloader: ClassLoader = {
- val classpath = global.classPath.asURLs
- macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
- val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
- // a heuristic to detect the REPL
- if (global.settings.exposeEmptyPackage.value) {
- macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
- import scala.tools.nsc.interpreter._
- val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
- new AbstractFileClassLoader(virtualDirectory, loader) {}
- } else {
- loader
- }
- }
+ lazy val macroClassloader: ClassLoader = findMacroClassLoader()
/** Produces a function that can be used to invoke macro implementation for a given macro definition:
* 1) Looks up macro implementation symbol in this universe.
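
macroClassloader now delegates to the protected findMacroClassLoader(), so the REPL-detection heuristic removed here can move to the REPL side as an override. An illustrative sketch of the wrapping that override is expected to perform, using the relocated AbstractFileClassLoader (the object and method names are invented for the example):

    import scala.reflect.io.VirtualDirectory
    import scala.tools.nsc.util.AbstractFileClassLoader

    object ReplMacroLoaderSketch {
      // Wrap the -cp loader in a loader over the REPL's virtual output directory,
      // as the removed heuristic did when it detected a REPL session.
      def replMacroLoader(classpathLoader: ClassLoader, replOutput: VirtualDirectory): ClassLoader =
        new AbstractFileClassLoader(replOutput, classpathLoader) {}

      def main(args: Array[String]): Unit = {
        val out = new VirtualDirectory("(memory)", None)  // in-memory output directory
        println(replMacroLoader(getClass.getClassLoader, out))
      }
    }
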
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index a1bf3a56c3..007c7c6a83 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -47,10 +47,11 @@ trait Namers extends MethodSynthesis {
private class NormalNamer(context: Context) extends Namer(context)
def newNamer(context: Context): Namer = new NormalNamer(context)
- def newNamerFor(context: Context, tree: Tree): Namer =
- newNamer(context.makeNewScope(tree, tree.symbol))
+ def newNamerFor(context: Context, tree: Tree): Namer = newNamer(context.makeNewScope(tree, tree.symbol))
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
+ // overridden by the presentation compiler
+ def saveDefaultGetter(meth: Symbol, default: Symbol) { }
import NamerErrorGen._
val typer = newTyper(context)
@@ -594,17 +595,6 @@ trait Namers extends MethodSynthesis {
}
}
- def enterIfNotThere(sym: Symbol) {
- val scope = context.scope
- @tailrec def search(e: ScopeEntry) {
- if ((e eq null) || (e.owner ne scope))
- scope enter sym
- else if (e.sym ne sym) // otherwise, aborts since we found sym
- search(e.tail)
- }
- search(scope lookupEntry sym.name)
- }
-
def enterValDef(tree: ValDef) {
if (noEnterGetterSetter(tree))
assignAndEnterFinishedSymbol(tree)
@@ -697,22 +687,9 @@ trait Namers extends MethodSynthesis {
validateCompanionDefs(tree)
}
- // this logic is needed in case typer was interrupted half
- // way through and then comes back to do the tree again. In
- // that case the definitions that were already attributed as
- // well as any default parameters of such methods need to be
- // re-entered in the current scope.
- protected def enterExistingSym(sym: Symbol): Context = {
- if (forInteractive && sym != null && sym.owner.isTerm) {
- enterIfNotThere(sym)
- if (sym.isLazy)
- sym.lazyAccessor andAlso enterIfNotThere
-
- for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
- defAtt.defaultGetters foreach enterIfNotThere
- }
- this.context
- }
+ // Hooks which are overridden in the presentation compiler
+ def enterExistingSym(sym: Symbol): Context = this.context
+ def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
enterSym(tree)
@@ -1297,17 +1274,10 @@ trait Namers extends MethodSynthesis {
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- if (forInteractive && default.owner.isTerm) {
- // save the default getters as attachments in the method symbol. if compiling the
- // same local block several times (which can happen in interactive mode) we might
- // otherwise not find the default symbol, because the second time it the method
- // symbol will be re-entered in the scope but the default parameter will not.
- meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
- case Some(att) => att.defaultGetters += default
- case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
- }
- }
- } else if (baseHasDefault) {
+ if (default.owner.isTerm)
+ saveDefaultGetter(meth, default)
+ }
+ else if (baseHasDefault) {
// the parameter does not have a default itself, but the
// corresponding parameter in the base class does.
sym.setFlag(DEFAULTPARAM)
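
enterExistingSym, enterIfNotThere and saveDefaultGetter become no-op hooks; the interactive-specific bodies removed above are expected to reappear as overrides in the presentation compiler. A hypothetical sketch of that side, reconstructed from the removed code (the trait names are assumed, and saveDefaultGetter is left as a stub):

    import scala.annotation.tailrec
    import scala.tools.nsc.typechecker.Analyzer

    // Hypothetical presentation-compiler analyzer overriding the new Namer hooks.
    trait InteractiveAnalyzerSketch extends Analyzer {
      import global._

      trait InteractiveNamerHooks extends Namer {
        // Re-enter a symbol attributed in a previous, interrupted run (from the removed code).
        override def enterIfNotThere(sym: Symbol) {
          val scope = context.scope
          @tailrec def search(e: ScopeEntry) {
            if ((e eq null) || (e.owner ne scope)) scope enter sym
            else if (e.sym ne sym) search(e.tail)
          }
          search(scope lookupEntry sym.name)
        }
        override def enterExistingSym(sym: Symbol): Context = {
          if (sym != null && sym.owner.isTerm) enterIfNotThere(sym)
          this.context
        }
        override def saveDefaultGetter(meth: Symbol, default: Symbol) {
          // would record the getter (cf. the removed DefaultsOfLocalMethodAttachment logic)
        }
      }
      override def newNamer(context: Context): Namer =
        new Namer(context) with InteractiveNamerHooks
    }
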
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 1d28add6e0..e8925ce2d0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -186,18 +186,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
- if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
- println("========== scaladoc of "+sym+" =============================")
- println(toJavaDoc(expandedDocComment(sym)))
- for (member <- sym.info.members) {
- println(member+":"+sym.thisType.memberInfo(member)+"\n"+
- toJavaDoc(expandedDocComment(member, sym)))
- for ((useCase, comment, pos) <- useCases(member, sym)) {
- println("usecase "+useCase+":"+useCase.info)
- println(toJavaDoc(comment))
- }
- }
- }
super.transform(tree)
}
transformClassDef
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
index 3ecd3b9ae4..65a3fedbd2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -4,7 +4,7 @@
*/
package scala.tools.nsc
-package interpreter
+package typechecker
import java.lang.{ reflect => r }
import r.TypeVariable
@@ -12,7 +12,6 @@ import scala.reflect.NameTransformer
import NameTransformer._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
-import typechecker.DestructureTypes
/** A more principled system for turning types into strings.
*/
@@ -53,8 +52,7 @@ trait StructuredTypeStrings extends DestructureTypes {
private def shortClass(x: Any) = {
if (settings.debug.value) {
val name = (x.getClass.getName split '.').last
- val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
- val str = if (isAnon) name else (name split '$').last
+ val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last
" // " + str
}
@@ -152,11 +150,11 @@ trait StructuredTypeStrings extends DestructureTypes {
* "definition" is when you want strings like
*/
trait TypeStrings {
+ private type JClass = java.lang.Class[_]
private val ObjectClass = classOf[java.lang.Object]
private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
private val primitiveMap = primitives.toList map { x =>
val key = x match {
- case "void" => "Void"
case "int" => "Integer"
case "char" => "Character"
case s => s.capitalize
@@ -169,6 +167,11 @@ trait TypeStrings {
("java.lang." + key) -> ("scala." + value)
} toMap
+ def isAnonClass(cl: Class[_]) = {
+ val xs = cl.getName.reverse takeWhile (_ != '$')
+ xs.nonEmpty && xs.forall(_.isDigit)
+ }
+
def scalaName(s: String): String = {
if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
else if (s == "void") "scala.Unit"
@@ -178,17 +181,16 @@ trait TypeStrings {
// Trying to put humpty dumpty back together again.
def scalaName(clazz: JClass): String = {
val name = clazz.getName
- val isAnon = clazz.isScalaAnonymous
val enclClass = clazz.getEnclosingClass
def enclPre = enclClass.getName + MODULE_SUFFIX_STRING
def enclMatch = name startsWith enclPre
scalaName(
- if (enclClass == null || isAnon || !enclMatch) name
+ if (enclClass == null || isAnonClass(clazz) || !enclMatch) name
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
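
For reference, the new isAnonClass test keys on the runtime naming convention for anonymous classes (a trailing $<digits> suffix). A standalone copy of the helper, just to show its behaviour:

    object IsAnonClassDemo {
      def isAnonClass(cl: Class[_]) = {
        val xs = cl.getName.reverse takeWhile (_ != '$')
        xs.nonEmpty && xs.forall(_.isDigit)
      }
      def main(args: Array[String]): Unit = {
        val anon = new Runnable { def run() = () }
        println(isAnonClass(anon.getClass))    // true:  e.g. IsAnonClassDemo$$anon$1
        println(isAnonClass(classOf[String]))  // false: java.lang.String
      }
    }
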
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 959c5a0eb8..eaf57cd39c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -86,8 +86,6 @@ trait Typers extends Adaptations with Tags {
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- private def isPastTyper = phase.id > currentRun.typerPhase.id
-
// To enable decent error messages when the typer crashes.
// TODO - this only catches trees which go through def typed,
// but there are all kinds of back ways - typedClassDef, etc. etc.
@@ -98,13 +96,18 @@ trait Typers extends Adaptations with Tags {
// - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
// - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
// this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
- private def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine }
import TyperErrorGen._
+ /** Overridden to false in scaladoc and/or interactive. */
+ def canAdaptConstantTypeToLiteral = true
+ def canTranslateEmptyListToNil = true
+ def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+
def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
typed(docDef.definition, mode, pt)
@@ -1038,7 +1041,7 @@ trait Typers extends Adaptations with Tags {
tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
adaptAnnotations(tree, this, mode, pt)
- case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
+ case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
@@ -2433,11 +2436,9 @@ trait Typers extends Adaptations with Tags {
if (pat1.tpe.paramSectionCount > 0)
pat1 setType pat1.tpe.finalResultType
- if (forInteractive) {
- for (bind @ Bind(name, _) <- cdef.pat)
- if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
- namer.enterIfNotThere(bind.symbol)
- }
+ for (bind @ Bind(name, _) <- cdef.pat)
+ if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
+ namer.enterIfNotThere(bind.symbol)
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
@@ -3262,7 +3263,7 @@ trait Typers extends Adaptations with Tags {
* forced during kind-arity checking, so it is guarded by additional
* tests to ensure we're sufficiently far along.
*/
- if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
atPos(tree.pos)(gen.mkNil setType restpe)
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
@@ -4688,11 +4689,7 @@ trait Typers extends Adaptations with Tags {
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- def errorTree = tree match {
- case _ if !forInteractive => tree
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
- }
+ def errorTree = missingSelectErrorTree(tree, qual, name)
def asTypeSelection = (
if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
@@ -5255,7 +5252,6 @@ trait Typers extends Adaptations with Tags {
case tree: ApplyDynamic => typedApplyDynamic(tree)
case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
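
The forScaladoc/forInteractive tests removed from adapt, typedApply and handleMissing are replaced by the three overridable members introduced at the top of Typer, plus newPatternMatching on the Typers trait. A hypothetical sketch of the presentation-compiler side, reconstructed from the removed conditions (the trait names are assumed):

    import scala.tools.nsc.typechecker.Analyzer

    // Hypothetical presentation-compiler analyzer overriding the new Typer hooks.
    trait InteractiveTyperSketch extends Analyzer {
      import global._

      // was: newPatternMatching = !forInteractive
      override protected def newPatternMatching = false

      trait InteractiveTyperHooks extends Typer {
        // was guarded by !forScaladoc && !forInteractive
        override def canAdaptConstantTypeToLiteral = false
        // was guarded by !forInteractive
        override def canTranslateEmptyListToNil = false
        // keep qualifier and name on the error tree, as the removed code did
        override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
          case Select(_, _)             => treeCopy.Select(tree, qual, name)
          case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
          case _                        => tree
        }
      }
      override def newTyper(context: Context): Typer =
        new Typer(context) with InteractiveTyperHooks
    }
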
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala
index e909cd945d..7aef87f387 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala
@@ -3,10 +3,9 @@
*/
package scala.tools.nsc
-package interpreter
+package util
import scala.tools.nsc.io.AbstractFile
-import util.ScalaClassLoader
import java.net.{ URL, URLConnection, URLStreamHandler }
import scala.collection.{ mutable, immutable }
@@ -78,7 +77,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
case null => super.classBytes(name)
case file => file.toByteArray
}
- override def findClass(name: String): JClass = {
+ override def findClass(name: String): Class[_] = {
val bytes = classBytes(name)
if (bytes.length == 0)
throw new ClassNotFoundException(name)
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 4bc393bd0b..f91e94471a 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -13,7 +13,6 @@ import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import scala.reflect.internal.{Flags, Names}
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
import PickleFormat._
@@ -272,7 +271,6 @@ object ShowPickled extends Names {
}
def fromFile(path: String) = fromBytes(io.File(path).toByteArray())
- def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
@@ -287,7 +285,7 @@ object ShowPickled extends Names {
def main(args: Array[String]) {
args foreach { arg =>
- (fromFile(arg) orElse fromName(arg)) match {
+ fromFile(arg) match {
case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index 5c62819f04..6c1821f8aa 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -23,7 +23,7 @@ trait StdTags {
}
})
- private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ protected def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
u.TypeTag[T](
m,
new TypeCreator {
@@ -34,8 +34,6 @@ trait StdTags {
lazy val tagOfString = tagOfStaticClass[String]
lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
- lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
- lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
lazy val tagOfBigInt = tagOfStaticClass[BigInt]
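
Making tagOfStaticClass protected, together with dropping the interpreter-specific tags, lets the REPL define those tags in its own extension of StdTags rather than here. A hypothetical sketch:

    import scala.tools.reflect.StdTags

    // Hypothetical REPL-side tags, kept out of the compiler's StdTags.
    trait ReplStdTagsSketch extends StdTags {
      lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
    }
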
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index df9d907377..e6bbe1dbed 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -5,7 +5,7 @@ import scala.tools.nsc.EXPRmode
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.io.VirtualDirectory
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
+import scala.tools.nsc.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
@@ -283,7 +283,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val file = new BatchSourceFile("<toolbox>", wrappedCode)
val unit = new CompilationUnit(file)
phase = run.parserPhase
- val parser = new syntaxAnalyzer.UnitParser(unit)
+ val parser = newUnitParser(unit)
val wrappedTree = parser.parse()
throwIfErrors()
val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 7e984fd1f7..3cfc1eb2a1 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -6,27 +6,14 @@
package scala.tools
package util
-import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.interpreter.IMain
-import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, InputStream,
- PrintWriter, Writer }
-import java.util.{ Locale }
-import java.util.regex.Pattern
-import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
- ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
- SimpleJavaFileObject, StandardLocation }
-import scala.reflect.io.{ AbstractFile, Directory, File, Path }
-import java.io.{File => JFile}
-import scala.io.Source
-import scala.util.{ Try, Success, Failure }
-import scala.util.Properties.lineSeparator
-import scala.collection.JavaConverters
-import scala.collection.generic.Clearable
-import java.net.URL
-import scala.language.reflectiveCalls
+import java.io.PrintWriter
-import Javap._
+trait JpResult {
+ def isError: Boolean
+ def value: Any
+ def show(): Unit
+}
trait Javap {
def loader: ScalaClassLoader
@@ -43,672 +30,3 @@ object NoJavap extends Javap {
def tryFile(path: String): Option[Array[Byte]] = None
def tryClass(path: String): Array[Byte] = Array()
}
-
-class JavapClass(
- val loader: ScalaClassLoader,
- val printWriter: PrintWriter,
- intp: Option[IMain] = None
-) extends Javap {
- import JavapTool.ToolArgs
- import JavapClass._
-
- lazy val tool = JavapTool()
-
- /** Run the tool. Option args start with "-".
- * The default options are "-protected -verbose".
- * Byte data for filename args is retrieved with findBytes.
- */
- def apply(args: Seq[String]): List[JpResult] = {
- val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
- val (flags, upgraded) = upgrade(options)
- import flags.{ app, fun, help, raw }
- val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
- if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter)))
- else if (targets.isEmpty) List(JpResult("No anonfuns found."))
- else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app)))
- }
-
- /** Cull our tool options. */
- private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match {
- case (t,s) if s.nonEmpty => (t,s)
- case (t,s) => (t, JavapTool.DefaultOptions)
- }
-
- /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
- private def bytesFor(path: String, app: Boolean) = Try {
- def last = intp.get.mostRecentVar // fail if no intp
- def req = if (path == "-") last else {
- val s = path.splitHashMember
- if (s._1.nonEmpty) s._1
- else s._2 getOrElse "#"
- }
- def asAppBody(s: String) = {
- val (cls, fix) = s.splitSuffix
- s"${cls}$$delayedInit$$body${fix}"
- }
- def todo = if (app) asAppBody(req) else req
- val bytes = findBytes(todo)
- if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'")
- else bytes
- }
-
- def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
-
- /** Assume the string is a path and try to find the classfile
- * it represents.
- */
- def tryFile(path: String): Option[Array[Byte]] =
- (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
-
- /** Assume the string is a fully qualified class name and try to
- * find the class object it represents.
- * There are other symbols of interest, too:
- * - a definition that is wrapped in an enclosing class
- * - a synthetic that is not in scope but its associated class is
- */
- def tryClass(path: String): Array[Byte] = {
- def load(name: String) = loader classBytes name
- def loadable(name: String) = loader resourceable name
- // if path has an interior dollar, take it as a synthetic
- // if the prefix up to the dollar is a symbol in scope,
- // result is the translated prefix + suffix
- def desynthesize(s: String) = {
- val i = s indexOf '$'
- if (0 until s.length - 1 contains i) {
- val name = s substring (0, i)
- val sufx = s substring i
- val tran = intp flatMap (_ translatePath name)
- def loadableOrNone(strip: Boolean) = {
- def suffix(strip: Boolean)(x: String) =
- (if (strip && (x endsWith "$")) x.init else x) + sufx
- val res = tran map (suffix(strip) _)
- if (res.isDefined && loadable(res.get)) res else None
- }
- // try loading translated+suffix
- val res = loadableOrNone(strip = false)
- // some synthetics lack a dollar, (e.g., suffix = delayedInit$body)
- // so as a hack, if prefix$$suffix fails, also try prefix$suffix
- if (res.isDefined) res else loadableOrNone(strip = true)
- } else None
- }
- val p = path.asClassName // scrub any suffix
- // if repl, translate the name to something replish
- // (for translate, would be nicer to get the sym and ask .isClass,
- // instead of translatePath and then asking did I get a class back)
- val q = if (intp.isEmpty) p else (
- // only simple names get the scope treatment
- Some(p) filter (_ contains '.')
- // take path as a Name in scope
- orElse (intp flatMap (_ translatePath p) filter loadable)
- // take path as a Name in scope and find its enclosing class
- orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
- // take path as a synthetic derived from some Name in scope
- orElse desynthesize(p)
- // just try it plain
- getOrElse p
- )
- load(q)
- }
-
- /** Base class for javap tool adapters for java 6 and 7. */
- abstract class JavapTool {
- type ByteAry = Array[Byte]
- type Input = Pair[String, Try[ByteAry]]
-
- /** Run the tool. */
- def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
-
- // Since the tool is loaded by reflection, check for catastrophic failure.
- protected def failed: Boolean
- implicit protected class Failer[A](a: =>A) {
- def orFailed[B >: A](b: =>B) = if (failed) b else a
- }
- protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.")
-
- // output filtering support
- val writer = new CharArrayWriter
- def written = {
- writer.flush()
- val w = writer.toString
- writer.reset()
- w
- }
-
- /** Create a Showable with output massage.
- * @param raw show ugly repl names
- * @param target attempt to filter output to show region of interest
- * @param preamble other messages to output
- */
- def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
- // ReplStrippingWriter clips and scrubs on write(String)
- // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
- def show() =
- if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
- else writeLines()
- private def writeLines() {
- // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
- // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
- val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
- var filtering = false // true if in region matching filter
- // true to output
- def checkFilter(line: String) = if (filterOn.isEmpty) true else {
- // cheap heuristic, todo maybe parse for the java sig.
- // method sigs end in paren semi
- def isAnyMethod = line.endsWith(");")
- def isOurMethod = {
- val lparen = line.lastIndexOf('(')
- val blank = line.lastIndexOf(' ', lparen)
- (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
- }
- filtering = if (filtering) {
- // next blank line terminates section
- // for -public, next line is next method, more or less
- line.trim.nonEmpty && !isAnyMethod
- } else {
- isAnyMethod && isOurMethod
- }
- filtering
- }
- for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
- printWriter write line+lineSeparator
- printWriter.flush()
- }
- }
- }
-
- class JavapTool6 extends JavapTool {
- import JavapTool._
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
-
- val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
- val printWrapper = new PrintWriter(writer)
- def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- PrinterCtr.newInstance(in, printWrapper, env) orFailed null
- def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
- fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer
- printWrapper.flush() // just in case
- showWithPreamble(raw, target)
- }
-
- lazy val parser = new JpOptions
- def newEnv(opts: Seq[String]): FakeEnvironment = {
- def result = {
- val env: FakeEnvironment = EnvClass.newInstance()
- parser(opts) foreach { case (name, value) =>
- val field = EnvClass getDeclaredField name
- field setAccessible true
- field.set(env, value.asInstanceOf[AnyRef])
- }
- env
- }
- result orFailed null
- }
-
- override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
- (inputs map {
- case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
- case (_, Failure(e)) => JpResult(e.toString)
- }).toList orFailed List(noToolError)
- }
-
- class JavapTool7 extends JavapTool {
-
- import JavapTool._
- type Task = {
- def call(): Boolean // true = ok
- //def run(args: Array[String]): Int // all args
- //def handleOptions(args: Array[String]): Unit // options, then run() or call()
- }
- // result of Task.run
- //object TaskResult extends Enumeration {
- // val Ok, Error, CmdErr, SysErr, Abnormal = Value
- //}
- val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
- override protected def failed = TaskClaas eq null
-
- val TaskCtor = TaskClaas.getConstructor(
- classOf[Writer],
- classOf[JavaFileManager],
- classOf[DiagnosticListener[_]],
- classOf[JIterable[String]],
- classOf[JIterable[String]]
- ) orFailed null
-
- class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
- import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
- type D = Diagnostic[_ <: JavaFileObject]
- val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D]
- override def report(d: Diagnostic[_ <: JavaFileObject]) {
- diagnostics += d
- }
- override def clear() = diagnostics.clear()
- /** All diagnostic messages.
- * @param locale Locale for diagnostic messages, null by default.
- */
- def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList
-
- def reportable(raw: Boolean): String = {
- // don't filter this message if raw, since the names are likely to differ
- val container = "Binary file .* contains .*".r
- val m = if (raw) messages
- else messages filter (_ match { case container() => false case _ => true })
- clear()
- if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
- else ""
- }
- }
- val reporter = new JavaReporter
-
- // DisassemblerTool.getStandardFileManager(reporter,locale,charset)
- val defaultFileManager: JavaFileManager =
- (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod (
- "create",
- classOf[DiagnosticListener[_]],
- classOf[PrintWriter]
- ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null
-
- // manages named arrays of bytes, which might have failed to load
- class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager)
- extends ForwardingJavaFileManager[JavaFileManager](delegate) {
- import JavaFileObject.Kind
- import Kind._
- import StandardLocation._
- import JavaFileManager.Location
- import java.net.URI
- def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
-
- def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
- def managedFile(name: String, kind: Kind) = kind match {
- case CLASS => fileObjectForInput(name, inputNamed(name), kind)
- case _ => null
- }
- // todo: just wrap it as scala abstractfile and adapt it uniformly
- def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject =
- new SimpleJavaFileObject(uri(name), kind) {
- override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get)
- // if non-null, ClassWriter wrongly requires scheme non-null
- override def toUri: URI = null
- override def getName: String = name
- // suppress
- override def getLastModified: Long = -1L
- }
- override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject =
- location match {
- case CLASS_PATH => managedFile(className, kind)
- case _ => null
- }
- override def hasLocation(location: Location): Boolean =
- location match {
- case CLASS_PATH => true
- case _ => false
- }
- }
- def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
-
- // show tool messages and tool output, with output massage
- def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
-
- // eventually, use the tool interface
- def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
- //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
- //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
- import JavaConverters.asJavaIterableConverter
- TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
- .orFailed (throw new IllegalStateException)
- }
- // a result per input
- private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
- Try {
- task(options, Seq(claas), inputs).call()
- } map {
- case true => JpResult(showable(raw, claas))
- case _ => JpResult(reporter.reportable(raw))
- } recoverWith {
- case e: java.lang.reflect.InvocationTargetException => e.getCause match {
- case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
- case x => Failure(x)
- }
- } lastly {
- reporter.clear()
- }
- override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
- case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get
- case (_, Failure(e)) => JpResult(e.toString)
- }).toList orFailed List(noToolError)
- }
-
- object JavapTool {
- // >= 1.7
- val Tool = "com.sun.tools.javap.JavapTask"
-
- // < 1.7
- val Env = "sun.tools.javap.JavapEnvironment"
- val Printer = "sun.tools.javap.JavapPrinter"
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- // support JavapEnvironment
- class JpOptions {
- private object Access {
- final val PRIVATE = 0
- final val PROTECTED = 1
- final val PACKAGE = 2
- final val PUBLIC = 3
- }
- private val envActionMap: Map[String, (String, Any)] = {
- val map = Map(
- "-l" -> (("showLineAndLocal", true)),
- "-c" -> (("showDisassembled", true)),
- "-s" -> (("showInternalSigs", true)),
- "-verbose" -> (("showVerbose", true)),
- "-private" -> (("showAccess", Access.PRIVATE)),
- "-package" -> (("showAccess", Access.PACKAGE)),
- "-protected" -> (("showAccess", Access.PROTECTED)),
- "-public" -> (("showAccess", Access.PUBLIC)),
- "-all" -> (("showallAttr", true))
- )
- map ++ List(
- "-v" -> map("-verbose"),
- "-p" -> map("-private")
- )
- }
- def apply(opts: Seq[String]): Seq[(String, Any)] = {
- opts flatMap { opt =>
- envActionMap get opt match {
- case Some(pair) => List(pair)
- case _ =>
- val charOpts = opt.tail.toSeq map ("-" + _)
- if (charOpts forall (envActionMap contains _))
- charOpts map envActionMap
- else Nil
- }
- }
- }
- }
-
- case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
-
- object ToolArgs {
- def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
- case ((t,others), s) => s match {
- case "-fun" => (t copy (fun=true), others)
- case "-app" => (t copy (app=true), others)
- case "-help" => (t copy (help=true), others)
- case "-raw" => (t copy (raw=true), others)
- case _ => (t, others :+ s)
- }
- }
- }
-
- val helps = List(
- "usage" -> ":javap [opts] [path or class or -]...",
- "-help" -> "Prints this help message",
- "-raw" -> "Don't unmangle REPL names",
- "-app" -> "Show the DelayedInit body of Apps",
- "-fun" -> "Show anonfuns for class or Class#method",
- "-verbose/-v" -> "Stack size, number of locals, method args",
- "-private/-p" -> "Private classes and members",
- "-package" -> "Package-private classes and members",
- "-protected" -> "Protected classes and members",
- "-public" -> "Public classes and members",
- "-l" -> "Line and local variable tables",
- "-c" -> "Disassembled code",
- "-s" -> "Internal type signatures",
- "-sysinfo" -> "System info of class",
- "-constants" -> "Static final constants"
- )
-
- // match prefixes and unpack opts, or -help on failure
- def massage(arg: String): Seq[String] = {
- require(arg startsWith "-")
- // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
- val r = """(-[^/]*)(/(-.))?""".r
- def maybe(opt: String, s: String): Option[String] = opt match {
- // disambiguate by preferring short form
- case r(lf,_,sf) if s == sf => Some(sf)
- case r(lf,_,sf) if lf startsWith s => Some(lf)
- case _ => None
- }
- def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
- // one candidate or one single-char candidate
- def uniqueOf(maybes: Seq[String]) = {
- def single(s: String) = s.length == 2
- if (maybes.length == 1) maybes
- else if ((maybes count single) == 1) maybes filter single
- else Nil
- }
- // each optchar must decode to exactly one option
- def unpacked(s: String): Try[Seq[String]] = {
- val ones = (s drop 1) map { c =>
- val maybes = uniqueOf(candidates(s"-$c"))
- if (maybes.length == 1) Some(maybes.head) else None
- }
- Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
- }
- val res = uniqueOf(candidates(arg))
- if (res.nonEmpty) res
- else (unpacked(arg)
- getOrElse (Seq("-help"))) // or else someone needs help
- }
-
- def helper(pw: PrintWriter) = new Showable {
- def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
- }
-
- val DefaultOptions = List("-protected", "-verbose")
-
- def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
-
- private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
-
- private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
-
- def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
- }
-}
-
-object JavapClass {
- def apply(
- loader: ScalaClassLoader = ScalaClassLoader.appLoader,
- printWriter: PrintWriter = new PrintWriter(System.out, true),
- intp: Option[IMain] = None
- ) = new JavapClass(loader, printWriter, intp)
-
- // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
- // or a resource path com/acme/Widget.class; but not widget.out
- implicit class MaybeClassLike(val s: String) extends AnyVal {
- /* private[this] final val suffix = ".class" */
- private def suffix = ".class"
- def asClassName = (s stripSuffix suffix).replace('/', '.')
- def asClassResource = if (s endsWith suffix) s else s.replace('.', '/') + suffix
- def splitSuffix: (String, String) = if (s endsWith suffix) (s dropRight suffix.length, suffix) else (s, "")
- def strippingSuffix(f: String => String): String =
- if (s endsWith suffix) f(s dropRight suffix.length) else s
- // e.g. Foo#bar. Foo# yields zero-length member part.
- def splitHashMember: (String, Option[String]) = {
- val i = s lastIndexOf '#'
- if (i < 0) (s, None)
- //else if (i >= s.length - 1) (s.init, None)
- else (s take i, Some(s drop i+1))
- }
- }
- implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
- private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
- def parents: List[ClassLoader] = parentsOf(cl)
- /* all file locations */
- def locations = {
- def alldirs = parents flatMap (_ match {
- case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
- case jcl: java.net.URLClassLoader => jcl.getURLs
- case _ => Nil
- })
- val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
- dirs
- }
- /* only the file location from which the given class is loaded */
- def locate(k: String): Option[Path] = {
- Try {
- val claas = try cl loadClass k catch {
- case _: NoClassDefFoundError => null // let it snow
- }
- // cf ScalaClassLoader.originOfClass
- claas.getProtectionDomain.getCodeSource.getLocation
- } match {
- case Success(null) => None
- case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
- case _ => None
- }
- }
- /* would classBytes succeed with a nonempty array */
- def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
- }
- implicit class PathOps(val p: Path) extends AnyVal {
- import scala.tools.nsc.io.Jar
- def isJar = Jar isJarOrZip p
- }
- implicit class URLOps(val url: URL) extends AnyVal {
- def isFile: Boolean = url.getProtocol == "file"
- }
- object FunFinder {
- def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
- }
- class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
-
- // class k, candidate f without prefix
- def isFunOfClass(k: String, f: String) = {
- val p = (s"${Pattern quote k}\\$$+anonfun").r
- (p findPrefixOf f).nonEmpty
- }
- // class k, candidate f without prefix, method m
- def isFunOfMethod(k: String, m: String, f: String) = {
- val p = (s"${Pattern quote k}\\$$+anonfun\\$$${Pattern quote m}\\$$").r
- (p findPrefixOf f).nonEmpty
- }
- def isFunOfTarget(k: String, m: Option[String], f: String) =
- if (m.isEmpty) isFunOfClass(k, f)
- else isFunOfMethod(k, m.get, f)
- def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
- for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
- }
- // path prefix p, class k, dir d
- def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
- val subdir = Path(p)
- for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
- yield f.name
- }
- // path prefix p, class k, jar file f
- def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
- import java.util.jar.JarEntry
- import scala.tools.nsc.io.Jar
- def maybe(e: JarEntry) = {
- val (path, name) = {
- val parts = e.getName split "/"
- if (parts.length < 2) ("", e.getName)
- else (parts.init mkString "/", parts.last)
- }
- if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
- }
- (new Jar(f) map maybe).flatten
- }
- def loadable(name: String) = loader resourceable name
- // translated class, optional member, opt member to filter on, whether it is repl output
- def translate(s: String): (String, Option[String], Option[String], Boolean) = {
- val (k0, m0) = s.splitHashMember
- val k = k0.asClassName
- val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
- val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
- // class is either something replish or available to loader
- // $line.$read$$etc$Foo#member
- ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
- // s = "f" and $line.$read$$etc$#f is what we're after,
- // ignoring any #member (except take # as filter on #apply)
- orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
- getOrElse (k, member, filter, false))
- }
- /** Find the classnames of anonfuns associated with k,
- * where k may be an available class or a symbol in scope.
- */
- def funsOf(k0: String): Seq[String] = {
- // class is either something replish or available to loader
- val (k, member, filter, isReplish) = translate(k0)
- val splat = k split "\\."
- val name = splat.last
- val prefix = if (splat.length > 1) splat.init mkString "/" else ""
- val pkg = if (splat.length > 1) splat.init mkString "." else ""
- // reconstitute an anonfun with a package
- // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
- def packaged(s: String) = {
- val p = if (pkg.isEmpty) s else s"$pkg.$s"
- val pm = filter map (p + "#" + _)
- pm getOrElse p
- }
- // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
- val fs = if (isReplish) {
- def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
- if (p.isEmpty) Option(d)
- else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
- }
- outed(intp.get.replOutput.dir, splat.init) map { d =>
- listFunsInAbsFile(name, member, d) map packaged
- }
- } else {
- loader locate k map { w =>
- if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
- else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
- else Nil
- }
- }
- fs match {
- case Some(xs) => xs.to[Seq] // maybe empty
- case None => Seq() // nothing found, e.g., junk input
- }
- }
- def funs(ks: Seq[String]) = ks flatMap funsOf _
- }
-}
-
-object Javap {
-
- def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
-
- def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
-
- trait Showable {
- def show(): Unit
- }
-
- sealed trait JpResult {
- type ResultType
- def isError: Boolean
- def value: ResultType
- def show(): Unit
- // todo
- // def header(): String
- // def fields(): List[String]
- // def methods(): List[String]
- // def signatures(): List[String]
- }
- object JpResult {
- def apply(msg: String) = new JpError(msg)
- def apply(res: Showable) = new JpSuccess(res)
- }
- class JpError(msg: String) extends JpResult {
- type ResultType = String
- def isError = true
- def value = msg
- def show() = println(msg) // makes sense for :javap, less for -Ygen-javap
- }
- class JpSuccess(val value: Showable) extends JpResult {
- type ResultType = AnyRef
- def isError = false
- def show() = value.show() // output to tool's PrintWriter
- }
- implicit class Lastly[A](val t: Try[A]) extends AnyVal {
- private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
- def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
- }
-}
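
The bulk of the removed file is the :javap implementation; only the small Javap and JpResult interfaces remain here. One utility worth noting from the removed code is the lastly combinator it attached to Try (see the removed Lastly class near the end of the file): it runs a cleanup action after a Try-wrapped computation while handing back the original result, much like a finally clause. Below is a standalone, simplified sketch of the same idea; it is an illustration, not the removed implementation verbatim.

    import scala.util.Try

    object LastlySketch {
      implicit class Lastly[A](val t: Try[A]) extends AnyVal {
        // run the side effect, then return the original Success or Failure unchanged
        def lastly(last: => Unit): Try[A] = { last; t }
      }

      def main(args: Array[String]): Unit = {
        val r = Try(1 / 0) lastly println("cleanup ran")
        println(r) // Failure(java.lang.ArithmeticException: / by zero)
      }
    }
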
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index 523e0d57b7..f84fa161c0 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -263,7 +263,7 @@ trait CompilerControl { self: Global =>
* compiler thread.
*/
def parseTree(source: SourceFile): Tree = {
- new UnitParser(new CompilationUnit(source)).parse()
+ newUnitParser(new CompilationUnit(source)).parse()
}
/** Asks for a computation to be done quickly on the presentation compiler thread */
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index 93ef4c4d6c..93ef4c4d6c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 33b10d1a9a..099a882f10 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -14,11 +14,78 @@ import scala.tools.nsc.util.MultiHashMap
import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
-import scala.tools.nsc.typechecker.DivergentImplicit
+import scala.tools.nsc.doc.ScaladocAnalyzer
+import scala.tools.nsc.typechecker.{ Analyzer, DivergentImplicit }
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import scala.annotation.elidable
+import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
+trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer {
+ val global : Global
+ import global._
+ override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper {
+ override def canAdaptConstantTypeToLiteral = false
+ }
+}
+
+trait InteractiveAnalyzer extends Analyzer {
+ val global : Global
+ import global._
+
+ override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
+ override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
+ override protected def newPatternMatching = false
+
+ trait InteractiveTyper extends Typer {
+ override def canAdaptConstantTypeToLiteral = false
+ override def canTranslateEmptyListToNil = false
+ override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ }
+
+ trait InteractiveNamer extends Namer {
+ override def saveDefaultGetter(meth: Symbol, default: Symbol) {
+ // save the default getters as attachments in the method symbol. if compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time it the method
+      // otherwise not find the default symbol, because the second time the method
+ meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
+ }
+ // this logic is needed in case typer was interrupted half
+ // way through and then comes back to do the tree again. In
+ // that case the definitions that were already attributed as
+ // well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ override def enterExistingSym(sym: Symbol): Context = {
+ if (sym != null && sym.owner.isTerm) {
+ enterIfNotThere(sym)
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
+ }
+ super.enterExistingSym(sym)
+ }
+ override def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ @tailrec def search(e: ScopeEntry) {
+ if ((e eq null) || (e.owner ne scope))
+ scope enter sym
+ else if (e.sym ne sym) // otherwise, aborts since we found sym
+ search(e.tail)
+ }
+ search(scope lookupEntry sym.name)
+ }
+ }
+}
+
+
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
@@ -68,8 +135,26 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
@inline final def informIDE(msg: => String) =
if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+ // don't keep the original owner in presentation compiler runs
+ // (the map will grow indefinitely, and the only use case is the backend)
+ override protected def saveOriginalOwner(sym: Symbol) { }
+ override protected def originalEnclosingMethod(sym: Symbol) =
+ abort("originalOwner is not kept in presentation compiler runs.")
+
override def forInteractive = true
+ override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new InteractiveAsSeenFromMap(pre, clazz)
+
+ class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
+ /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
+   * which it is currently supposed not to be.
+ *
+ * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
+ * method rather than aborting in the failure case.
+ */
+ }
+
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
@@ -127,6 +212,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ override lazy val analyzer = new {
+ val global: Global.this.type = Global.this
+ } with InteractiveAnalyzer
+
private def cleanAllResponses() {
cleanResponses(waitLoadedTypeResponses)
cleanResponses(getParsedEnteredResponses)
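
The analyzer override in this hunk uses the compiler's usual cake wiring: the component trait declares an abstract val global, and the enclosing Global binds it to itself through an early definition (new { val global = ... } with InteractiveAnalyzer) so the component's initializers already see a fully bound global. A small, self-contained sketch of that wiring follows; Component and the name field are invented for the example and are not nsc types.

    object CakeWiringSketch {
      trait Component {
        val global: Global                  // abstract dependency on the enclosing compiler
        def describe: String = "component of " + global.name
      }

      class Global(val name: String) {
        // early definition: `global` is assigned before Component's own initializers run
        lazy val component: Component = new {
          val global: Global.this.type = Global.this
        } with Component
      }

      def main(args: Array[String]): Unit =
        println(new Global("interactive").component.describe)
    }
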
@@ -281,7 +370,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* top-level idents. Therefore, we can detect top-level symbols that have a name
* different from their source file
*/
- override lazy val loaders = new BrowsingLoaders {
+ override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders {
val global: Global.this.type = Global.this
}
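
Most of the new code in this file follows one pattern: the base Analyzer exposes newTyper and newNamer factory methods, and InteractiveAnalyzer overrides them to mix refinement traits (InteractiveTyper, InteractiveNamer) into the instances it returns, adjusting behaviour without forking the classes. Here is a minimal sketch of that pattern with illustrative names only; the real Typer and Context are far richer than the stand-ins below.

    object MixinFactorySketch {
      class Context(val unit: String)

      trait Analyzer {
        class Typer(val context: Context) {
          def typed(tree: String): String = "typed(" + tree + ")"
        }
        def newTyper(context: Context): Typer = new Typer(context)
      }

      trait InteractiveAnalyzer extends Analyzer {
        trait InteractiveTyper extends Typer {
          // interactive runs keep trees closer to source, so some rewrites are disabled
          override def typed(tree: String): String = "interactively typed(" + tree + ")"
        }
        override def newTyper(context: Context): Typer = new Typer(context) with InteractiveTyper
      }

      def main(args: Array[String]): Unit = {
        val analyzer = new InteractiveAnalyzer {}
        println(analyzer.newTyper(new Context("A.scala")).typed("x + 1"))
      }
    }
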
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
index 013b152e96..013b152e96 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
new file mode 100644
index 0000000000..3b4a36f62d
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools
+package nsc
+package interactive
+
+/** The main class for NSC, a compiler for the programming
+ * language Scala.
+ */
+object Main extends nsc.MainClass {
+ override def processSettingsHook(): Boolean = {
+ if (this.settings.Yidedebug.value) {
+ this.settings.Xprintpos.value = true
+ this.settings.Yrangepos.value = true
+ val compiler = new interactive.Global(this.settings, this.reporter)
+ import compiler.{ reporter => _, _ }
+
+ val sfs = command.files map getSourceFile
+ val reloaded = new interactive.Response[Unit]
+ askReload(sfs, reloaded)
+
+ reloaded.get.right.toOption match {
+ case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
+ case None => reporter.reset() // Causes other compiler errors to be ignored
+ }
+ askShutdown
+ false
+ }
+ else true
+ }
+}
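
The new Main drives the presentation compiler through its asynchronous ask protocol: it allocates a Response, passes it to askReload, then blocks on get, which yields either a Left result or a Right exception. The sketch below models such a response cell with a CountDownLatch; it is an illustration of the handshake, not the real scala.tools.nsc.interactive.Response.

    import java.util.concurrent.CountDownLatch

    object ResponseSketch {
      final class Response[T] {
        private val latch = new CountDownLatch(1)
        @volatile private var result: Either[T, Throwable] = _
        def set(x: T): Unit             = { result = Left(x);    latch.countDown() }
        def raise(exc: Throwable): Unit = { result = Right(exc); latch.countDown() }
        def get: Either[T, Throwable]   = { latch.await(); result }
      }

      def main(args: Array[String]): Unit = {
        val reloaded = new Response[Unit]
        new Thread(new Runnable { def run(): Unit = reloaded.set(()) }).start()
        reloaded.get match {
          case Left(_)   => println("reload finished")
          case Right(ex) => println("reload failed: " + ex)
        }
      }
    }
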
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index b184afd0f5..b184afd0f5 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
index a2d8e5d49a..a2d8e5d49a 100644
--- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
+++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
index 04c06b9357..04c06b9357 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
index 6288400629..c57e1da184 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -10,4 +10,5 @@ package interactive
trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
self: scala.tools.nsc.Global =>
+ override def useOffsetPositions = false
}
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
index f36f769ec9..f36f769ec9 100644
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
index b83c2cd095..b83c2cd095 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
index 7af9174704..7af9174704 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index a4a2de9b51..a4a2de9b51 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
index ad5c61b2b0..ad5c61b2b0 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
index 9382d5890f..9382d5890f 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
index 8d446cbbf8..8d446cbbf8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index 9085eb56e6..9085eb56e6 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index 5cda0e53fb..9a2abd5139 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -13,11 +13,16 @@ private[tests] trait PresentationCompilerInstance extends TestSettings {
override def compiler = PresentationCompilerInstance.this.compiler
}
+ private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) {
+ override lazy val analyzer = new {
+ val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this
+ } with InteractiveScaladocAnalyzer
+ }
+
protected lazy val compiler: Global = {
prepareSettings(settings)
- new Global(settings, compilerReporter) {
- override def forScaladoc = withDocComments
- }
+ if (withDocComments) new ScaladocEnabledGlobal
+ else new Global(settings, compilerReporter)
}
/**
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index b5ae5f2d75..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
index 4d5b4e1129..4d5b4e1129 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
index 631504cda5..631504cda5 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
index 676feeba8a..676feeba8a 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
index a5c228a549..a5c228a549 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
index 887c3cf29b..887c3cf29b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
index 681204172b..681204172b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 3aaf784cad..7e4c3b842c 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -38,27 +38,15 @@ trait DirectRunner {
def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
System.setProperty("line.separator", "\n")
- // @partest maintainer: we cannot create a fresh file manager here
- // since the FM must respect --buildpath and --classpath from the command line
- // for example, see how it's done in ReflectiveRunner
- //val consFM = new ConsoleFileManager
- //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val latestCompFile = new File(fileManager.LATEST_COMP)
- val latestReflectFile = new File(fileManager.LATEST_REFLECT)
- val latestLibFile = new File(fileManager.LATEST_LIB)
- val latestPartestFile = new File(fileManager.LATEST_PARTEST)
- val latestActorsFile = new File(fileManager.LATEST_ACTORS)
- val scalacheckURL = PathSettings.scalaCheck.toURL
- val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
- )
-
- val kindFiles = onlyValidTestPaths(_kindFiles)
- val pool = Executors.newFixedThreadPool(numThreads)
- val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
- val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
+ val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls
+ val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(allUrls)
+ val kindFiles = onlyValidTestPaths(_kindFiles)
+ val pool = Executors.newFixedThreadPool(numThreads)
+ val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
+ val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
pool.shutdown()
+
try if (!pool.awaitTermination(4, TimeUnit.HOURS))
NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
catch { case t: InterruptedException =>
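
The rewritten block keeps the original shape: build one parent classloader from ScalaCheck plus the latest build artifacts, submit one Callable per test file to a fixed-size pool, and await termination with a generous timeout. A simplified, self-contained sketch of that submit-and-await structure follows; the file names and the callable helper are invented for the example.

    import java.util.concurrent.{ Callable, Executors, TimeUnit }

    object PoolSketch {
      def callable[T](body: => T): Callable[T] = new Callable[T] { def call(): T = body }

      def main(args: Array[String]): Unit = {
        val testFiles = List("t1.scala", "t2.scala", "t3.scala")
        val pool      = Executors.newFixedThreadPool(2)
        val futures   = testFiles map (f => (f, pool submit callable("ran " + f)))
        pool.shutdown()
        if (!pool.awaitTermination(1, TimeUnit.MINUTES))
          println("Thread pool timeout elapsed before all tests were complete!")
        futures foreach { case (f, fut) => println(f + " -> " + fut.get) }
      }
    }
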
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index a4c4e7e6a6..a32c56e973 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -64,6 +64,20 @@ trait FileManager extends FileUtil {
var LATEST_PARTEST: String
var LATEST_ACTORS: String
+ protected def relativeToLibrary(what: String): String = {
+ if (LATEST_LIB endsWith ".jar") {
+ (SFile(LATEST_LIB).parent / s"scala-$what.jar").toAbsolute.path
+ }
+ else {
+ (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
+ }
+ }
+ def latestScaladoc = relativeToLibrary("scaladoc")
+ def latestInteractive = relativeToLibrary("interactive")
+ def latestPaths = List(LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, latestScaladoc, latestInteractive)
+ def latestFiles = latestPaths map (p => new java.io.File(p))
+ def latestUrls = latestFiles map (_.toURI.toURL)
+
var showDiff = false
var updateCheck = false
var showLog = false
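
relativeToLibrary derives the other build artifacts from LATEST_LIB: a sibling scala-<what>.jar when running from jars, or the neighbouring classes/<what> directory when running from a quick build. A standalone sketch of that derivation is below; the paths in main are invented for the example.

    import java.io.File

    object SiblingArtifactSketch {
      def relativeToLibrary(latestLib: String, what: String): String =
        if (latestLib endsWith ".jar")
          new File(new File(latestLib).getParentFile, "scala-" + what + ".jar").getAbsolutePath
        else
          new File(new File(new File(latestLib).getParentFile.getParentFile, "classes"), what).getAbsolutePath

      def main(args: Array[String]): Unit = {
        println(relativeToLibrary("/opt/scala/lib/scala-library.jar", "scaladoc"))   // .../lib/scala-scaladoc.jar
        println(relativeToLibrary("/build/quick/classes/library", "interactive"))    // .../quick/classes/interactive
      }
    }
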
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 3446dd0f72..05cae7b238 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -50,22 +50,15 @@ class ReflectiveRunner {
else // auto detection
new ConsoleFileManager
- import fileManager.
- { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile }
- val files =
- Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
-
- val sepUrls = files map (_.toURL)
- var sepLoader = new URLClassLoader(sepUrls, null)
-
// this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
+ // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
- sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
+ val sepUrls = PathSettings.srcCodeLib.toURI.toURL :: fileManager.latestUrls
+ val sepLoader = new URLClassLoader(sepUrls.toArray, null)
if (isPartestDebug)
- println("Loading classes from:\n" + sepUrls.mkString("\n"))
+ println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
// @partest maintainer: it seems to me that commented lines are incorrect
// if classPath is not empty, then it has been provided by the --classpath option
@@ -76,11 +69,8 @@ class ReflectiveRunner {
// case Some(cp) => Nil
// case _ => files.toList map (_.path)
//}
- val paths = files.toList map (_.path)
-
- val newClasspath = ClassPath.join(paths: _*)
- setProp("java.class.path", newClasspath)
+ setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
// don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite
//setProp("scala.home", latestLibFile.parent.parent.path)
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 842491d56d..93383f5376 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -4,12 +4,9 @@ package internal
import settings.MutableSettings
trait Required { self: SymbolTable =>
-
def picklerPhase: Phase
-
def settings: MutableSettings
- def forInteractive: Boolean
-
- def forScaladoc: Boolean
+ @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false
+ @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false
}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 26ca62c44a..6837f37445 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -69,6 +69,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
+ // TODO - don't allow the owner to be changed without checking invariants, at least
+ // when under some flag. Define per-phase invariants for owner/owned relationships,
+ // e.g. after flatten all classes are owned by package classes, there are lots and
+ // lots of these to be declared (or more realistically, discovered.)
+ protected def saveOriginalOwner(sym: Symbol) {
+ if (originalOwner contains sym) ()
+ else originalOwner(sym) = sym.rawowner
+ }
+ protected def originalEnclosingMethod(sym: Symbol): Symbol = {
+ if (sym.isMethod || sym == NoSymbol) sym
+ else {
+ val owner = originalOwner.getOrElse(sym, sym.rawowner)
+ if (sym.isLocalDummy) owner.enclClass.primaryConstructor
+ else originalEnclosingMethod(owner)
+ }
+ }
+
abstract class SymbolContextApiImpl extends SymbolContextApi {
this: Symbol =>
@@ -948,13 +965,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// e.g. after flatten all classes are owned by package classes, there are lots and
// lots of these to be declared (or more realistically, discovered.)
def owner_=(owner: Symbol) {
- // don't keep the original owner in presentation compiler runs
- // (the map will grow indefinitely, and the only use case is the
- // backend).
- if (!forInteractive) {
- if (originalOwner contains this) ()
- else originalOwner(this) = rawowner
- }
+ saveOriginalOwner(this)
assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflexive code")
if (traceSymbolActivity)
traceSymbols.recordNewSymbolOwner(this, owner)
@@ -1912,15 +1923,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* originalOwner map is not populated for memory considerations (the symbol
 * may hang on to lazy types and in turn to whole (outdated) compilation units).
*/
- def originalEnclosingMethod: Symbol = {
- assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.")
- if (isMethod) this
- else {
- val owner = originalOwner.getOrElse(this, rawowner)
- if (isLocalDummy) owner.enclClass.primaryConstructor
- else owner.originalEnclosingMethod
- }
- }
+ def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this)
/** The method or class which logically encloses the current symbol.
* If the symbol is defined in the initialization part of a template
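
The owner_= and originalEnclosingMethod changes route the originalOwner bookkeeping through protected hooks on the symbol table, so the presentation compiler (see the interactive Global above) can turn the cache off by overriding the hook rather than every call site testing forInteractive. A small sketch of that hook-and-override shape, with strings standing in for symbols; none of these names are the real SymbolTable API.

    import scala.collection.mutable

    object OwnerHookSketch {
      class SymbolTable {
        val originalOwner = mutable.Map.empty[String, String]
        // hook: batch compilation records the first raw owner seen for a symbol
        protected def saveOriginalOwner(sym: String, rawOwner: String): Unit =
          if (!(originalOwner contains sym)) originalOwner(sym) = rawOwner
        def changeOwner(sym: String, rawOwner: String, newOwner: String): Unit = {
          saveOriginalOwner(sym, rawOwner)
          // ... the actual owner update would happen here ...
        }
      }

      class InteractiveSymbolTable extends SymbolTable {
        // presentation-compiler runs: never populate the map, it would grow without bound
        override protected def saveOriginalOwner(sym: String, rawOwner: String): Unit = ()
      }

      def main(args: Array[String]): Unit = {
        val batch = new SymbolTable;            batch.changeOwner("f", "A", "B")
        val ide   = new InteractiveSymbolTable; ide.changeOwner("f", "A", "B")
        println("batch: " + batch.originalOwner + ", ide: " + ide.originalOwner)
      }
    }
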
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index f4befb1470..a6c5367425 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -14,7 +14,6 @@ import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
import util.Statistics
-import scala.runtime.ObjectRef
import util.ThreeValues._
import Variance._
@@ -73,28 +72,33 @@ import Variance._
// only used during erasure of derived value classes.
*/
-trait Types extends api.Types { self: SymbolTable =>
+trait Types
+ extends api.Types
+ with tpe.TypeComparers
+ with tpe.TypeToStrings
+ with tpe.CommonOwners
+ with tpe.GlbLubs
+ with tpe.TypeMaps
+ with tpe.TypeConstraints { self: SymbolTable =>
+
import definitions._
import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
- private final val LogPendingSubTypesThreshold = 50
- private final val LogPendingBaseTypesThreshold = 50
- private final val LogVolatileThreshold = 50
+ protected[internal] final val DefaultLogThreshhold = 50
+ private final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
+ private final val LogVolatileThreshold = DefaultLogThreshhold
/** A don't care value for the depth parameter in lubs/glbs and related operations. */
- private final val AnyDepth = -3
+ protected[internal] final val AnyDepth = -3
/** Decrement depth unless it is a don't care. */
- private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
+ protected[internal] final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
- private final val printLubs = sys.props contains "scalac.debug.lub"
private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
private final val breakCycles = settings.breakCycles.value
- /** In case anyone wants to turn off lub verification without reverting anything. */
- private final val verifyLubs = true
/** In case anyone wants to turn off type parameter bounds being used
* to seed type constraints.
*/
@@ -107,80 +111,6 @@ trait Types extends api.Types { self: SymbolTable =>
*/
var skolemizationLevel = 0
- /** A log of type variable with their original constraints. Used in order
- * to undo constraints in the case of isSubType/isSameType failure.
- */
- lazy val undoLog = newUndoLog
-
- protected def newUndoLog = new UndoLog
-
- class UndoLog extends Clearable {
- private type UndoPairs = List[(TypeVar, TypeConstraint)]
- //OPT this method is public so we can do `manual inlining`
- var log: UndoPairs = List()
-
- /*
- * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
- *
- * The idea behind explicit locking mechanism is that all public methods that access mutable state
- * will have to obtain the lock for their entire execution so both reads and writes can be kept in
- * right order. Originally, that was achieved by overriding those public methods in
- * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
- * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
- * can go away.
- *
- * By using explicit locking we can achieve inlining.
- *
- * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
- * places implementation of `undo` or `undoUnless`). This should be changed back to protected
- * once inliner is fixed.
- */
- def lock(): Unit = ()
- def unlock(): Unit = ()
-
- // register with the auto-clearing cache manager
- perRunCaches.recordCache(this)
-
- /** Undo all changes to constraints to type variables upto `limit`. */
- //OPT this method is public so we can do `manual inlining`
- def undoTo(limit: UndoPairs) {
- assertCorrectThread()
- while ((log ne limit) && log.nonEmpty) {
- val (tv, constr) = log.head
- tv.constr = constr
- log = log.tail
- }
- }
-
- /** No sync necessary, because record should only
- * be called from within an undo or undoUnless block,
- * which is already synchronized.
- */
- private[reflect] def record(tv: TypeVar) = {
- log ::= ((tv, tv.constr.cloneInternal))
- }
-
- def clear() {
- lock()
- try {
- if (settings.debug.value)
- self.log("Clearing " + log.size + " entries from the undoLog.")
- log = Nil
- } finally unlock()
- }
-
- // `block` should not affect constraints on typevars
- def undo[T](block: => T): T = {
- lock()
- try {
- val before = log
-
- try block
- finally undoTo(before)
- } finally unlock()
- }
- }
-
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
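
The removed UndoLog (presumably now housed in the tpe.TypeConstraints trait mixed in above) supports speculative constraint solving: each mutation of a type variable's constraint is recorded, a block such as a subtype test runs, and on exit the log rolls everything back to the recorded point. The sketch below reduces the idea to a mutable cell; it is illustrative and omits the locking and per-run cache registration of the real class.

    object UndoLogSketch {
      final class Cell(var value: Int)

      object UndoLog {
        private var log: List[(Cell, Int)] = Nil
        def record(c: Cell): Unit = log ::= ((c, c.value))
        // run a speculative block, then restore every recorded cell to its old value
        def undo[T](block: => T): T = {
          val before = log
          try block
          finally {
            while (log ne before) {
              val (c, v) = log.head
              c.value = v
              log = log.tail
            }
          }
        }
      }

      def main(args: Array[String]): Unit = {
        val tv = new Cell(0)
        UndoLog.undo { UndoLog.record(tv); tv.value = 42; println("inside: " + tv.value) }
        println("after rollback: " + tv.value) // back to 0
      }
    }
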
@@ -3732,72 +3662,7 @@ trait Types extends api.Types { self: SymbolTable =>
newExistentialType(tparams1, tpe1)
}
- /** Normalize any type aliases within this type (@see Type#normalize).
- * Note that this depends very much on the call to "normalize", not "dealias",
- * so it is no longer carries the too-stealthy name "deAlias".
- */
- object normalizeAliases extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType =>
- def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
- mapOver(logResult(msg)(tp.normalize))
- case _ => mapOver(tp)
- }
- }
-
- /** Remove any occurrence of type <singleton> from this type and its parents */
- object dropSingletonType extends TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, SingletonClass, _) =>
- AnyClass.tpe
- case tp1 @ RefinedType(parents, decls) =>
- parents filter (_.typeSymbol != SingletonClass) match {
- case Nil => AnyClass.tpe
- case p :: Nil if decls.isEmpty => mapOver(p)
- case ps => mapOver(copyRefinedType(tp1, ps, decls))
- }
- case tp1 =>
- mapOver(tp1)
- }
- }
- }
- /** Type with all top-level occurrences of abstract types replaced by their bounds */
- object abstractTypesToBounds extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias)
- case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi)
- case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls)
- case AnnotatedType(_, _, _) => mapOver(tp)
- case _ => tp // no recursion - top level only
- }
- }
-
- // Set to true for A* => Seq[A]
- // (And it will only rewrite A* in method result types.)
- // This is the pre-existing behavior.
- // Or false for Seq[A] => Seq[A]
- // (It will rewrite A* everywhere but method parameters.)
- // This is the specified behavior.
- protected def etaExpandKeepsStar = false
-
- /** Turn any T* types into Seq[T] except when
- * in method parameter position.
- */
- object dropIllegalStarTypes extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case MethodType(params, restpe) =>
- // Not mapping over params
- val restpe1 = apply(restpe)
- if (restpe eq restpe1) tp
- else MethodType(params, restpe1)
- case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
- seqType(arg)
- case _ =>
- if (etaExpandKeepsStar) tp else mapOver(tp)
- }
- }
// Hash consing --------------------------------------------------------------
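
The maps removed above (normalizeAliases, dropSingletonType, abstractTypesToBounds, dropIllegalStarTypes, presumably relocated to the tpe.TypeMaps trait mixed in earlier) all follow the TypeMap idiom: handle the cases the transformation cares about and delegate the rest to a generic mapOver traversal. The sketch below shows the idiom on a toy type representation; Tpe, Ref and Refined are invented stand-ins, not reflect.internal types.

    object TypeMapSketch {
      sealed trait Tpe
      case class Ref(name: String)           extends Tpe
      case class Refined(parents: List[Tpe]) extends Tpe

      abstract class TpeMap extends (Tpe => Tpe) {
        // generic structural traversal: rebuild children with `this`, leave leaves alone
        def mapOver(tp: Tpe): Tpe = tp match {
          case Refined(parents) => Refined(parents map this)
          case _                => tp
        }
      }

      // loosely analogous to dropSingletonType: strip a marker parent, recurse into the rest
      object dropMarker extends TpeMap {
        def apply(tp: Tpe): Tpe = tp match {
          case Refined(parents) => mapOver(Refined(parents filterNot (_ == Ref("Marker"))))
          case other            => mapOver(other)
        }
      }

      def main(args: Array[String]): Unit =
        println(dropMarker(Refined(List(Ref("Marker"), Ref("A")))))
    }
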
@@ -3817,121 +3682,6 @@ trait Types extends api.Types { self: SymbolTable =>
// Helper Classes ---------------------------------------------------------
- /** @PP: Unable to see why these apparently constant types should need vals
- * in every TypeConstraint, I lifted them out.
- */
- private lazy val numericLoBound = IntClass.tpe
- private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
-
- /** A class expressing upper and lower bounds constraints of type variables,
- * as well as their instantiations.
- */
- class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
- def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
- def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
- def this() = this(List(), List())
-
- /* Syncnote: Type constraints are assumed to be used from only one
- * thread. They are not exposed in api.Types and are used only locally
- * in operations that are exposed from types. Hence, no syncing of any
- * variables should be necessary.
- */
-
- /** Guard these lists against AnyClass and NothingClass appearing,
- * else loBounds.isEmpty will have different results for an empty
- * constraint and one with Nothing as a lower bound. [Actually
- * guarding addLoBound/addHiBound somehow broke raw types so it
- * only guards against being created with them.]
- */
- private var lobounds = lo0 filterNot typeIsNothing
- private var hibounds = hi0 filterNot typeIsAny
- private var numlo = numlo0
- private var numhi = numhi0
- private var avoidWidening = avoidWidening0
-
- def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
- def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
- def avoidWiden: Boolean = avoidWidening
-
- def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- // For some reason which is still a bit fuzzy, we must let Nothing through as
- // a lower bound despite the fact that Nothing is always a lower bound. My current
- // supposition is that the side-effecting type constraint accumulation mechanism
- // depends on these subtype tests being performed to make forward progress when
- // there are mutually recursive type vars.
- // See pos/t6367 and pos/t6499 for the competing test cases.
- val mustConsider = tp.typeSymbol match {
- case NothingClass => true
- case _ => !(lobounds contains tp)
- }
- if (mustConsider) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
- }
- else lobounds ::= tp
- }
- }
-
- def checkWidening(tp: Type) {
- if(tp.isStable) avoidWidening = true
- else tp match {
- case HasTypeMember(_, _) => avoidWidening = true
- case _ =>
- }
- }
-
- def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- // My current test case only demonstrates the need to let Nothing through as
- // a lower bound, but I suspect the situation is symmetrical.
- val mustConsider = tp.typeSymbol match {
- case AnyClass => true
- case _ => !(hibounds contains tp)
- }
- if (mustConsider) {
- checkWidening(tp)
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
- }
- else hibounds ::= tp
- }
- }
-
- def isWithinBounds(tp: Type): Boolean =
- lobounds.forall(_ <:< tp) &&
- hibounds.forall(tp <:< _) &&
- (numlo == NoType || (numlo weak_<:< tp)) &&
- (numhi == NoType || (tp weak_<:< numhi))
-
- var inst: Type = NoType // @M reduce visibility?
-
- def instValid = (inst ne null) && (inst ne NoType)
-
- def cloneInternal = {
- val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
- tc.inst = inst
- tc
- }
-
- override def toString = {
- val boundsStr = {
- val lo = loBounds filterNot typeIsNothing
- val hi = hiBounds filterNot typeIsAny
- val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
- val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
- lostr ++ histr mkString ("[", " | ", "]")
- }
- if (inst eq NoType) boundsStr
- else boundsStr + " _= " + inst.safeToString
- }
- }
-
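// A simplified, standalone sketch of the bookkeeping in TypeConstraint (not the compiler's
// class): lower and upper bounds are accumulated, and a candidate instantiation is checked
// against all of them. Uses only the public runtime-reflection Type and <:<.
object ConstraintDemo extends App {
  import scala.reflect.runtime.universe._

  final class SimpleConstraint {
    private var los: List[Type] = Nil
    private var his: List[Type] = Nil
    def addLoBound(tp: Type): Unit = los ::= tp
    def addHiBound(tp: Type): Unit = his ::= tp
    def isWithinBounds(tp: Type): Boolean = los.forall(_ <:< tp) && his.forall(tp <:< _)
  }

  val c = new SimpleConstraint
  c.addLoBound(typeOf[Null])
  c.addHiBound(typeOf[AnyRef])
  println(c.isWithinBounds(typeOf[String])) // true:  Null <: String <: AnyRef
  println(c.isWithinBounds(typeOf[Int]))    // false: Null is not <: Int, Int is not <: AnyRef
}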
class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, underlying, _) if annotated => apply(underlying)
@@ -3949,246 +3699,6 @@ trait Types extends api.Types { self: SymbolTable =>
object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
- trait AnnotationFilter extends TypeMap {
- def keepAnnotation(annot: AnnotationInfo): Boolean
-
- override def mapOver(annot: AnnotationInfo) =
- if (keepAnnotation(annot)) super.mapOver(annot)
- else UnmappableAnnotation
- }
-
- trait KeepOnlyTypeConstraints extends AnnotationFilter {
- // filter keeps only type constraint annotations
- def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
- }
-
- // todo. move these into scala.reflect.api
-
- /** A prototype for mapping a function over all possible types
- */
- abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) {
- def this() = this(trackVariance = false)
- def apply(tp: Type): Type
-
- private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant
-
- def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x }
- def variance = _variance
-
- /** Map this function over given type */
- def mapOver(tp: Type): Type = tp match {
- case tr @ TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- val args1 = (
- if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty)
- mapOverArgs(args, sym.typeParams)
- else
- args mapConserve this
- )
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
- case ThisType(_) => tp
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else singleType(pre1, sym)
- }
- case MethodType(params, result) =>
- val params1 = flipped(mapOver(params))
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- val tparams1 = flipped(mapOver(tparams))
- val result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case NullaryMethodType(result) =>
- val result1 = this(result)
- if (result1 eq result) tp
- else NullaryMethodType(result1)
- case ConstantType(_) => tp
- case SuperType(thistp, supertp) =>
- val thistp1 = this(thistp)
- val supertp1 = this(supertp)
- if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else SuperType(thistp1, supertp1)
- case TypeBounds(lo, hi) =>
- val lo1 = flipped(this(lo))
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case BoundedWildcardType(bounds) =>
- val bounds1 = this(bounds)
- if (bounds1 eq bounds) tp
- else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
- case rtp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve this
- val decls1 = mapOver(decls)
- copyRefinedType(rtp, parents1, decls1)
- case ExistentialType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- val result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
- case OverloadedType(pre, alts) =>
- val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
- if (pre1 eq pre) tp
- else OverloadedType(pre1, alts)
- case AntiPolyType(pre, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve this
- if ((pre1 eq pre) && (args1 eq args)) tp
- else AntiPolyType(pre1, args1)
- case tv@TypeVar(_, constr) =>
- if (constr.instValid) this(constr.inst)
- else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
- case NotNullType(tp) =>
- val tp1 = this(tp)
- if (tp1 eq tp) tp
- else NotNullType(tp1)
- case AnnotatedType(annots, atp, selfsym) =>
- val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
- if ((annots1 eq annots) && (atp1 eq atp)) tp
- else if (annots1.isEmpty) atp1
- else AnnotatedType(annots1, atp1, selfsym)
-/*
- case ErrorType => tp
- case WildcardType => tp
- case NoType => tp
- case NoPrefix => tp
- case ErasedSingleType(sym) => tp
-*/
- case _ =>
- tp
- // throw new Error("mapOver inapplicable for " + tp);
- }
-
- def withVariance[T](v: Variance)(body: => T): T = {
- val saved = variance
- variance = v
- try body finally variance = saved
- }
- @inline final def flipped[T](body: => T): T = {
- if (trackVariance) variance = variance.flip
- try body
- finally if (trackVariance) variance = variance.flip
- }
- protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = (
- if (trackVariance)
- map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg)))
- else
- args mapConserve this
- )
- /** Applies this map to the symbol's info, setting variance = Invariant
- * if necessary when the symbol is an alias.
- */
- private def applyToSymbolInfo(sym: Symbol): Type = {
- if (trackVariance && !variance.isInvariant && sym.isAliasType)
- withVariance(Invariant)(this(sym.info))
- else
- this(sym.info)
- }
-
- /** Called by mapOver to determine whether the original symbols can
- * be returned, or whether they must be cloned.
- */
- protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = {
- @tailrec def loop(syms: List[Symbol]): Boolean = syms match {
- case Nil => true
- case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs)
- }
- loop(origSyms)
- }
-
- /** Map this function over given scope */
- def mapOver(scope: Scope): Scope = {
- val elems = scope.toList
- val elems1 = mapOver(elems)
- if (elems1 eq elems) scope
- else newScopeWith(elems1: _*)
- }
-
- /** Map this function over given list of symbols */
- def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- // fast path in case nothing changes due to map
- if (noChangeToSymbols(origSyms)) origSyms
- // map is not the identity --> do cloning properly
- else cloneSymbolsAndModify(origSyms, TypeMap.this)
- }
-
- def mapOver(annot: AnnotationInfo): AnnotationInfo = {
- val AnnotationInfo(atp, args, assocs) = annot
- val atp1 = mapOver(atp)
- val args1 = mapOverAnnotArgs(args)
- // there is no need to rewrite assocs, as they are constants
-
- if ((args eq args1) && (atp eq atp1)) annot
- else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
- else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
- }
-
- def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
- val annots1 = annots mapConserve mapOver
- if (annots1 eq annots) annots
- else annots1 filterNot (_ eq UnmappableAnnotation)
- }
-
- /** Map over a set of annotation arguments. If any
- * of the arguments cannot be mapped, then return Nil. */
- def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
- val args1 = args mapConserve mapOver
- if (args1 contains UnmappableTree) Nil
- else args1
- }
-
- def mapOver(tree: Tree): Tree =
- mapOver(tree, () => return UnmappableTree)
-
- /** Map a tree that is part of an annotation argument.
- * If the tree cannot be mapped, then invoke giveup().
- * The default is to transform the tree with
- * TypeMapTransformer.
- */
- def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
- (new TypeMapTransformer).transform(tree)
-
- /** This transformer leaves the tree alone except to remap
- * its types. */
- class TypeMapTransformer extends Transformer {
- override def transform(tree: Tree) = {
- val tree1 = super.transform(tree)
- val tpe1 = TypeMap.this(tree1.tpe)
- if ((tree eq tree1) && (tree.tpe eq tpe1))
- tree
- else
- tree1.shallowDuplicate.setType(tpe1)
- }
- }
- }
-
- abstract class TypeTraverser extends TypeMap {
- def traverse(tp: Type): Unit
- def apply(tp: Type): Type = { traverse(tp); tp }
- }
-
- abstract class TypeTraverserWithResult[T] extends TypeTraverser {
- def result: T
- def clear(): Unit
- }
-
- abstract class TypeCollector[T](initial: T) extends TypeTraverser {
- var result: T = _
- def collect(tp: Type) = {
- result = initial
- traverse(tp)
- result
- }
- }
-
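// A toy illustration (invented ADT, not compiler code) of the central design of mapOver above:
// recurse structurally, but return the *same* instance when no component changed, so unmodified
// types are shared rather than rebuilt. List#mapConserve provides exactly this for the children.
object MapConserveDemo extends App {
  sealed trait Tpe
  case class Ref(name: String, args: List[Tpe] = Nil) extends Tpe

  def mapOver(f: Tpe => Tpe)(tp: Tpe): Tpe = tp match {
    case Ref(name, args) =>
      val args1 = args mapConserve f        // original list kept if f changes nothing
      if (args1 eq args) tp else Ref(name, args1)
  }

  val listOfInt = Ref("List", List(Ref("Int")))

  val unchanged = mapOver(tp => tp)(listOfInt)
  println(unchanged eq listOfInt)           // true: identity map returns the original instance

  val widen: Tpe => Tpe = {
    case Ref("Int", _) => Ref("AnyVal")
    case tp            => tp
  }
  println(mapOver(widen)(listOfInt))        // Ref(List,List(Ref(AnyVal,List())))
}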
/** Repack existential types, otherwise they sometimes get unpacked in the
* wrong location (type inference comes up with an unexpected skolem)
*/
@@ -4234,707 +3744,8 @@ trait Types extends api.Types { self: SymbolTable =>
&& isRawIfWithoutArgs(sym)
)
- /** The raw to existential map converts a ''raw type'' to an existential type.
- * It is necessary because we might have read a raw type of a
- * parameterized Java class from a class file. At the time we read the type
- * the corresponding class file might still not be read, so we do not
- * know what the type parameters of the type are. Therefore
- * the conversion of raw types to existential types might not have taken place
- * in ClassfileParser.sigToType (where it is usually done).
- */
- def rawToExistential = new TypeMap {
- private var expanded = immutable.Set[Symbol]()
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- if (expanded contains sym) AnyRefClass.tpe
- else try {
- expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym))
- existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
- } finally {
- expanded -= sym
- }
- case _ =>
- mapOver(tp)
- }
- }
- /***
- *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
- object rawToExistentialInJava extends TypeMap {
- def apply(tp: Type): Type = tp match {
- // any symbol that occurs in a java sig, not just java symbols
- // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
- case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
- val eparams = typeParamsToExistentials(sym, sym.typeParams)
- existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
- case _ =>
- mapOver(tp)
- }
- }
- */
-
- /** Used by existentialAbstraction.
- */
- class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) {
- private val occurCount = mutable.HashMap[Symbol, Int]()
- private def countOccs(tp: Type) = {
- tp foreach {
- case TypeRef(_, sym, _) =>
- if (tparams contains sym)
- occurCount(sym) += 1
- case _ => ()
- }
- }
- def extrapolate(tpe: Type): Type = {
- tparams foreach (t => occurCount(t) = 0)
- countOccs(tpe)
- for (tparam <- tparams)
- countOccs(tparam.info)
-
- apply(tpe)
- }
-
- /** If these conditions all hold:
- * 1) we are in covariant (or contravariant) position
- * 2) this type occurs exactly once in the existential scope
- * 3) the widened upper (or lower) bound of this type contains no references to tparams
- * Then we replace this lone occurrence of the type with the widened upper (or lower) bound.
- * All other types pass through unchanged.
- */
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- if (variance.isInvariant) tp1
- else tp1 match {
- case TypeRef(pre, sym, args) if tparams contains sym =>
- val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- val count = occurCount(sym)
- val containsTypeParam = tparams exists (repl contains _)
- def msg = {
- val word = if (variance.isPositive) "upper" else "lower"
- s"Widened lone occurrence of $tp1 inside existential to $word bound"
- }
- if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
- logResult(msg)(repl)
- else
- tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- // Do not discard the types of existential ident's. The
- // symbol of the Ident itself cannot be listed in the
- // existential's parameters, so the resulting existential
- // type would be ill-formed.
- override def mapOver(tree: Tree) = tree match {
- case Ident(_) if tree.tpe.isStable => tree
- case _ => super.mapOver(tree)
- }
- }
-
def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
- /** Might the given symbol be important when calculating the prefix
- * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
- * the result will be `tp` unchanged if `pre` is trivial and `clazz`
- * is a symbol such that isPossiblePrefix(clazz) == false.
- */
- def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
-
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
- )
-
- def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
- new AsSeenFromMap(pre, clazz)
-
- /** A map to compute the asSeenFrom method.
- */
- class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
- // Some example source constructs relevant in asSeenFrom:
- //
- // object CaptureThis {
- // trait X[A] { def f: this.type = this }
- // class Y[A] { def f: this.type = this }
- // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton
- // def f1[B] = new X[B] { }
- // // TODO - why is the behavior different when it's a class?
- // def f2[B] = new Y[B] { }
- // }
- // class CaptureVal[T] {
- // val f: java.util.List[_ <: T] = null
- // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1
- // def g = f get 0
- // }
- // class ClassParam[T] {
- // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam)
- // class Inner(lhs: T) { def f = lhs }
- // }
- def capturedParams: List[Symbol] = _capturedParams
- def capturedSkolems: List[Symbol] = _capturedSkolems
-
- def apply(tp: Type): Type = tp match {
- case tp @ ThisType(_) => thisTypeAsSeen(tp)
- case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp)
- case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp)
- case _ => mapOver(tp)
- }
-
- private var _capturedSkolems: List[Symbol] = Nil
- private var _capturedParams: List[Symbol] = Nil
- private val isStablePrefix = seenFromPrefix.isStable
-
- // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate
- // but less succinct name.
- private def isBaseClassOfEnclosingClass(base: Symbol) = {
- def loop(encl: Symbol): Boolean = (
- isPossiblePrefix(encl)
- && ((encl isSubClass base) || loop(encl.owner.enclClass))
- )
- // The hasCompleteInfo guard is necessary to avoid cycles during the typing
- // of certain classes, notably ones defined inside package objects.
- !base.hasCompleteInfo || loop(seenFromClass)
- }
-
- /** Is the symbol a class type parameter from one of the enclosing
- * classes, or a base class of one of them?
- */
- private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = (
- sym.isTypeParameter
- && sym.owner.isClass
- && isBaseClassOfEnclosingClass(sym.owner)
- )
-
- /** Creates an existential representing a type parameter which appears
- * in the prefix of a ThisType.
- */
- protected def captureThis(pre: Type, clazz: Symbol): Type = {
- capturedParams find (_.owner == clazz) match {
- case Some(p) => p.tpe
- case _ =>
- val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
- _capturedParams ::= qvar
- debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
- qvar.tpe
- }
- }
- protected def captureSkolems(skolems: List[Symbol]) {
- for (p <- skolems; if !(capturedSkolems contains p)) {
- debuglog(s"Captured $p seen from $seenFromPrefix")
- _capturedSkolems ::= p
- }
- }
-
- /** Find the type argument in an applied type which corresponds to a type parameter.
- * The arguments are required to be related as follows, through intermediary `clazz`.
- * An exception will be thrown if this is violated.
- *
- * @param lhs its symbol is a type parameter of `clazz`
- * @param rhs a type application constructed from `clazz`
- */
- private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
- val TypeRef(_, lhsSym, lhsArgs) = lhs
- val TypeRef(_, rhsSym, rhsArgs) = rhs
- require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
-
- // Find the type parameter position; we'll use the corresponding argument
- val argIndex = rhsSym.typeParams indexOf lhsSym
-
- if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application
- appliedType(rhsArgs(argIndex), lhsArgs mapConserve this)
- else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641
- ErrorType
- else
- abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs")
- }
-
- // 0) @pre: `classParam` is a class type parameter
- // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam`
- // 2) Take the base type of the prefix at that point with respect to the owning class
- // 3) Solve for the type parameters through correspondence with the type args of the base type
- //
- // Only class type parameters (and not skolems) are considered, because other type parameters
- // are not influenced by the prefix through which they are seen. Note that type params of
- // anonymous type functions, which currently can only arise from normalising type aliases, are
- // owned by the type alias of which they are the eta-expansion.
- private def classParameterAsSeen(classParam: Type): Type = {
- val TypeRef(_, tparam, _) = classParam
-
- def loop(pre: Type, clazz: Symbol): Type = {
- // have to deconst because it may be a Class[T]
- def nextBase = (pre baseType clazz).deconst
- //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
- if (skipPrefixOf(pre, clazz))
- mapOver(classParam)
- else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
- loop(nextBase.prefix, clazz.owner)
- else nextBase match {
- case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied)
- case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
- case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
- }
- }
- loop(seenFromPrefix, seenFromClass)
- }
-
- // Does the candidate symbol match the given prefix and class?
- // Since pre may be something like ThisType(A) where trait A { self: B => },
- // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
- // B will not be considered.
- private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match {
- case _: TypeVar => false
- case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz)
- }
-
- // Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
- private[this] var wroteAnnotation = false
- private object annotationArgRewriter extends TypeMapTransformer {
- private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz)
-
- // what symbol should really be used?
- private def newThis(): Tree = {
- wroteAnnotation = true
- val presym = seenFromPrefix.widen.typeSymbol
- val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix
- gen.mkAttributedQualifier(seenFromPrefix, thisSym)
- }
-
- /** Rewrite `This` trees in annotation argument trees */
- override def transform(tree: Tree): Tree = super.transform(tree) match {
- case This(_) if matchesThis(tree.symbol) => newThis()
- case tree => tree
- }
- }
-
- // This becomes considerably cheaper if we optimize for the common cases:
- // where the prefix is stable and where no This nodes are rewritten. If
- // either is true, then we don't need to worry about calling giveup. So if
- // the prefix is unstable, use a stack variable to indicate whether the tree
- // was touched. This takes us to one allocation per AsSeenFromMap rather
- // than an allocation on every call to mapOver, and no extra work when the
- // tree only has its types remapped.
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- if (isStablePrefix)
- annotationArgRewriter transform tree
- else {
- val saved = wroteAnnotation
- wroteAnnotation = false
- try annotationArgRewriter transform tree
- finally if (wroteAnnotation) giveup() else wroteAnnotation = saved
- }
- }
-
- private def thisTypeAsSeen(tp: ThisType): Type = {
- def loop(pre: Type, clazz: Symbol): Type = {
- val pre1 = pre match {
- case SuperType(thistpe, _) => thistpe
- case _ => pre
- }
- if (skipPrefixOf(pre, clazz))
- mapOver(tp) // TODO - is mapOver necessary here?
- else if (!matchesPrefixAndClass(pre, clazz)(tp.sym))
- loop((pre baseType clazz).prefix, clazz.owner)
- else if (pre1.isStable)
- pre1
- else
- captureThis(pre1, clazz)
- }
- loop(seenFromPrefix, seenFromClass)
- }
-
- private def singleTypeAsSeen(tp: SingleType): Type = {
- val SingleType(pre, sym) = tp
-
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else if (pre1.isStable) singleType(pre1, sym)
- else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
- }
-
- override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)"
- }
-
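// A small sketch of what AsSeenFromMap computes, using the public Type#asSeenFrom from runtime
// reflection; the Box/IntBox classes are invented for the example.
object AsSeenFromDemo extends App {
  import scala.reflect.runtime.universe._

  class Box[T] { def value: T = ??? }
  class IntBox extends Box[Int]

  val rawSig = typeOf[Box[Any]].member(newTermName("value")).typeSignature // => T
  // Reinterpret T from the prefix IntBox: the class type parameter becomes Int.
  println(rawSig.asSeenFrom(typeOf[IntBox], typeOf[Box[Any]].typeSymbol))  // => Int
}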
- /** A base class to compute all substitutions */
- abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
- assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
-
- /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
- protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
-
- /** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, tp: T): Type
-
- protected def renameBoundSyms(tp: Type): Type = tp match {
- case MethodType(ps, restp) =>
- createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
- case PolyType(bs, restp) =>
- createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
- case ExistentialType(bs, restp) =>
- createFromClonedSymbols(bs, restp)(newExistentialType)
- case _ =>
- tp
- }
-
- @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = (
- if (from.isEmpty) tp
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
- )
-
- def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
- val boundSyms = tp0.boundSyms
- val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
- val tp = mapOver(tp1)
- def substFor(sym: Symbol) = subst(tp, sym, from, to)
-
- tp match {
- // @M
- // 1) arguments must also be substituted (even when the "head" of the
- // applied type has already been substituted)
- // example: (subst RBound[RT] from [type RT,type RBound] to
- // [type RT&,type RBound&]) = RBound&[RT&]
- // 2) avoid loops (which occur because alpha-conversion is
- // not performed properly imo)
- // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
- // we must replace the a in Iterable[a] by (a,b)
- // (must not recurse --> loops)
- // 3) replacing m by List in m[Int] should yield List[Int], not just List
- case TypeRef(NoPrefix, sym, args) =>
- val tcon = substFor(sym)
- if ((tp eq tcon) || args.isEmpty) tcon
- else appliedType(tcon.typeConstructor, args)
- case SingleType(NoPrefix, sym) =>
- substFor(sym)
- case _ =>
- tp
- }
- }
- }
-
- /** A map to implement the `substSym` method. */
- class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
- def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
-
- protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
- case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
- case SingleType(pre, _) => singleType(pre, sym)
- }
- @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = (
- if (from.isEmpty) sym
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
- else if (matches(from.head, sym)) to.head
- else subst(sym, from.tail, to.tail)
- )
- private def substFor(sym: Symbol) = subst(sym, from, to)
-
- override def apply(tp: Type): Type = (
- if (from.isEmpty) tp
- else tp match {
- case TypeRef(pre, sym, args) if pre ne NoPrefix =>
- val newSym = substFor(sym)
- // mapOver takes care of subst'ing in args
- mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
- // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- case SingleType(pre, sym) if pre ne NoPrefix =>
- val newSym = substFor(sym)
- mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
- case _ =>
- super.apply(tp)
- }
- )
-
- object mapTreeSymbols extends TypeMapTransformer {
- val strictCopy = newStrictTreeCopier
-
- def termMapsTo(sym: Symbol) = from indexOf sym match {
- case -1 => None
- case idx => Some(to(idx))
- }
-
- // if tree.symbol is mapped to another symbol, passes the new symbol into the
- // constructor `trans` and sets the symbol and the type on the resulting tree.
- def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
- case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
- case None => tree
- }
-
- // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
- override def transform(tree: Tree) = {
- // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
- super.transform(tree) match {
- case id @ Ident(_) =>
- transformIfMapped(id)(toSym =>
- strictCopy.Ident(id, toSym.name))
-
- case sel @ Select(qual, name) =>
- transformIfMapped(sel)(toSym =>
- strictCopy.Select(sel, qual, toSym.name))
-
- case tree => tree
- }
- }
- }
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- mapTreeSymbols.transform(tree)
- }
- }
-
- /** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
- protected def toType(fromtp: Type, tp: Type) = tp
-
- override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
- object trans extends TypeMapTransformer {
- override def transform(tree: Tree) = tree match {
- case Ident(name) =>
- from indexOf tree.symbol match {
- case -1 => super.transform(tree)
- case idx =>
- val totpe = to(idx)
- if (totpe.isStable) tree.duplicate setType totpe
- else giveup()
- }
- case _ =>
- super.transform(tree)
- }
- }
- trans.transform(tree)
- }
- }
-
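// The user-facing counterpart of these substitution maps is Type#substituteTypes (and
// Type#substituteSymbols) in the public reflection API. A minimal sketch; `Util.pair`
// is invented for the example.
object SubstDemo extends App {
  import scala.reflect.runtime.universe._

  object Util { def pair[A](x: A): (A, A) = (x, x) }

  typeOf[Util.type].member(newTermName("pair")).typeSignature match {
    case PolyType(List(a), methodSig) =>
      // Replace the method type parameter A by String throughout the method type,
      // just as SubstTypeMap replaces its `from` symbols by the `to` types.
      println(methodSig.substituteTypes(List(a), List(typeOf[String]))) // roughly: (x: String)(String, String)
  }
}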
- /** A map to implement the `substThis` method. */
- class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym == from) => to
- case _ => mapOver(tp)
- }
- }
-
- class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
- def apply(tp: Type): Type = try {
- tp match {
- case TypeRef(_, sym, _) if from contains sym =>
- BoundedWildcardType(sym.info.bounds)
- case _ =>
- mapOver(tp)
- }
- } catch {
- case ex: MalformedType =>
- WildcardType
- }
- }
-
-// dependent method types
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (tp.isImmediatelyDependent) result = true
- else if (!result) mapOver(tp)
- }
- }
-
- object ApproximateDependentMap extends TypeMap {
- def apply(tp: Type): Type =
- if (tp.isImmediatelyDependent) WildcardType
- else mapOver(tp)
- }
-
- /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
- */
- class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
- private val actuals = actuals0.toIndexedSeq
- private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
-
- private object StableArg {
- def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
- tp.isStable && (tp.typeSymbol != NothingClass)
- )
- }
- private object Arg {
- def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
- }
-
- def apply(tp: Type): Type = mapOver(tp) match {
- // unsound to replace args by unstable actual #3873
- case SingleType(NoPrefix, StableArg(arg)) => arg
- // (soundly) expand type alias selections on implicit arguments,
- // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
- case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
- val arg = actuals(pid)
- val res = typeRef(arg, sym, targs)
- if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
- // don't return the original `tp`, which may be different from `tp1`,
- // due to dropping annotations
- case tp1 => tp1
- }
-
- /* Return the type symbol for referencing a parameter inside the existential quantifier.
- * (Only needed if the actual is unstable.)
- */
- private def existentialFor(pid: Int) = {
- if (existentials(pid) eq null) {
- val param = params(pid)
- existentials(pid) = (
- param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
- setInfo singletonBounds(actuals(pid))
- )
- }
- existentials(pid)
- }
-
- //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
- override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
- // TODO: this should be simplified; in the stable case, one can
- // probably just use an Ident to the tree.symbol.
- //
- // @PP: That leads to failure here, where stuff no longer has type
- // 'String @Annot("stuff")' but 'String @Annot(x)'.
- //
- // def m(x: String): String @Annot(x) = x
- // val stuff = m("stuff")
- //
- // (TODO cont.) Why an existential in the non-stable case?
- //
- // @PP: In the following:
- //
- // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
- //
- // m is typed as 'String @Annot(x) forSome { val x: String }'.
- //
- // Both examples are from run/constrained-types.scala.
- object treeTrans extends Transformer {
- override def transform(tree: Tree): Tree = tree.symbol match {
- case StableArg(actual) =>
- gen.mkAttributedQualifier(actual, tree.symbol)
- case Arg(pid) =>
- val sym = existentialFor(pid)
- Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
- case _ =>
- super.transform(tree)
- }
- }
- treeTrans transform arg
- }
- }
-
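// A language-level illustration (no compiler internals) of the dependent method types this map
// instantiates: the result type mentions a parameter, and the stable actual argument's type is
// substituted for it at the call site.
object DependentMethodDemo extends App {
  class Graph { class Node }

  def firstNode(g: Graph): g.Node = new g.Node   // result type depends on the parameter g

  val g1 = new Graph
  val n: g1.Node = firstNode(g1)                 // g.Node instantiated to g1.Node
  println(n.getClass.getName)
}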
- /** A map to convert every occurrence of a wildcard type to a fresh
- * type variable */
- object wildcardToTypeVarMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case WildcardType =>
- TypeVar(tp, new TypeConstraint)
- case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(bounds))
- case _ =>
- mapOver(tp)
- }
- }
-
- /** A map to convert every occurrence of a type variable to its origin type. */
- object typeVarToOriginMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeVar(origin, _) => origin
- case _ => mapOver(tp)
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- tp.normalize match {
- case TypeRef(_, sym1, _) if (sym == sym1) => result = true
- case SingleType(_, sym1) if (sym == sym1) => result = true
- case _ => mapOver(tp)
- }
- }
- }
-
- override def mapOver(arg: Tree) = {
- for (t <- arg) {
- traverse(t.tpe)
- if (t.symbol == sym)
- result = true
- }
- arg
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- if (tp eq t) result = true
- else mapOver(tp)
- }
- }
- override def mapOver(arg: Tree) = {
- for (t <- arg)
- traverse(t.tpe)
-
- arg
- }
- }
-
- /** A map to implement the `filter` method. */
- class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (p(tp)) result ::= tp
- mapOver(tp)
- }
- }
-
- /** A map to implement the `collect` method. */
- class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (pf.isDefinedAt(tp)) result ::= pf(tp)
- mapOver(tp)
- }
- }
-
- class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
- def traverse(tp: Type) {
- f(tp)
- mapOver(tp)
- }
- }
-
- /** A map to implement the `find` method. */
- class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
- def traverse(tp: Type) {
- if (result.isEmpty) {
- if (p(tp)) result = Some(tp)
- mapOver(tp)
- }
- }
- }
-
- /** A map to implement the `isErroneous` method. */
- object ErroneousCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- result = tp.isError
- mapOver(tp)
- }
- }
- }
-
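// The public reflection API exposes similar traversals on Type (exists, find, contains,
// foreach); a brief sketch of how the collectors above surface to users:
object CollectorDemo extends App {
  import scala.reflect.runtime.universe._

  val tp = typeOf[Map[String, List[Option[Int]]]]

  println(tp.exists(_ =:= typeOf[Int]))                            // true  (cf. ContainsTypeCollector)
  println(tp.find(_.typeSymbol == typeOf[Option[Any]].typeSymbol)) // Some(Option[Int]) (cf. FindTypeCollector)
  println(tp.contains(typeOf[Int].typeSymbol))                     // true  (cf. ContainsCollector)

  var parts = 0
  tp.foreach(_ => parts += 1)                                      // cf. ForEachTypeTraverser
  println(parts > 0)
}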
/**
* A more persistent version of `Type#memberType` which does not require
* that the symbol is a direct member of the prefix.
@@ -4980,194 +3791,10 @@ trait Types extends api.Types { self: SymbolTable =>
result
}
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given type.
- */
- private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
-
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given list
- * of types.
- */
- private def commonOwner(tps: List[Type]): Symbol = {
- if (tps.isEmpty) NoSymbol
- else {
- commonOwnerMap.clear()
- tps foreach (commonOwnerMap traverse _)
- if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
- }
- }
-
- protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
- protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
- var result: Symbol = _
-
- def clear() { result = null }
-
- private def register(sym: Symbol) {
- // First considered type is the trivial result.
- if ((result eq null) || (sym eq NoSymbol))
- result = sym
- else
- while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
- result = result.owner
- }
- def traverse(tp: Type) = tp.normalize match {
- case ThisType(sym) => register(sym)
- case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
- case SingleType(NoPrefix, sym) => register(sym.owner)
- case _ => mapOver(tp)
- }
- }
-
- private lazy val commonOwnerMapObj = new CommonOwnerMap
-
class MissingAliasControl extends ControlThrowable
val missingAliasException = new MissingAliasControl
class MissingTypeControl extends ControlThrowable
- object adaptToNewRunMap extends TypeMap {
-
- private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
- sym
- else if (sym.isModuleClass) {
- val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
-
- sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
- val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
- debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
- sym
- }
- }
- else {
- var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse {
- if (sym.isAliasType) throw missingAliasException
- devWarning(s"$pre.$sym no longer exist at phase $phase")
- throw new MissingTypeControl // For build manager and presentation compiler purposes
- }
- /** The two symbols have the same fully qualified name */
- def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
- if (!corresponds(sym.owner, rebind0.owner)) {
- debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
- val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
- if (bcs.isEmpty)
- assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
- else
- rebind0 = pre.baseType(bcs.head).member(sym.name)
- debuglog(
- "ADAPT2 pre = " + pre +
- ", bcs.head = " + bcs.head +
- ", sym = " + sym.fullLocationString +
- ", rebind = " + rebind0.fullLocationString
- )
- }
- rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
- debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
- throw new MalformedType(pre, sym.nameString)
- }
- }
- }
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) =>
- try {
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else ThisType(sym1)
- } catch {
- case ex: MissingTypeControl =>
- tp
- }
- case SingleType(pre, sym) =>
- if (sym.isPackage) tp
- else {
- val pre1 = this(pre)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym)) tp
- else singleType(pre1, sym1)
- } catch {
- case _: MissingTypeControl =>
- tp
- }
- }
- case TypeRef(pre, sym, args) =>
- if (sym.isPackageClass) tp
- else {
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
- tp
- } else if (sym1 == NoSymbol) {
- devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym")
- tp
- } else {
- copyTypeRef(tp, pre1, sym1, args1)
- }
- } catch {
- case ex: MissingAliasControl =>
- apply(tp.dealias)
- case _: MissingTypeControl =>
- tp
- }
- }
- case MethodType(params, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else copyMethodType(tp, params, restp1)
- case NullaryMethodType(restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else NullaryMethodType(restp1)
- case PolyType(tparams, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else PolyType(tparams, restp1)
-
- // Lukas: we need to check (together) whether we should also include parameter types
- // of PolyType and MethodType in adaptToNewRun
-
- case ClassInfoType(parents, decls, clazz) =>
- if (clazz.isPackageClass) tp
- else {
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else ClassInfoType(parents1, decls, clazz)
- }
- case RefinedType(parents, decls) =>
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
- case SuperType(_, _) => mapOver(tp)
- case TypeBounds(_, _) => mapOver(tp)
- case TypeVar(_, _) => mapOver(tp)
- case AnnotatedType(_,_,_) => mapOver(tp)
- case NotNullType(_) => mapOver(tp)
- case ExistentialType(_, _) => mapOver(tp)
- case _ => tp
- }
- }
-
- class SubTypePair(val tp1: Type, val tp2: Type) {
- override def hashCode = tp1.hashCode * 41 + tp2.hashCode
- override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match {
- // suspend TypeVars in types compared by =:=,
- // since we don't want to mutate them simply to check whether a subtype test is pending
- // in addition to making subtyping "more correct" for type vars,
- // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
- // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
- case stp: SubTypePair =>
- val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t))
- suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2)
- case _ =>
- false
- })
- override def toString = tp1+" <:<? "+tp2
- }
-
// Helper Methods -------------------------------------------------------------
/** The maximum allowable depth of lubs or glbs over types `ts`.
@@ -5275,84 +3902,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- private var subsametypeRecursions: Int = 0
-
- private def isUnifiable(pre1: Type, pre2: Type) =
- (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
-
- /** Returns true iff we are past phase specialize,
- * sym1 and sym2 are two existential skolems with equal names and bounds,
- * and pre1 and pre2 are equal prefixes
- */
- private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
- sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
- sym1.name == sym2.name &&
- phase.specialized &&
- sym1.info =:= sym2.info &&
- pre1 =:= pre2
- }
-
- private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
- if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
- if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
- true
- } else
- false
-
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
-
- /** Do `tp1` and `tp2` denote equivalent types? */
- def isSameType(tp1: Type, tp2: Type): Boolean = try {
- if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
- subsametypeRecursions += 1
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless {
-// isSameType1(tp1, tp2)
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
- try {
- result = isSameType1(tp1, tp2)
- }
- finally if (!result) undoLog.undoTo(before)
- result
- }
- finally undoLog.unlock()
- }
- finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
- subsametypeRecursions += 1
- undoLog undo { // undo type constraints that arise from operations in this block
- !isSameType1(tp1, tp2)
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
- case TypeRef(pre1, sym1, _) =>
- tp2 match {
- case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
- case _ => true
- }
- case _ => true
- }
-
def normalizePlus(tp: Type) =
if (isRawType(tp)) rawToExistential(tp)
else tp.normalize
@@ -5368,186 +3917,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
*/
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else {
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
- }
-
- def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _: SingleType =>
- return isSameType2(tp2, tp1) // put singleton type on the left, caught below
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case TypeRef(pre2, sym2, Nil) =>
- if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, typeVarLHS = true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, typeVarLHS = false)
- case _ =>
- }
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
- }
- }
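// A brief illustration (not part of this changeset) of the equivalence relation implemented
// above, as it surfaces through Type#=:= in the public reflection API:
object SameTypeDemo extends App {
  import scala.reflect.runtime.universe._

  type JStr = java.lang.String

  println(typeOf[JStr] =:= typeOf[String])        // true: aliases are expanded before comparing
  println(typeOf[List[Int]] =:= typeOf[Seq[Int]]) // false: =:= is equivalence, not conformance
  println(typeOf[AnyRef { def close(): Unit }] =:=
          typeOf[AnyRef { def close(): Unit }])   // true: refinements are compared member-wise
}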
/** Are `tps1` and `tps2` lists of pairwise equivalent types? */
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
@@ -5565,64 +3934,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
private val pendingBaseTypes = new mutable.HashSet[Type]
- def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
- def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
- subsametypeRecursions += 1
-
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
-// if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
-// val p = new SubTypePair(tp1, tp2)
-// if (pendingSubTypes(p))
-// false
-// else
-// try {
-// pendingSubTypes += p
-// isSubType2(tp1, tp2, depth)
-// } finally {
-// pendingSubTypes -= p
-// }
-// } else {
-// isSubType2(tp1, tp2, depth)
-// }
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType2(tp1, tp2, depth)
- }
- } finally if (!result) undoLog.undoTo(before)
-
- result
- } finally undoLog.unlock()
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
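// The conformance relation decided by this entry point is exposed to users as Type#<:< in the
// public reflection API; a quick sketch:
object SubTypeDemo extends App {
  import scala.reflect.runtime.universe._

  println(typeOf[List[Int]] <:< typeOf[Seq[Int]])  // true: List is a subclass of Seq
  println(typeOf[List[Int]] <:< typeOf[Seq[Any]])  // true: List is covariant in its element type
  println(typeOf[Nothing]   <:< typeOf[String])    // true: Nothing conforms to every type
  println(typeOf[Seq[Int]]  <:< typeOf[List[Int]]) // false
}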
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
@@ -5755,42 +4069,6 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => false
}
- private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
- val PolyType(tparams1, res1) = tp1
- val PolyType(tparams2, res2) = tp2
-
- sameLength(tparams1, tparams2) && {
- // fast-path: polymorphic method type -- type params cannot be captured
- val isMethod = tparams1.head.owner.isMethod
- //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala
- val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
- def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
- def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
- def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
-
- (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
- }
- }
-
- // @assume tp1.isHigherKinded || tp2.isHigherKinded
- def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = {
- def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
- case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side
- case (_, TypeRef(_, NothingClass, _)) => false
- case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType)
- case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side
- case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes
- def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s"
- devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}")
- false
- }
-
- ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type
- || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
- || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's
- )
- }
-
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
def isSubArg(t1: Type, t2: Type, variance: Variance) = (
(variance.isContravariant || isSubType(t1, t2, depth))
@@ -5800,205 +4078,7 @@ trait Types extends api.Types { self: SymbolTable =>
corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
}
- /** Does type `tp1` conform to `tp2`? */
- private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
-
- /** First try, on the right:
- * - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
- * - handle common cases for first-kind TypeRefs on both sides as a fast path.
- */
- def firstTry = tp2 match {
- // fast path: two typerefs, none of them HK
- case tr2: TypeRef =>
- tp1 match {
- case tr1: TypeRef =>
- val sym1 = tr1.sym
- val sym2 = tr2.sym
- val pre1 = tr1.pre
- val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
- else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
- (isUnifiable(pre1, pre2) ||
- isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
- sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
- isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
- ||
- sym2.isClass && {
- val base = tr1 baseType sym2
- (base ne tr1) && isSubType(base, tr2, depth)
- }
- ||
- thirdTryRef(tr1, tr2))
- case _ =>
- secondTry
- }
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1, bounds.hi, depth)
- case tv2 @ TypeVar(_, constr2) =>
- tp1 match {
- case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
- secondTry
- case _ =>
- tv2.registerBound(tp1, isLowerBound = true)
- }
- case _ =>
- secondTry
- }
-
- /** Second try, on the left:
- * - unwrap AnnotatedTypes, BoundedWildcardTypes,
- * - bind typevars,
- * - handle existential types by skolemization.
- */
- def secondTry = tp1 match {
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1.bounds.lo, tp2, depth)
- case tv @ TypeVar(_,_) =>
- tv.registerBound(tp2, isLowerBound = false)
- case ExistentialType(_, _) =>
- try {
- skolemizationLevel += 1
- isSubType(tp1.skolemizeExistential, tp2, depth)
- } finally {
- skolemizationLevel -= 1
- }
- case _ =>
- thirdTry
- }
-
- def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
- val sym2 = tp2.sym
- sym2 match {
- case NotNullClass => tp1.isNotNull
- case SingletonClass => tp1.isStable || fourthTry
- case _: ClassSymbol =>
- if (isRawType(tp2))
- isSubType(tp1, rawToExistential(tp2), depth)
- else if (sym2.name == tpnme.REFINE_CLASS_NAME)
- isSubType(tp1, sym2.info, depth)
- else
- fourthTry
- case _: TypeSymbol =>
- if (sym2 hasFlag DEFERRED) {
- val tp2a = tp2.bounds.lo
- isDifferentTypeConstructor(tp2, tp2a) &&
- isSubType(tp1, tp2a, depth) ||
- fourthTry
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- fourthTry
- }
- }
-
- /** Third try, on the right:
- * - decompose refined types.
- * - handle typerefs, existentials, and notnull types.
- * - handle left+right method types, polytypes, typebounds
- */
- def thirdTry = tp2 match {
- case tr2: TypeRef =>
- thirdTryRef(tp1, tr2)
- case rt2: RefinedType =>
- (rt2.parents forall (isSubType(tp1, _, depth))) &&
- (rt2.decls forall (specializesSym(tp1, _, depth)))
- case et2: ExistentialType =>
- et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
- case nn2: NotNullType =>
- tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
- case mt2: MethodType =>
- tp1 match {
- case mt1 @ MethodType(params1, res1) =>
- val params2 = mt2.params
- val res2 = mt2.resultType
- (sameLength(params1, params2) &&
- mt1.isImplicit == mt2.isImplicit &&
- matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1.substSym(params1, params2), res2, depth))
- // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
- case _ =>
- false
- }
- case pt2 @ NullaryMethodType(_) =>
- tp1 match {
- // TODO: consider MethodType mt for which mt.params.isEmpty??
- case pt1 @ NullaryMethodType(_) =>
- isSubType(pt1.resultType, pt2.resultType, depth)
- case _ =>
- false
- }
- case TypeBounds(lo2, hi2) =>
- tp1 match {
- case TypeBounds(lo1, hi1) =>
- isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
- case _ =>
- false
- }
- case _ =>
- fourthTry
- }
-
- /** Fourth try, on the left:
- * - handle typerefs, refined types, notnull and singleton types.
- */
- def fourthTry = tp1 match {
- case tr1 @ TypeRef(pre1, sym1, _) =>
- sym1 match {
- case NothingClass => true
- case NullClass =>
- tp2 match {
- case TypeRef(_, sym2, _) =>
- containsNull(sym2)
- case _ =>
- isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
- }
- case _: ClassSymbol =>
- if (isRawType(tp1))
- isSubType(rawToExistential(tp1), tp2, depth)
- else if (sym1.isModuleClass) tp2 match {
- case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
- case _ => false
- }
- else if (sym1.isRefinementClass)
- isSubType(sym1.info, tp2, depth)
- else false
-
- case _: TypeSymbol =>
- if (sym1 hasFlag DEFERRED) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- false
- }
- case RefinedType(parents1, _) =>
- parents1 exists (isSubType(_, tp2, depth))
- case _: SingletonType | _: NotNullType =>
- isSubType(tp1.underlying, tp2, depth)
- case _ =>
- false
- }
-
- firstTry
- }
-
- private def containsNull(sym: Symbol): Boolean =
+ protected[internal] def containsNull(sym: Symbol): Boolean =
sym.isClass && sym != NothingClass &&
!(sym isNonBottomSubClass AnyValClass) &&
!(sym isNonBottomSubClass NotNullClass)
@@ -6020,7 +4100,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does member `sym1` of `tp1` have a stronger type
* than member `sym2` of `tp2`?
*/
- private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth)))
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
@@ -6154,7 +4234,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
/** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
- private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+ protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
case Nil =>
syms2.isEmpty
case sym1 :: rest1 =>
@@ -6183,87 +4263,6 @@ trait Types extends api.Types { self: SymbolTable =>
else x1 :: xs1
}
- /** Solve constraint collected in types `tvars`.
- *
- * @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to `tvars`
- * @param variances The variances of type parameters; need to reverse
- * solution direction for all contravariant variables.
- * @param upper When `true` search for max solution else min.
- */
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Variance], upper: Boolean): Boolean =
- solve(tvars, tparams, variances, upper, AnyDepth)
-
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Variance], upper: Boolean, depth: Int): Boolean = {
-
- def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
- if (tvar.constr.inst == NoType) {
- val up = if (variance.isContravariant) !upper else upper
- tvar.constr.inst = null
- val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
- //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
- var cyclic = bound contains tparam
- foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
- val ok = (tparam2 != tparam) && (
- (bound contains tparam2)
- || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
- || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
- )
- if (ok) {
- if (tvar2.constr.inst eq null) cyclic = true
- solveOne(tvar2, tparam2, variance2)
- }
- })
- if (!cyclic) {
- if (up) {
- if (bound.typeSymbol != AnyClass) {
- log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.lo.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- } else {
- if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
- log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.hi.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- }
- }
- tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
-
- //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
- val newInst = (
- if (up) {
- if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
- } else {
- if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
- }
- )
- log(s"$tvar setInst $newInst")
- tvar setInst newInst
- //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
- }
- }
-
- // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
- }
-
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
@@ -6276,168 +4275,6 @@ trait Types extends api.Types { self: SymbolTable =>
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
-// Lubs and Glbs ---------------------------------------------------------
-
- private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
- import util.TableDef
- import TableDef.Column
- def str(tp: Type) = {
- if (tp == NoType) ""
- else {
- val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
- if (s.length < 60) s
- else (s take 57) + "..."
- }
- }
-
- val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
- val maxSeqLength = sorted.map(_._2.size).max
- val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
- val transposed = padded.transpose
-
- val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
- case ((k, v), idx) =>
- Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true)
- }
-
- val tableDef = TableDef(columns: _*)
- val formatted = tableDef.table(transposed)
- println("** Depth is " + depth + "\n" + formatted)
- }
-
- /** From a list of types, find any which take type parameters
- * where the type parameter bounds contain references to other
-   * types in the list (including itself).
- *
- * @return List of symbol pairs holding the recursive type
- * parameter and the parameter which references it.
- */
- def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
- if (ts.isEmpty) Nil
- else {
- val sym = ts.head.typeSymbol
- require(ts.tail forall (_.typeSymbol == sym), ts)
- for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
- p -> in
- }
- }
-
- /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
- * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
- *
- * xs <= ys iff forall y in ys exists x in xs such that x <: y
- *
- * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
- * (these type parameters may be referred to by type arguments in the BTS column of those types,
- * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
- * @arg tsBts a matrix whose columns are basetype sequences
- * the first row is the original list of types for which we're computing the lub
- * (except that type constructors have been applied to their dummyArgs)
- * @See baseTypeSeq for a definition of sorted and upwards closed.
- */
- private def lubList(ts: List[Type], depth: Int): List[Type] = {
- var lubListDepth = 0
- // This catches some recursive situations which would otherwise
- // befuddle us, e.g. pos/hklub0.scala
- def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
-
- def elimHigherOrderTypeParam(tp: Type) = tp match {
- case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) =>
- logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor)
- case _ => tp
- }
- // pretypes is a tail-recursion-preserving accumulator.
- @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
- lubListDepth += 1
-
- if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
- else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
- else {
- // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
- // Invariant: all symbols "under" (closer to the first row) the frontier
- // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
- val ts0 = tsBts map (_.head)
-
- // Is the frontier made up of types with the same symbol?
- val isUniformFrontier = (ts0: @unchecked) match {
- case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
- }
-
- // Produce a single type for this frontier by merging the prefixes and arguments of those
- // typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
- // merging, strip targs that refer to bound tparams (when we're computing the lub of type
- // constructors.) Also filter out all types that are a subtype of some other type.
- if (isUniformFrontier) {
- val fbounds = findRecursiveBounds(ts0) map (_._2)
- val tcLubList = typeConstructorLubList(ts0)
- def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains
-
- val ts1 = ts0 map { t =>
- if (isRecursive(t)) {
- tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match {
- case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp)
- case _ => t
- }
- }
- else t
- }
- val tails = tsBts map (_.tail)
- mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match {
- case Some(tp) => loop(tp :: pretypes, tails)
- case _ => loop(pretypes, tails)
- }
- }
- else {
- // frontier is not uniform yet, move it beyond the current minimal symbol;
-            // lather, rinse, repeat
- val sym = minSym(ts0)
- val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
- if (printLubs) {
- val str = (newtps.zipWithIndex map { case (tps, idx) =>
- tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
- }).mkString("")
-
- println("Frontier(\n" + str + ")")
- printLubMatrix((ts zip tsBts).toMap, lubListDepth)
- }
-
- loop(pretypes, newtps)
- }
- }
- }
-
- val initialBTSes = ts map (_.baseTypeSeq.toList)
- if (printLubs)
- printLubMatrix((ts zip initialBTSes).toMap, depth)
-
- loop(Nil, initialBTSes)
- }
-
- /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
- private def minSym(tps: List[Type]): Symbol =
- (tps.head.typeSymbol /: tps.tail) {
- (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
- }
-
- /** A minimal type list which has a given list of types as its base type sequence */
- def spanningTypes(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case first :: rest =>
- first :: spanningTypes(
- rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
- }
-
- /** Eliminate from list of types all elements which are a supertype
- * of some other element of the list. */
- private def elimSuper(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
- if (rest exists (t1 => t1 <:< t)) rest else t :: rest
- }
-
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
clazz.classBound.asSeenFrom(pre, clazz.owner)
@@ -6445,406 +4282,6 @@ trait Types extends api.Types { self: SymbolTable =>
t
}
- /** Eliminate from list of types all elements which are a subtype
- * of some other element of the list. */
- private def elimSub(ts: List[Type], depth: Int): List[Type] = {
- def elimSub0(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
- if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
- }
- val ts0 = elimSub0(ts)
- if (ts0.isEmpty || ts0.tail.isEmpty) ts0
- else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
- if (ts1 eq ts0) ts0
- else elimSub(ts1, depth)
- }
- }
-
- private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
- val quantified = ts flatMap {
- case ExistentialType(qs, _) => qs
- case t => List()
- }
- def stripType(tp: Type): Type = tp match {
- case ExistentialType(_, res) =>
- res
- case tv@TypeVar(_, constr) =>
- if (tv.instValid) stripType(constr.inst)
- else if (tv.untouchable) tv
- else abort("trying to do lub/glb of typevar "+tp)
- case t => t
- }
- val strippedTypes = ts mapConserve stripType
- (strippedTypes, quantified)
- }
-
- def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts exists typeHasAnnotations)
- (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
- else (lub(ts), false)
-
- def numericLub(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t2
- else if (isNumericSubType(t2, t1)) t1
- else IntClass.tpe)
-
- def isWeakSubType(tp1: Type, tp2: Type) =
- tp1.deconst.normalize match {
- case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- isNumericSubClass(sym1, sym2)
- case tv2 @ TypeVar(_, _) =>
- tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case tv1 @ TypeVar(_, _) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case _ =>
- isSubType(tp1, tp2)
- }
-
- /** The isNumericValueType tests appear redundant, but without them
- * test/continuations-neg/function3.scala goes into an infinite loop.
- * (Even if the calls are to typeSymbolDirect.)
- */
- def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
- isNumericValueType(tp1)
- && isNumericValueType(tp2)
- && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
- )
-
- private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
-
- /** Given a list of types, finds all the base classes they have in
- * common, then returns a list of type constructors derived directly
- * from the symbols (so any more specific type information is ignored.)
- * The list is filtered such that every type constructor in the list
- * expects the same number of type arguments, which is chosen based
- * on the deepest class among the common baseclasses.
- */
- def typeConstructorLubList(ts: List[Type]): List[Type] = {
- val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _)
- val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz))
-
- tcons map (_.typeConstructor) match {
- case Nil => Nil
- case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size)
- }
- }
-
- def lub(ts: List[Type]): Type = ts match {
- case List() => NothingClass.tpe
- case List(t) => t
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- val res = lub(ts, lubDepth(ts))
- // If the number of unapplied type parameters in all incoming
- // types is consistent, and the lub does not match that, return
- // the type constructor of the calculated lub instead. This
- // is because lubbing type constructors tends to result in types
- // which have been applied to dummies or Nothing.
- ts.map(_.typeParams.size).distinct match {
- case x :: Nil if res.typeParams.size != x =>
- logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor)
- case _ =>
- res
- }
- }
- finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- /** The least upper bound wrt <:< of a list of types */
- private def lub(ts: List[Type], depth: Int): Type = {
- def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
- case List() => NothingClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(glb(bounds, depth)))
- PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
- case ts @ AnnotatedType(annots, tpe, _) :: rest =>
- annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
- case ts =>
- lubResults get (depth, ts) match {
- case Some(lubType) =>
- lubType
- case None =>
- lubResults((depth, ts)) = AnyClass.tpe
- val res = if (depth < 0) AnyClass.tpe else lub1(ts)
- lubResults((depth, ts)) = res
- res
- }
- }
- def lub1(ts0: List[Type]): Type = {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val lubBaseTypes: List[Type] = lubList(ts, depth)
- val lubParents = spanningTypes(lubBaseTypes)
- val lubOwner = commonOwner(ts)
- val lubBase = intersectionType(lubParents, lubOwner)
- val lubType =
- if (phase.erasedTypes || depth == 0 ) lubBase
- else {
- val lubRefined = refinedType(lubParents, lubOwner)
- val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
- def excludeFromLub(sym: Symbol) = (
- sym.isClass
- || sym.isConstructor
- || !sym.isPublic
- || isGetClass(sym)
- || sym.isFinal
- || narrowts.exists(t => !refines(t, sym))
- )
- def lubsym(proto: Symbol): Symbol = {
- val prototp = lubThisType.memberInfo(proto)
- val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
- sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
-
- if (syms contains NoSymbol) NoSymbol
- else {
- val symtypes =
- map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
- if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
- else if (symtypes.tail forall (symtypes.head =:= _))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
- else {
- def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
- .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
- }
- }
- }
- def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives
- !syms.isEmpty && (syms forall (alt =>
- // todo alt != sym is strictly speaking not correct, but without it we lose
- // efficiency.
- alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
- }
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
- try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth))
- catch {
- case ex: NoCommonType =>
- }
- }
- if (lubRefined.decls.isEmpty) lubBase
- else if (!verifyLubs) lubRefined
- else {
- // Verify that every given type conforms to the calculated lub.
- // In theory this should not be necessary, but higher-order type
- // parameters are not handled correctly.
- val ok = ts forall { t =>
- isSubType(t, lubRefined, depth) || {
- if (settings.debug.value || printLubs) {
- Console.println(
- "Malformed lub: " + lubRefined + "\n" +
- "Argument " + t + " does not conform. Falling back to " + lubBase
- )
- }
- false
- }
- }
- // If not, fall back on the more conservative calculation.
- if (ok) lubRefined
- else lubBase
- }
- }
- // dropIllegalStarTypes is a localized fix for SI-6897. We should probably
- // integrate that transformation at a lower level in master, but lubs are
- // the likely and maybe only spot they escape, so fixing here for 2.10.1.
- existentialAbstraction(tparams, dropIllegalStarTypes(lubType))
- }
- if (printLubs) {
- println(indent + "lub of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- assert(indent.length <= 100)
- }
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = lub0(ts)
- if (printLubs) {
- indent = indent stripSuffix " "
- println(indent + "lub of " + ts + " is " + res)//debug
- }
- if (ts forall typeIsNotNull) res.notNull else res
- }
-
- val GlbFailure = new Throwable
-
- /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
- * call in `glb`. There's a possible infinite recursion when `specializes` calls
- * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
- * The counter breaks this recursion after two calls.
- * If the recursion is broken, no member is added to the glb.
- */
- private var globalGlbDepth = 0
- private final val globalGlbLimit = 2
-
- /** The greatest lower bound of a list of types (as determined by `<:<`). */
- def glb(ts: List[Type]): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- glbNorm(ts0, lubDepth(ts0))
- } finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 => glbNorm(ts0, depth)
- }
-
- /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
- * with regard to `elimSuper`. */
- protected def glbNorm(ts: List[Type], depth: Int): Type = {
- def glb0(ts0: List[Type]): Type = ts0 match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(lub(bounds, depth)))
- PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
- case ts =>
- glbResults get (depth, ts) match {
- case Some(glbType) =>
- glbType
- case _ =>
- glbResults((depth, ts)) = NothingClass.tpe
- val res = if (depth < 0) NothingClass.tpe else glb1(ts)
- glbResults((depth, ts)) = res
- res
- }
- }
- def glb1(ts0: List[Type]): Type = {
- try {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val glbOwner = commonOwner(ts)
- def refinedToParents(t: Type): List[Type] = t match {
- case RefinedType(ps, _) => ps flatMap refinedToParents
- case _ => List(t)
- }
- def refinedToDecls(t: Type): List[Scope] = t match {
- case RefinedType(ps, decls) =>
- val dss = ps flatMap refinedToDecls
- if (decls.isEmpty) dss else decls :: dss
- case _ => List()
- }
- val ts1 = ts flatMap refinedToParents
- val glbBase = intersectionType(ts1, glbOwner)
- val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
- else {
- val glbRefined = refinedType(ts1, glbOwner)
- val glbThisType = glbRefined.typeSymbol.thisType
- def glbsym(proto: Symbol): Symbol = {
- val prototp = glbThisType.memberInfo(proto)
- val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives)
- if glbThisType.memberInfo(alt) matches prototp
- ) yield alt
- val symtypes = syms map glbThisType.memberInfo
- assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
- if (proto.isTerm) glb(symtypes, decr(depth))
- else {
- def isTypeBound(tp: Type) = tp match {
- case TypeBounds(_, _) => true
- case _ => false
- }
- def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) TypeBounds(lo, hi)
- else throw GlbFailure
- }
- val symbounds = symtypes filter isTypeBound
- var result: Type =
- if (symbounds.isEmpty)
- TypeBounds.empty
- else glbBounds(symbounds)
- for (t <- symtypes if !isTypeBound(t))
- if (result.bounds containsType t) result = t
- else throw GlbFailure
- result
- })
- }
- if (globalGlbDepth < globalGlbLimit)
- try {
- globalGlbDepth += 1
- val dss = ts flatMap refinedToDecls
- for (ds <- dss; sym <- ds.iterator)
- if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
- try {
- addMember(glbThisType, glbRefined, glbsym(sym), depth)
- } catch {
- case ex: NoCommonType =>
- }
- } finally {
- globalGlbDepth -= 1
- }
- if (glbRefined.decls.isEmpty) glbBase else glbRefined
- }
- existentialAbstraction(tparams, glbType)
- } catch {
- case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
- }
- }
- // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
-
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = glb0(ts)
-
- // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
-
- if (ts exists typeIsNotNull) res.notNull else res
- }
-
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
var tvs: List[TypeVar] = Nil
@@ -6979,51 +4416,6 @@ trait Types extends api.Types { self: SymbolTable =>
def inheritsJavaVarArgsMethod(clazz: Symbol) =
clazz.thisType.baseClasses exists isJavaVarargsAncestor
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of list of bounds infos, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
- def getBounds(tp: Type): List[Type] = tp match {
- case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
- tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
- case tp =>
- if (tp ne tp.normalize) getBounds(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map getBounds
- }
-
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of instance types, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
- def transformResultType(tp: Type): Type = tp match {
- case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
- restpe.substSym(tparams1, tparams)
- case tp =>
- if (tp ne tp.normalize) transformResultType(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map transformResultType
- }
-
- /** All types in list must be method types with equal parameter types.
- * Returns list of their result types.
- */
- private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
- tps map {
- case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
- res
- case NullaryMethodType(res) if pts.isEmpty =>
- res
- case _ =>
- throw new NoCommonType(tps)
- }
-
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
@@ -7048,7 +4440,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** The current indentation string for traces */
- private var indent: String = ""
+ protected[internal] var indent: String = ""
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
@@ -7114,29 +4506,6 @@ trait Types extends api.Types { self: SymbolTable =>
"scala.collection.IndexedSeq",
"scala.collection.Iterator")
-
- /** The maximum number of recursions allowed in toString
- */
- final val maxTostringRecursions = 50
-
- private var tostringRecursions = 0
-
- protected def typeToString(tpe: Type): String =
- if (tostringRecursions >= maxTostringRecursions) {
- devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe))
- if (settings.debug.value)
- (new Throwable).printStackTrace
-
- "..."
- }
- else
- try {
- tostringRecursions += 1
- tpe.safeToString
- } finally {
- tostringRecursions -= 1
- }
-
// ----- Hoisted closures and convenience methods, for compile time reductions -------
private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
new file mode 100644
index 0000000000..e5ddd8f359
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
@@ -0,0 +1,50 @@
+package scala.reflect
+package internal
+package tpe
+
+private[internal] trait CommonOwners {
+ self: SymbolTable =>
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given type.
+ */
+ protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given list
+ * of types.
+ */
+ protected[internal] def commonOwner(tps: List[Type]): Symbol = {
+ if (tps.isEmpty) NoSymbol
+ else {
+ commonOwnerMap.clear()
+ tps foreach (commonOwnerMap traverse _)
+ if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
+ }
+ }
+
+ protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+ protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
+ var result: Symbol = _
+
+ def clear() { result = null }
+
+ private def register(sym: Symbol) {
+ // First considered type is the trivial result.
+ if ((result eq null) || (sym eq NoSymbol))
+ result = sym
+ else
+ while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
+ result = result.owner
+ }
+ def traverse(tp: Type) = tp.normalize match {
+ case ThisType(sym) => register(sym)
+ case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
+ case SingleType(NoPrefix, sym) => register(sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+
+ private lazy val commonOwnerMapObj = new CommonOwnerMap
+}
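
The owner-climbing logic in CommonOwnerMap.register above lends itself to a small standalone illustration. The sketch below is a hypothetical stand-in (Sym, ownerChain and commonOwner are invented names, not the compiler's Symbol API): for each symbol it walks the accumulated result's owner chain until it reaches an owner that encloses the new symbol, which is the same idea register expresses with isNestedIn and result.owner.

    // Minimal sketch of the owner-climbing idea; Sym is a toy stand-in
    // for the compiler's Symbol, not the real API.
    object CommonOwnerSketch extends App {
      case class Sym(name: String, owner: Option[Sym]) {
        // A symbol is nested in `other` if `other` occurs on its owner chain.
        def isNestedIn(other: Sym): Boolean =
          this == other || owner.exists(_.isNestedIn(other))
        def ownerChain: List[Sym] = this :: owner.map(_.ownerChain).getOrElse(Nil)
      }

      // Fold over the symbols, climbing the accumulated result's owner chain
      // until it encloses the next symbol (None loosely plays the role of NoSymbol).
      def commonOwner(syms: List[Sym]): Option[Sym] =
        syms.foldLeft(Option.empty[Sym]) {
          case (None, sym)      => Some(sym) // first symbol seen is the trivial result
          case (Some(acc), sym) => acc.ownerChain.find(o => sym.isNestedIn(o))
        }

      val root = Sym("root", None)
      val pkgA = Sym("a", Some(root))
      val clsB = Sym("B", Some(pkgA))
      val clsC = Sym("C", Some(pkgA))
      println(commonOwner(List(clsB, clsC))) // Some(Sym(a,...)): the enclosing package
    }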
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
new file mode 100644
index 0000000000..bdccc75d6d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -0,0 +1,592 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import util.Statistics
+import Variance._
+
+private[internal] trait GlbLubs {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val printLubs = sys.props contains "scalac.debug.lub"
+
+ /** In case anyone wants to turn off lub verification without reverting anything. */
+ private final val verifyLubs = true
+
+
+ private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
+ import util.TableDef
+ import TableDef.Column
+ def str(tp: Type) = {
+ if (tp == NoType) ""
+ else {
+ val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
+ if (s.length < 60) s
+ else (s take 57) + "..."
+ }
+ }
+
+ val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
+ val maxSeqLength = sorted.map(_._2.size).max
+ val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
+ val transposed = padded.transpose
+
+ val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
+ case ((k, v), idx) =>
+ Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true)
+ }
+
+ val tableDef = TableDef(columns: _*)
+ val formatted = tableDef.table(transposed)
+ println("** Depth is " + depth + "\n" + formatted)
+ }
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to other
+   * types in the list (including itself).
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+ if (ts.isEmpty) Nil
+ else {
+ val sym = ts.head.typeSymbol
+ require(ts.tail forall (_.typeSymbol == sym), ts)
+ for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+ p -> in
+ }
+ }
+
+ /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+ * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
+ *
+ * xs <= ys iff forall y in ys exists x in xs such that x <: y
+ *
+ * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+ * (these type parameters may be referred to by type arguments in the BTS column of those types,
+ * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+ * @arg tsBts a matrix whose columns are basetype sequences
+ * the first row is the original list of types for which we're computing the lub
+ * (except that type constructors have been applied to their dummyArgs)
+ * @See baseTypeSeq for a definition of sorted and upwards closed.
+ */
+ def lubList(ts: List[Type], depth: Int): List[Type] = {
+ var lubListDepth = 0
+ // This catches some recursive situations which would otherwise
+ // befuddle us, e.g. pos/hklub0.scala
+ def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
+
+ def elimHigherOrderTypeParam(tp: Type) = tp match {
+ case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) =>
+ logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor)
+ case _ => tp
+ }
+ // pretypes is a tail-recursion-preserving accumulator.
+ @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
+ lubListDepth += 1
+
+ if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
+ else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
+ else {
+ // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
+ // Invariant: all symbols "under" (closer to the first row) the frontier
+ // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+ val ts0 = tsBts map (_.head)
+
+ // Is the frontier made up of types with the same symbol?
+ val isUniformFrontier = (ts0: @unchecked) match {
+ case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
+ }
+
+ // Produce a single type for this frontier by merging the prefixes and arguments of those
+ // typerefs that share the same symbol: that symbol is the current maximal symbol for which
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
+ // merging, strip targs that refer to bound tparams (when we're computing the lub of type
+ // constructors.) Also filter out all types that are a subtype of some other type.
+ if (isUniformFrontier) {
+ val fbounds = findRecursiveBounds(ts0) map (_._2)
+ val tcLubList = typeConstructorLubList(ts0)
+ def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains
+
+ val ts1 = ts0 map { t =>
+ if (isRecursive(t)) {
+ tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match {
+ case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp)
+ case _ => t
+ }
+ }
+ else t
+ }
+ val tails = tsBts map (_.tail)
+ mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match {
+ case Some(tp) => loop(tp :: pretypes, tails)
+ case _ => loop(pretypes, tails)
+ }
+ }
+ else {
+ // frontier is not uniform yet, move it beyond the current minimal symbol;
+            // lather, rinse, repeat
+ val sym = minSym(ts0)
+ val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
+ if (printLubs) {
+ val str = (newtps.zipWithIndex map { case (tps, idx) =>
+ tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
+ }).mkString("")
+
+ println("Frontier(\n" + str + ")")
+ printLubMatrix((ts zip tsBts).toMap, lubListDepth)
+ }
+
+ loop(pretypes, newtps)
+ }
+ }
+ }
+
+ val initialBTSes = ts map (_.baseTypeSeq.toList)
+ if (printLubs)
+ printLubMatrix((ts zip initialBTSes).toMap, depth)
+
+ loop(Nil, initialBTSes)
+ }
+
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
+ private def minSym(tps: List[Type]): Symbol =
+ (tps.head.typeSymbol /: tps.tail) {
+ (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
+ }
+
+ /** A minimal type list which has a given list of types as its base type sequence */
+ def spanningTypes(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case first :: rest =>
+ first :: spanningTypes(
+ rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
+ }
+
+ /** Eliminate from list of types all elements which are a supertype
+ * of some other element of the list. */
+ private def elimSuper(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
+ if (rest exists (t1 => t1 <:< t)) rest else t :: rest
+ }
+
+ /** Eliminate from list of types all elements which are a subtype
+ * of some other element of the list. */
+ private def elimSub(ts: List[Type], depth: Int): List[Type] = {
+ def elimSub0(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
+ if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
+ }
+ val ts0 = elimSub0(ts)
+ if (ts0.isEmpty || ts0.tail.isEmpty) ts0
+ else {
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
+ if (ts1 eq ts0) ts0
+ else elimSub(ts1, depth)
+ }
+ }
+
+ private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
+ val quantified = ts flatMap {
+ case ExistentialType(qs, _) => qs
+ case t => List()
+ }
+ def stripType(tp: Type): Type = tp match {
+ case ExistentialType(_, res) =>
+ res
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) stripType(constr.inst)
+ else if (tv.untouchable) tv
+ else abort("trying to do lub/glb of typevar "+tp)
+ case t => t
+ }
+ val strippedTypes = ts mapConserve stripType
+ (strippedTypes, quantified)
+ }
+
+ def weakLub(ts: List[Type]) =
+ if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
+ else if (ts exists typeHasAnnotations)
+ (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
+ else (lub(ts), false)
+
+ def numericLub(ts: List[Type]) =
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntClass.tpe)
+
+ private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
+ private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
+
+ /** Given a list of types, finds all the base classes they have in
+ * common, then returns a list of type constructors derived directly
+ * from the symbols (so any more specific type information is ignored.)
+ * The list is filtered such that every type constructor in the list
+ * expects the same number of type arguments, which is chosen based
+ * on the deepest class among the common baseclasses.
+ */
+ def typeConstructorLubList(ts: List[Type]): List[Type] = {
+ val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _)
+ val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz))
+
+ tcons map (_.typeConstructor) match {
+ case Nil => Nil
+ case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size)
+ }
+ }
+
+ def lub(ts: List[Type]): Type = ts match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ val res = lub(ts, lubDepth(ts))
+ // If the number of unapplied type parameters in all incoming
+ // types is consistent, and the lub does not match that, return
+ // the type constructor of the calculated lub instead. This
+ // is because lubbing type constructors tends to result in types
+ // which have been applied to dummies or Nothing.
+ ts.map(_.typeParams.size).distinct match {
+ case x :: Nil if res.typeParams.size != x =>
+ logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor)
+ case _ =>
+ res
+ }
+ }
+ finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ /** The least upper bound wrt <:< of a list of types */
+ protected[internal] def lub(ts: List[Type], depth: Int): Type = {
+ def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
+ case ts =>
+ lubResults get (depth, ts) match {
+ case Some(lubType) =>
+ lubType
+ case None =>
+ lubResults((depth, ts)) = AnyClass.tpe
+ val res = if (depth < 0) AnyClass.tpe else lub1(ts)
+ lubResults((depth, ts)) = res
+ res
+ }
+ }
+ def lub1(ts0: List[Type]): Type = {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val lubBaseTypes: List[Type] = lubList(ts, depth)
+ val lubParents = spanningTypes(lubBaseTypes)
+ val lubOwner = commonOwner(ts)
+ val lubBase = intersectionType(lubParents, lubOwner)
+ val lubType =
+ if (phase.erasedTypes || depth == 0 ) lubBase
+ else {
+ val lubRefined = refinedType(lubParents, lubOwner)
+ val lubThisType = lubRefined.typeSymbol.thisType
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || sym.isFinal
+ || narrowts.exists(t => !refines(t, sym))
+ )
+ def lubsym(proto: Symbol): Symbol = {
+ val prototp = lubThisType.memberInfo(proto)
+ val syms = narrowts map (t =>
+ t.nonPrivateMember(proto.name).suchThat(sym =>
+ sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+
+ if (syms contains NoSymbol) NoSymbol
+ else {
+ val symtypes =
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ else if (symtypes.tail forall (symtypes.head =:= _))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
+ else {
+ def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
+ }
+ }
+ }
+ def refines(tp: Type, sym: Symbol): Boolean = {
+ val syms = tp.nonPrivateMember(sym.name).alternatives
+ !syms.isEmpty && (syms forall (alt =>
+ // todo alt != sym is strictly speaking not correct, but without it we lose
+ // efficiency.
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
+ }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth))
+ catch {
+ case ex: NoCommonType =>
+ }
+ }
+ if (lubRefined.decls.isEmpty) lubBase
+ else if (!verifyLubs) lubRefined
+ else {
+ // Verify that every given type conforms to the calculated lub.
+ // In theory this should not be necessary, but higher-order type
+ // parameters are not handled correctly.
+ val ok = ts forall { t =>
+ isSubType(t, lubRefined, depth) || {
+ if (settings.debug.value || printLubs) {
+ Console.println(
+ "Malformed lub: " + lubRefined + "\n" +
+ "Argument " + t + " does not conform. Falling back to " + lubBase
+ )
+ }
+ false
+ }
+ }
+ // If not, fall back on the more conservative calculation.
+ if (ok) lubRefined
+ else lubBase
+ }
+ }
+ // dropIllegalStarTypes is a localized fix for SI-6897. We should probably
+ // integrate that transformation at a lower level in master, but lubs are
+ // the likely and maybe only spot they escape, so fixing here for 2.10.1.
+ existentialAbstraction(tparams, dropIllegalStarTypes(lubType))
+ }
+ if (printLubs) {
+ println(indent + "lub of " + ts + " at depth "+depth)//debug
+ indent = indent + " "
+ assert(indent.length <= 100)
+ }
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = lub0(ts)
+ if (printLubs) {
+ indent = indent stripSuffix " "
+ println(indent + "lub of " + ts + " is " + res)//debug
+ }
+ if (ts forall typeIsNotNull) res.notNull else res
+ }
+
+ val GlbFailure = new Throwable
+
+ /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
+ * call in `glb`. There's a possible infinite recursion when `specializes` calls
+ * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
+ * The counter breaks this recursion after two calls.
+ * If the recursion is broken, no member is added to the glb.
+ */
+ private var globalGlbDepth = 0
+ private final val globalGlbLimit = 2
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
+ def glb(ts: List[Type]): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ glbNorm(ts0, lubDepth(ts0))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 => glbNorm(ts0, depth)
+ }
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+ * with regard to `elimSuper`. */
+ protected def glbNorm(ts: List[Type], depth: Int): Type = {
+ def glb0(ts0: List[Type]): Type = ts0 match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ case ts =>
+ glbResults get (depth, ts) match {
+ case Some(glbType) =>
+ glbType
+ case _ =>
+ glbResults((depth, ts)) = NothingClass.tpe
+ val res = if (depth < 0) NothingClass.tpe else glb1(ts)
+ glbResults((depth, ts)) = res
+ res
+ }
+ }
+ def glb1(ts0: List[Type]): Type = {
+ try {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val glbOwner = commonOwner(ts)
+ def refinedToParents(t: Type): List[Type] = t match {
+ case RefinedType(ps, _) => ps flatMap refinedToParents
+ case _ => List(t)
+ }
+ def refinedToDecls(t: Type): List[Scope] = t match {
+ case RefinedType(ps, decls) =>
+ val dss = ps flatMap refinedToDecls
+ if (decls.isEmpty) dss else decls :: dss
+ case _ => List()
+ }
+ val ts1 = ts flatMap refinedToParents
+ val glbBase = intersectionType(ts1, glbOwner)
+ val glbType =
+ if (phase.erasedTypes || depth == 0) glbBase
+ else {
+ val glbRefined = refinedType(ts1, glbOwner)
+ val glbThisType = glbRefined.typeSymbol.thisType
+ def glbsym(proto: Symbol): Symbol = {
+ val prototp = glbThisType.memberInfo(proto)
+ val syms = for (t <- ts;
+ alt <- (t.nonPrivateMember(proto.name).alternatives)
+ if glbThisType.memberInfo(alt) matches prototp
+ ) yield alt
+ val symtypes = syms map glbThisType.memberInfo
+ assert(!symtypes.isEmpty)
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+ if (proto.isTerm) glb(symtypes, decr(depth))
+ else {
+ def isTypeBound(tp: Type) = tp match {
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ def glbBounds(bnds: List[Type]): TypeBounds = {
+ val lo = lub(bnds map (_.bounds.lo), decr(depth))
+ val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ if (lo <:< hi) TypeBounds(lo, hi)
+ else throw GlbFailure
+ }
+ val symbounds = symtypes filter isTypeBound
+ var result: Type =
+ if (symbounds.isEmpty)
+ TypeBounds.empty
+ else glbBounds(symbounds)
+ for (t <- symtypes if !isTypeBound(t))
+ if (result.bounds containsType t) result = t
+ else throw GlbFailure
+ result
+ })
+ }
+ if (globalGlbDepth < globalGlbLimit)
+ try {
+ globalGlbDepth += 1
+ val dss = ts flatMap refinedToDecls
+ for (ds <- dss; sym <- ds.iterator)
+ if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
+ try {
+ addMember(glbThisType, glbRefined, glbsym(sym), depth)
+ } catch {
+ case ex: NoCommonType =>
+ }
+ } finally {
+ globalGlbDepth -= 1
+ }
+ if (glbRefined.decls.isEmpty) glbBase else glbRefined
+ }
+ existentialAbstraction(tparams, glbType)
+ } catch {
+ case GlbFailure =>
+ if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
+ else NothingClass.tpe
+ }
+ }
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
+
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = glb0(ts)
+
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+
+ if (ts exists typeIsNotNull) res.notNull else res
+ }
+
+  /** All types in the list must be polytypes with type parameter lists of
+   *  the same length as tparams.
+   *  Returns a list of lists of bounds infos, where the corresponding type
+   *  parameters are renamed to tparams.
+ */
+ private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+ def getBounds(tp: Type): List[Type] = tp match {
+ case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
+ tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
+ case tp =>
+ if (tp ne tp.normalize) getBounds(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map getBounds
+ }
+
+  /** All types in the list must be polytypes with type parameter lists of
+   *  the same length as tparams.
+   *  Returns the list of instance types, where the corresponding type
+   *  parameters are renamed to tparams.
+ */
+ private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+ def transformResultType(tp: Type): Type = tp match {
+ case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
+ restpe.substSym(tparams1, tparams)
+ case tp =>
+ if (tp ne tp.normalize) transformResultType(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map transformResultType
+ }
+
+  /** All types in the list must be method types with equal parameter types.
+   *  Returns the list of their result types.
+ */
+ private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
+ tps map {
+ case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
+ res
+ case NullaryMethodType(res) if pts.isEmpty =>
+ res
+ case _ =>
+ throw new NoCommonType(tps)
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
new file mode 100644
index 0000000000..82321f61c2
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -0,0 +1,617 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import Flags._
+import util.Statistics
+
+trait TypeComparers {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val LogPendingSubTypesThreshold = DefaultLogThreshhold
+
+ private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+
+ class SubTypePair(val tp1: Type, val tp2: Type) {
+ override def hashCode = tp1.hashCode * 41 + tp2.hashCode
+ override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match {
+ // suspend TypeVars in types compared by =:=,
+ // since we don't want to mutate them simply to check whether a subtype test is pending
+ // in addition to making subtyping "more correct" for type vars,
+ // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
+ // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
+ case stp: SubTypePair =>
+ val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t))
+ suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2)
+ case _ =>
+ false
+ })
+ override def toString = tp1+" <:<? "+tp2
+ }
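+
+  // Illustrative note (not part of this change): pendingSubTypes only comes into play once
+  // subsametypeRecursions exceeds LogPendingSubTypesThreshold. Recording the pair currently
+  // being checked lets a query that reduces back to itself (as can happen with F-bounded
+  // types such as trait Rec[T <: Rec[T]]) answer false instead of recursing without bound.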
+
+ private var subsametypeRecursions: Int = 0
+
+ private def isUnifiable(pre1: Type, pre2: Type) =
+ (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
+
+ /** Returns true iff we are past phase specialize,
+ * sym1 and sym2 are two existential skolems with equal names and bounds,
+ * and pre1 and pre2 are equal prefixes
+ */
+ private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+ sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+ sym1.name == sym2.name &&
+ phase.specialized &&
+ sym1.info =:= sym2.info &&
+ pre1 =:= pre2
+ }
+
+ private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
+ if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
+ if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
+ true
+ } else
+ false
+
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
+ if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+
+
+ def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
+ subsametypeRecursions += 1
+ undoLog undo { // undo type constraints that arise from operations in this block
+ !isSameType1(tp1, tp2)
+ }
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case TypeRef(pre1, sym1, _) =>
+ tp2 match {
+ case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
+ case _ => true
+ }
+ case _ => true
+ }
+
+ /** Do `tp1` and `tp2` denote equivalent types? */
+ def isSameType(tp1: Type, tp2: Type): Boolean = try {
+ if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
+ subsametypeRecursions += 1
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless {
+ // isSameType1(tp1, tp2)
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+ try {
+ result = isSameType1(tp1, tp2)
+ }
+ finally if (!result) undoLog.undoTo(before)
+ result
+ }
+ finally undoLog.unlock()
+ }
+ finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = {
+ if ((tp1 eq tp2) ||
+ (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
+ (tp2 eq ErrorType) || (tp2 eq WildcardType))
+ true
+ else if ((tp1 eq NoType) || (tp2 eq NoType))
+ false
+ else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp2.typeSymbol.isPackageClass
+ else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp1.typeSymbol.isPackageClass
+ else {
+ isSameType2(tp1, tp2) || {
+ val tp1n = normalizePlus(tp1)
+ val tp2n = normalizePlus(tp2)
+ ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
+ }
+ }
+ }
+
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ tp1 match {
+ case tr1: TypeRef =>
+ tp2 match {
+ case tr2: TypeRef =>
+ return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ isSameTypes(tr1.args, tr2.args))) ||
+ ((tr1.pre, tr2.pre) match {
+ case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
+ case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
+ case _ => false
+ })
+ case _: SingleType =>
+ return isSameType2(tp2, tp1) // put singleton type on the left, caught below
+ case _ =>
+ }
+ case tt1: ThisType =>
+ tp2 match {
+ case tt2: ThisType =>
+ if (tt1.sym == tt2.sym) return true
+ case _ =>
+ }
+ case st1: SingleType =>
+ tp2 match {
+ case st2: SingleType =>
+ if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
+ case TypeRef(pre2, sym2, Nil) =>
+ if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
+ case _ =>
+ }
+ case ct1: ConstantType =>
+ tp2 match {
+ case ct2: ConstantType =>
+ return (ct1.value == ct2.value)
+ case _ =>
+ }
+ case rt1: RefinedType =>
+ tp2 match {
+ case rt2: RefinedType => //
+ def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
+ sym2 =>
+ var e1 = s1.lookupEntry(sym2.name)
+ (e1 ne null) && {
+ val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
+ var isEqual = false
+ while (!isEqual && (e1 ne null)) {
+ isEqual = e1.sym.info =:= substSym
+ e1 = s1.lookupNextEntry(e1)
+ }
+ isEqual
+ }
+ }
+ //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
+ return isSameTypes(rt1.parents, rt2.parents) && {
+ val decls1 = rt1.decls
+ val decls2 = rt2.decls
+ isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
+ }
+ case _ =>
+ }
+ case mt1: MethodType =>
+ tp2 match {
+ case mt2: MethodType =>
+ return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
+ mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
+ mt1.isImplicit == mt2.isImplicit
+ // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
+ case _ =>
+ }
+ case NullaryMethodType(restpe1) =>
+ tp2 match {
+ // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
+ case NullaryMethodType(restpe2) =>
+ return restpe1 =:= restpe2
+ case _ =>
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+ // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
+ sameLength(tparams1, tparams2) &&
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case TypeBounds(lo1, hi1) =>
+ tp2 match {
+ case TypeBounds(lo2, hi2) =>
+ return lo1 =:= lo2 && hi1 =:= hi2
+ case _ =>
+ }
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp2
+ case _ =>
+ }
+ tp2 match {
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp1
+ case _ =>
+ }
+ tp1 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp2, typeVarLHS = true)
+ case _ =>
+ }
+ tp2 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp1, typeVarLHS = false)
+ case _ =>
+ }
+ tp1 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp2 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp1 match {
+ case _: SingletonType =>
+ tp2 match {
+ case _: SingletonType =>
+ def chaseDealiasedUnderlying(tp: Type): Type = {
+ var origin = tp
+ var next = origin.underlying.dealias
+ while (next.isInstanceOf[SingletonType]) {
+ assert(origin ne next, origin)
+ origin = next
+ next = origin.underlying.dealias
+ }
+ origin
+ }
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+ }
+
+ def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
+
+ def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
+ subsametypeRecursions += 1
+
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+ // if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ // val p = new SubTypePair(tp1, tp2)
+ // if (pendingSubTypes(p))
+ // false
+ // else
+ // try {
+ // pendingSubTypes += p
+ // isSubType2(tp1, tp2, depth)
+ // } finally {
+ // pendingSubTypes -= p
+ // }
+ // } else {
+ // isSubType2(tp1, tp2, depth)
+ // }
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType2(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType2(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
+
+ result
+ } finally undoLog.unlock()
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
+ val PolyType(tparams1, res1) = tp1
+ val PolyType(tparams2, res2) = tp2
+
+ sameLength(tparams1, tparams2) && {
+ // fast-path: polymorphic method type -- type params cannot be captured
+ val isMethod = tparams1.head.owner.isMethod
+ //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala
+ val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
+ def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
+ def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
+ def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
+
+ (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
+ }
+ }
+
+ // @assume tp1.isHigherKinded || tp2.isHigherKinded
+ def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
+ case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side
+ case (_, TypeRef(_, NothingClass, _)) => false
+ case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType)
+ case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side
+ case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes
+ def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s"
+ devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}")
+ false
+ }
+
+ ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type
+ || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
+ || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's
+ )
+ }
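+
+  // Illustrative note (not part of this change): normalize eta-expands a bare type constructor,
+  // so a check such as List <:< Iterable (both taken as unapplied constructors) becomes a
+  // comparison of [A]List[A] against [A]Iterable[A] in isPolySubType, which matches the bounds
+  // pairwise and then compares the result types under shared, freshly substituted parameters.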
+
+ /** Does type `tp1` conform to `tp2`? */
+ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
+ if ((tp1 eq NoType) || (tp2 eq NoType)) return false
+ if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
+ if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
+ if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
+
+ /** First try, on the right:
+ * - unwrap Annotated types, BoundedWildcardTypes,
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - handle common cases for first-kind TypeRefs on both sides as a fast path.
+ */
+ def firstTry = tp2 match {
+ // fast path: two typerefs, none of them HK
+ case tr2: TypeRef =>
+ tp1 match {
+ case tr1: TypeRef =>
+ val sym1 = tr1.sym
+ val sym2 = tr2.sym
+ val pre1 = tr1.pre
+ val pre2 = tr2.pre
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+ else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
+ (isUnifiable(pre1, pre2) ||
+ isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
+ sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
+ isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
+ ||
+ sym2.isClass && {
+ val base = tr1 baseType sym2
+ (base ne tr1) && isSubType(base, tr2, depth)
+ }
+ ||
+ thirdTryRef(tr1, tr2))
+ case _ =>
+ secondTry
+ }
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1, bounds.hi, depth)
+ case tv2 @ TypeVar(_, constr2) =>
+ tp1 match {
+ case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
+ secondTry
+ case _ =>
+ tv2.registerBound(tp1, isLowerBound = true)
+ }
+ case _ =>
+ secondTry
+ }
+
+ /** Second try, on the left:
+ * - unwrap AnnotatedTypes, BoundedWildcardTypes,
+ * - bind typevars,
+ * - handle existential types by skolemization.
+ */
+ def secondTry = tp1 match {
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1.bounds.lo, tp2, depth)
+ case tv @ TypeVar(_,_) =>
+ tv.registerBound(tp2, isLowerBound = false)
+ case ExistentialType(_, _) =>
+ try {
+ skolemizationLevel += 1
+ isSubType(tp1.skolemizeExistential, tp2, depth)
+ } finally {
+ skolemizationLevel -= 1
+ }
+ case _ =>
+ thirdTry
+ }
+
+ def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ val sym2 = tp2.sym
+ sym2 match {
+ case NotNullClass => tp1.isNotNull
+ case SingletonClass => tp1.isStable || fourthTry
+ case _: ClassSymbol =>
+ if (isRawType(tp2))
+ isSubType(tp1, rawToExistential(tp2), depth)
+ else if (sym2.name == tpnme.REFINE_CLASS_NAME)
+ isSubType(tp1, sym2.info, depth)
+ else
+ fourthTry
+ case _: TypeSymbol =>
+ if (sym2 hasFlag DEFERRED) {
+ val tp2a = tp2.bounds.lo
+ isDifferentTypeConstructor(tp2, tp2a) &&
+ isSubType(tp1, tp2a, depth) ||
+ fourthTry
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ fourthTry
+ }
+ }
+
+ /** Third try, on the right:
+ * - decompose refined types.
+ * - handle typerefs, existentials, and notnull types.
+ * - handle left+right method types, polytypes, typebounds
+ */
+ def thirdTry = tp2 match {
+ case tr2: TypeRef =>
+ thirdTryRef(tp1, tr2)
+ case rt2: RefinedType =>
+ (rt2.parents forall (isSubType(tp1, _, depth))) &&
+ (rt2.decls forall (specializesSym(tp1, _, depth)))
+ case et2: ExistentialType =>
+ et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
+ case nn2: NotNullType =>
+ tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
+ case mt2: MethodType =>
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ val params2 = mt2.params
+ val res2 = mt2.resultType
+ (sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ isSubType(res1.substSym(params1, params2), res2, depth))
+ // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
+ case _ =>
+ false
+ }
+ case pt2 @ NullaryMethodType(_) =>
+ tp1 match {
+ // TODO: consider MethodType mt for which mt.params.isEmpty??
+ case pt1 @ NullaryMethodType(_) =>
+ isSubType(pt1.resultType, pt2.resultType, depth)
+ case _ =>
+ false
+ }
+ case TypeBounds(lo2, hi2) =>
+ tp1 match {
+ case TypeBounds(lo1, hi1) =>
+ isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
+ case _ =>
+ false
+ }
+ case _ =>
+ fourthTry
+ }
+
+ /** Fourth try, on the left:
+ * - handle typerefs, refined types, notnull and singleton types.
+ */
+ def fourthTry = tp1 match {
+ case tr1 @ TypeRef(pre1, sym1, _) =>
+ sym1 match {
+ case NothingClass => true
+ case NullClass =>
+ tp2 match {
+ case TypeRef(_, sym2, _) =>
+ containsNull(sym2)
+ case _ =>
+ isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
+ }
+ case _: ClassSymbol =>
+ if (isRawType(tp1))
+ isSubType(rawToExistential(tp1), tp2, depth)
+ else if (sym1.isModuleClass) tp2 match {
+ case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
+ case _ => false
+ }
+ else if (sym1.isRefinementClass)
+ isSubType(sym1.info, tp2, depth)
+ else false
+
+ case _: TypeSymbol =>
+ if (sym1 hasFlag DEFERRED) {
+ val tp1a = tp1.bounds.hi
+ isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ false
+ }
+ case RefinedType(parents1, _) =>
+ parents1 exists (isSubType(_, tp2, depth))
+ case _: SingletonType | _: NotNullType =>
+ isSubType(tp1.underlying, tp2, depth)
+ case _ =>
+ false
+ }
+
+ firstTry
+ }
+
+
+ def isWeakSubType(tp1: Type, tp2: Type) =
+ tp1.deconst.normalize match {
+ case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ isNumericSubClass(sym1, sym2)
+ case tv2 @ TypeVar(_, _) =>
+ tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case tv1 @ TypeVar(_, _) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case _ =>
+ isSubType(tp1, tp2)
+ }
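+
+  // Illustrative note (not part of this change): weak conformance layers the numeric widening
+  // chain on top of ordinary subtyping, e.g.
+  //   isWeakSubType(IntClass.tpe, LongClass.tpe)    // true, via isNumericSubClass
+  //   isWeakSubType(IntClass.tpe, StringClass.tpe)  // falls back to isSubType, hence false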
+
+ /** The isNumericValueType tests appear redundant, but without them
+ * test/continuations-neg/function3.scala goes into an infinite loop.
+ * (Even if the calls are to typeSymbolDirect.)
+ */
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
+ isNumericValueType(tp1)
+ && isNumericValueType(tp2)
+ && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
+ )
+
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
new file mode 100644
index 0000000000..a002b01f70
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -0,0 +1,282 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ generic }
+import generic.Clearable
+
+
+private[internal] trait TypeConstraints {
+ self: SymbolTable =>
+ import definitions._
+
+  /** A log of type variables with their original constraints. Used in order
+ * to undo constraints in the case of isSubType/isSameType failure.
+ */
+ lazy val undoLog = newUndoLog
+
+ protected def newUndoLog = new UndoLog
+
+ class UndoLog extends Clearable {
+ private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ //OPT this method is public so we can do `manual inlining`
+ var log: UndoPairs = List()
+
+ /*
+     * These two methods provide the explicit locking mechanism that is overridden in SynchronizedUndoLog.
+     *
+     * The idea behind the explicit locking mechanism is that all public methods that access mutable state
+     * have to obtain the lock for their entire execution, so that both reads and writes are kept in the
+     * right order. Originally, that was achieved by overriding those public methods in
+     * `SynchronizedUndoLog`, which was fine but expensive. The reason is that those public methods take
+     * a thunk as an argument, and if we keep them non-final there is no way to have them inlined, so the
+     * thunks cannot go away.
+     *
+     * By using explicit locking we can achieve inlining.
+     *
+     * NOTE: They are made public for now so we can apply 'manual inlining' (copy & pasting the
+     * implementation of `undo` or `undoUnless` into hot places). This should be changed back to protected
+     * once the inliner is fixed.
+ */
+ def lock(): Unit = ()
+ def unlock(): Unit = ()
+
+ // register with the auto-clearing cache manager
+ perRunCaches.recordCache(this)
+
+    /** Undo all changes to constraints on type variables up to `limit`. */
+ //OPT this method is public so we can do `manual inlining`
+ def undoTo(limit: UndoPairs) {
+ assertCorrectThread()
+ while ((log ne limit) && log.nonEmpty) {
+ val (tv, constr) = log.head
+ tv.constr = constr
+ log = log.tail
+ }
+ }
+
+ /** No sync necessary, because record should only
+ * be called from within an undo or undoUnless block,
+ * which is already synchronized.
+ */
+ private[reflect] def record(tv: TypeVar) = {
+ log ::= ((tv, tv.constr.cloneInternal))
+ }
+
+ def clear() {
+ lock()
+ try {
+ if (settings.debug.value)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
+ } finally unlock()
+ }
+
+ // `block` should not affect constraints on typevars
+ def undo[T](block: => T): T = {
+ lock()
+ try {
+ val before = log
+
+ try block
+ finally undoTo(before)
+ } finally unlock()
+ }
+ }
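+
+  // Illustrative sketch (not part of this change) of the intended usage: constraints recorded
+  // on type variables inside the block are always rolled back afterwards, as isDifferentType
+  // does in TypeComparers.
+  //   undoLog undo {
+  //     tp1 =:= tp2   // any TypeVar constraints registered here do not survive the block
+  //   }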
+
+ /** @PP: Unable to see why these apparently constant types should need vals
+ * in every TypeConstraint, I lifted them out.
+ */
+ private lazy val numericLoBound = IntClass.tpe
+ private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
+
+ /** A class expressing upper and lower bounds constraints of type variables,
+ * as well as their instantiations.
+ */
+ class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
+ def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
+ def this() = this(List(), List())
+
+ /* Syncnote: Type constraints are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of any
+     * variables should be necessary.
+ */
+
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound. [Actually
+ * guarding addLoBound/addHiBound somehow broke raw types so it
+ * only guards against being created with them.]
+ */
+ private var lobounds = lo0 filterNot typeIsNothing
+ private var hibounds = hi0 filterNot typeIsAny
+ private var numlo = numlo0
+ private var numhi = numhi0
+ private var avoidWidening = avoidWidening0
+
+ def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
+ def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
+ def avoidWiden: Boolean = avoidWidening
+
+ def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+ // For some reason which is still a bit fuzzy, we must let Nothing through as
+ // a lower bound despite the fact that Nothing is always a lower bound. My current
+ // supposition is that the side-effecting type constraint accumulation mechanism
+ // depends on these subtype tests being performed to make forward progress when
+      // there are mutually recursive type vars.
+ // See pos/t6367 and pos/t6499 for the competing test cases.
+ val mustConsider = tp.typeSymbol match {
+ case NothingClass => true
+ case _ => !(lobounds contains tp)
+ }
+ if (mustConsider) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
+ }
+ }
+
+ def checkWidening(tp: Type) {
+ if(tp.isStable) avoidWidening = true
+ else tp match {
+ case HasTypeMember(_, _) => avoidWidening = true
+ case _ =>
+ }
+ }
+
+ def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+ // My current test case only demonstrates the need to let Nothing through as
+ // a lower bound, but I suspect the situation is symmetrical.
+ val mustConsider = tp.typeSymbol match {
+ case AnyClass => true
+ case _ => !(hibounds contains tp)
+ }
+ if (mustConsider) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
+ }
+ }
+
+ def isWithinBounds(tp: Type): Boolean =
+ lobounds.forall(_ <:< tp) &&
+ hibounds.forall(tp <:< _) &&
+ (numlo == NoType || (numlo weak_<:< tp)) &&
+ (numhi == NoType || (tp weak_<:< numhi))
+
+ var inst: Type = NoType // @M reduce visibility?
+
+ def instValid = (inst ne null) && (inst ne NoType)
+
+ def cloneInternal = {
+ val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
+ tc.inst = inst
+ tc
+ }
+
+ override def toString = {
+ val boundsStr = {
+ val lo = loBounds filterNot typeIsNothing
+ val hi = hiBounds filterNot typeIsAny
+ val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
+ val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
+
+ lostr ++ histr mkString ("[", " | ", "]")
+ }
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
+ }
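+
+  // Illustrative sketch (not part of this change), using types from definitions:
+  //   val c = new TypeConstraint
+  //   c.addLoBound(NothingClass.tpe)      // Nothing is deliberately let through, see addLoBound
+  //   c.addHiBound(AnyValClass.tpe)
+  //   c.isWithinBounds(IntClass.tpe)      // true:  Nothing <:< Int and Int <:< AnyVal
+  //   c.isWithinBounds(StringClass.tpe)   // false: String does not conform to AnyVal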
+
+  /** Solve the constraints collected in the type variables `tvars`.
+ *
+ * @param tvars All type variables to be instantiated.
+ * @param tparams The type parameters corresponding to `tvars`
+ * @param variances The variances of type parameters; need to reverse
+ * solution direction for all contravariant variables.
+ * @param upper When `true` search for max solution else min.
+ */
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean): Boolean =
+ solve(tvars, tparams, variances, upper, AnyDepth)
+
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean, depth: Int): Boolean = {
+
+ def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
+ if (tvar.constr.inst == NoType) {
+ val up = if (variance.isContravariant) !upper else upper
+ tvar.constr.inst = null
+ val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
+ //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
+ var cyclic = bound contains tparam
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
+ )
+ if (ok) {
+ if (tvar2.constr.inst eq null) cyclic = true
+ solveOne(tvar2, tparam2, variance2)
+ }
+ })
+ if (!cyclic) {
+ if (up) {
+ if (bound.typeSymbol != AnyClass) {
+ log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ } else {
+ if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+ log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ }
+ }
+ tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
+
+ //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
+ val newInst = (
+ if (up) {
+ if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
+ } else {
+ if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
+ }
+ )
+ log(s"$tvar setInst $newInst")
+ tvar setInst newInst
+ //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+ }
+ }
+
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
+ foreach3(tvars, tparams, variances)(solveOne)
+ tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
+ }
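+
+  // Illustrative note (not part of this change): for def f[T](x: T): T applied as f(42),
+  // inference creates a TypeVar for T and registers Int as a lower bound; solving with
+  // upper = false then instantiates T to lub(List(Int)), i.e. Int.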
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
new file mode 100644
index 0000000000..51363c0f82
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -0,0 +1,1144 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable, immutable }
+import Flags._
+import scala.annotation.tailrec
+import Variance._
+
+private[internal] trait TypeMaps {
+ self: SymbolTable =>
+ import definitions._
+
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+   *  so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** Remove any occurrence of type <singleton> from this type and its parents */
+ object dropSingletonType extends TypeMap {
+ def apply(tp: Type): Type = {
+ tp match {
+ case TypeRef(_, SingletonClass, _) =>
+ AnyClass.tpe
+ case tp1 @ RefinedType(parents, decls) =>
+ parents filter (_.typeSymbol != SingletonClass) match {
+ case Nil => AnyClass.tpe
+ case p :: Nil if decls.isEmpty => mapOver(p)
+ case ps => mapOver(copyRefinedType(tp1, ps, decls))
+ }
+ case tp1 =>
+ mapOver(tp1)
+ }
+ }
+ }
+
+ /** Type with all top-level occurrences of abstract types replaced by their bounds */
+ object abstractTypesToBounds extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias)
+ case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi)
+ case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls)
+ case AnnotatedType(_, _, _) => mapOver(tp)
+ case _ => tp // no recursion - top level only
+ }
+ }
+
+ // Set to true for A* => Seq[A]
+ // (And it will only rewrite A* in method result types.)
+ // This is the pre-existing behavior.
+ // Or false for Seq[A] => Seq[A]
+ // (It will rewrite A* everywhere but method parameters.)
+ // This is the specified behavior.
+ protected def etaExpandKeepsStar = false
+
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
+ object dropIllegalStarTypes extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case MethodType(params, restpe) =>
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
+ seqType(arg)
+ case _ =>
+ if (etaExpandKeepsStar) tp else mapOver(tp)
+ }
+ }
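+
+  // Illustrative note (not part of this change): only the repeated-parameter marker outside of
+  // parameter position is rewritten, so a method type like (xs: Int*)Int* becomes
+  // (xs: Int*)Seq[Int] while the parameter's own Int* is left alone.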
+
+ trait AnnotationFilter extends TypeMap {
+ def keepAnnotation(annot: AnnotationInfo): Boolean
+
+ override def mapOver(annot: AnnotationInfo) =
+ if (keepAnnotation(annot)) super.mapOver(annot)
+ else UnmappableAnnotation
+ }
+
+ trait KeepOnlyTypeConstraints extends AnnotationFilter {
+ // filter keeps only type constraint annotations
+ def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
+ }
+
+ // todo. move these into scala.reflect.api
+
+ /** A prototype for mapping a function over all possible types
+ */
+ abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) {
+ def this() = this(trackVariance = false)
+ def apply(tp: Type): Type
+
+ private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant
+
+ def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x }
+ def variance = _variance
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = tp match {
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 = (
+ if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty)
+ mapOverArgs(args, sym.typeParams)
+ else
+ args mapConserve this
+ )
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case ThisType(_) => tp
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else singleType(pre1, sym)
+ }
+ case MethodType(params, result) =>
+ val params1 = flipped(mapOver(params))
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ val tparams1 = flipped(mapOver(tparams))
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case NullaryMethodType(result) =>
+ val result1 = this(result)
+ if (result1 eq result) tp
+ else NullaryMethodType(result1)
+ case ConstantType(_) => tp
+ case SuperType(thistp, supertp) =>
+ val thistp1 = this(thistp)
+ val supertp1 = this(supertp)
+ if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
+ else SuperType(thistp1, supertp1)
+ case TypeBounds(lo, hi) =>
+ val lo1 = flipped(this(lo))
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case BoundedWildcardType(bounds) =>
+ val bounds1 = this(bounds)
+ if (bounds1 eq bounds) tp
+ else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
+ case rtp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve this
+ val decls1 = mapOver(decls)
+ copyRefinedType(rtp, parents1, decls1)
+ case ExistentialType(tparams, result) =>
+ val tparams1 = mapOver(tparams)
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ case OverloadedType(pre, alts) =>
+ val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
+ if (pre1 eq pre) tp
+ else OverloadedType(pre1, alts)
+ case AntiPolyType(pre, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve this
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else AntiPolyType(pre1, args1)
+ case tv@TypeVar(_, constr) =>
+ if (constr.instValid) this(constr.inst)
+ else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
+ case NotNullType(tp) =>
+ val tp1 = this(tp)
+ if (tp1 eq tp) tp
+ else NotNullType(tp1)
+ case AnnotatedType(annots, atp, selfsym) =>
+ val annots1 = mapOverAnnotations(annots)
+ val atp1 = this(atp)
+ if ((annots1 eq annots) && (atp1 eq atp)) tp
+ else if (annots1.isEmpty) atp1
+ else AnnotatedType(annots1, atp1, selfsym)
+ /*
+ case ErrorType => tp
+ case WildcardType => tp
+ case NoType => tp
+ case NoPrefix => tp
+ case ErasedSingleType(sym) => tp
+ */
+ case _ =>
+ tp
+ // throw new Error("mapOver inapplicable for " + tp);
+ }
+
+ def withVariance[T](v: Variance)(body: => T): T = {
+ val saved = variance
+ variance = v
+ try body finally variance = saved
+ }
+ @inline final def flipped[T](body: => T): T = {
+ if (trackVariance) variance = variance.flip
+ try body
+ finally if (trackVariance) variance = variance.flip
+ }
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = (
+ if (trackVariance)
+ map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg)))
+ else
+ args mapConserve this
+ )
+ /** Applies this map to the symbol's info, setting variance = Invariant
+ * if necessary when the symbol is an alias.
+ */
+ private def applyToSymbolInfo(sym: Symbol): Type = {
+ if (trackVariance && !variance.isInvariant && sym.isAliasType)
+ withVariance(Invariant)(this(sym.info))
+ else
+ this(sym.info)
+ }
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = {
+ @tailrec def loop(syms: List[Symbol]): Boolean = syms match {
+ case Nil => true
+ case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs)
+ }
+ loop(origSyms)
+ }
+
+ /** Map this function over given scope */
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ /** Map this function over given list of symbols */
+ def mapOver(origSyms: List[Symbol]): List[Symbol] = {
+ // fast path in case nothing changes due to map
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
+ }
+
+ def mapOver(annot: AnnotationInfo): AnnotationInfo = {
+ val AnnotationInfo(atp, args, assocs) = annot
+ val atp1 = mapOver(atp)
+ val args1 = mapOverAnnotArgs(args)
+ // there is no need to rewrite assocs, as they are constants
+
+ if ((args eq args1) && (atp eq atp1)) annot
+ else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
+ else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+ }
+
+ def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
+ val annots1 = annots mapConserve mapOver
+ if (annots1 eq annots) annots
+ else annots1 filterNot (_ eq UnmappableAnnotation)
+ }
+
+ /** Map over a set of annotation arguments. If any
+ * of the arguments cannot be mapped, then return Nil. */
+ def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
+ val args1 = args mapConserve mapOver
+ if (args1 contains UnmappableTree) Nil
+ else args1
+ }
+
+ def mapOver(tree: Tree): Tree =
+ mapOver(tree, () => return UnmappableTree)
+
+ /** Map a tree that is part of an annotation argument.
+ * If the tree cannot be mapped, then invoke giveup().
+ * The default is to transform the tree with
+ * TypeMapTransformer.
+ */
+ def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
+ (new TypeMapTransformer).transform(tree)
+
+ /** This transformer leaves the tree alone except to remap
+ * its types. */
+ class TypeMapTransformer extends Transformer {
+ override def transform(tree: Tree) = {
+ val tree1 = super.transform(tree)
+ val tpe1 = TypeMap.this(tree1.tpe)
+ if ((tree eq tree1) && (tree.tpe eq tpe1))
+ tree
+ else
+ tree1.shallowDuplicate.setType(tpe1)
+ }
+ }
+ }
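+
+  // Illustrative sketch (not part of this change): a minimal TypeMap that rewrites a single
+  // class reference and otherwise relies on mapOver for the recursion.
+  //   object intToLong extends TypeMap {
+  //     def apply(tp: Type): Type = tp match {
+  //       case TypeRef(_, IntClass, Nil) => LongClass.tpe
+  //       case _                         => mapOver(tp)
+  //     }
+  //   }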
+
+ abstract class TypeTraverser extends TypeMap {
+ def traverse(tp: Type): Unit
+ def apply(tp: Type): Type = { traverse(tp); tp }
+ }
+
+ abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+ def result: T
+ def clear(): Unit
+ }
+
+ abstract class TypeCollector[T](initial: T) extends TypeTraverser {
+ var result: T = _
+ def collect(tp: Type) = {
+ result = initial
+ traverse(tp)
+ result
+ }
+ }
+
+ /** The raw to existential map converts a ''raw type'' to an existential type.
+ * It is necessary because we might have read a raw type of a
+ * parameterized Java class from a class file. At the time we read the type
+ * the corresponding class file might still not be read, so we do not
+ * know what the type parameters of the type are. Therefore
+ * the conversion of raw types to existential types might not have taken place
+   *  in ClassfileParser.sigToType (where it is usually done).
+ */
+ def rawToExistential = new TypeMap {
+ private var expanded = immutable.Set[Symbol]()
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
+ if (expanded contains sym) AnyRefClass.tpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ /***
+ *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+ object rawToExistentialInJava extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ // any symbol that occurs in a java sig, not just java symbols
+ // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+ case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+ val eparams = typeParamsToExistentials(sym, sym.typeParams)
+ existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ */
+
+ /** Used by existentialAbstraction.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) {
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ /** If these conditions all hold:
+ * 1) we are in covariant (or contravariant) position
+ * 2) this type occurs exactly once in the existential scope
+ * 3) the widened upper (or lower) bound of this type contains no references to tparams
+ * Then we replace this lone occurrence of the type with the widened upper (or lower) bound.
+ * All other types pass through unchanged.
+ */
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance.isInvariant) tp1
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ val count = occurCount(sym)
+ val containsTypeParam = tparams exists (repl contains _)
+ def msg = {
+ val word = if (variance.isPositive) "upper" else "lower"
+ s"Widened lone occurrence of $tp1 inside existential to $word bound"
+ }
+ if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
+ logResult(msg)(repl)
+ else
+ tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
+
+ /** Might the given symbol be important when calculating the prefix
+ * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+ * the result will be `tp` unchanged if `pre` is trivial and `clazz`
+ * is a symbol such that isPossiblePrefix(clazz) == false.
+ */
+ def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
+ protected[internal] def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
+ def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new AsSeenFromMap(pre, clazz)
+
+ /** A map to compute the asSeenFrom method.
+ */
+ class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ // Some example source constructs relevant in asSeenFrom:
+ //
+ // object CaptureThis {
+ // trait X[A] { def f: this.type = this }
+ // class Y[A] { def f: this.type = this }
+ // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton
+ // def f1[B] = new X[B] { }
+ // // TODO - why is the behavior different when it's a class?
+ // def f2[B] = new Y[B] { }
+ // }
+ // class CaptureVal[T] {
+ // val f: java.util.List[_ <: T] = null
+ // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1
+ // def g = f get 0
+ // }
+ // class ClassParam[T] {
+ // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam)
+ // class Inner(lhs: T) { def f = lhs }
+ // }
+ def capturedParams: List[Symbol] = _capturedParams
+ def capturedSkolems: List[Symbol] = _capturedSkolems
+
+ def apply(tp: Type): Type = tp match {
+ case tp @ ThisType(_) => thisTypeAsSeen(tp)
+ case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp)
+ case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp)
+ case _ => mapOver(tp)
+ }
+
+ private var _capturedSkolems: List[Symbol] = Nil
+ private var _capturedParams: List[Symbol] = Nil
+ private val isStablePrefix = seenFromPrefix.isStable
+
+ // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate
+ // but less succinct name.
+ private def isBaseClassOfEnclosingClass(base: Symbol) = {
+ def loop(encl: Symbol): Boolean = (
+ isPossiblePrefix(encl)
+ && ((encl isSubClass base) || loop(encl.owner.enclClass))
+ )
+ // The hasCompleteInfo guard is necessary to avoid cycles during the typing
+ // of certain classes, notably ones defined inside package objects.
+ !base.hasCompleteInfo || loop(seenFromClass)
+ }
+
+ /** Is the symbol a class type parameter from one of the enclosing
+ * classes, or a base class of one of them?
+ */
+ private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = (
+ sym.isTypeParameter
+ && sym.owner.isClass
+ && isBaseClassOfEnclosingClass(sym.owner)
+ )
+
+ /** Creates an existential representing a type parameter which appears
+ * in the prefix of a ThisType.
+ */
+ protected def captureThis(pre: Type, clazz: Symbol): Type = {
+ capturedParams find (_.owner == clazz) match {
+ case Some(p) => p.tpe
+ case _ =>
+ val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ _capturedParams ::= qvar
+ debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
+ qvar.tpe
+ }
+ }
+ protected def captureSkolems(skolems: List[Symbol]) {
+ for (p <- skolems; if !(capturedSkolems contains p)) {
+ debuglog(s"Captured $p seen from $seenFromPrefix")
+ _capturedSkolems ::= p
+ }
+ }
+
+ /** Find the type argument in an applied type which corresponds to a type parameter.
+ * The arguments are required to be related as follows, through intermediary `clazz`.
+ * An exception will be thrown if this is violated.
+ *
+ * @param lhs its symbol is a type parameter of `clazz`
+ * @param rhs a type application constructed from `clazz`
+ */
+ private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
+ val TypeRef(_, lhsSym, lhsArgs) = lhs
+ val TypeRef(_, rhsSym, rhsArgs) = rhs
+ require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+
+ // Find the type parameter position; we'll use the corresponding argument
+ val argIndex = rhsSym.typeParams indexOf lhsSym
+
+ if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application
+ appliedType(rhsArgs(argIndex), lhsArgs mapConserve this)
+ else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641
+ ErrorType
+ else
+ abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs")
+ }
+
+ // 0) @pre: `classParam` is a class type parameter
+ // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam`
+ // 2) Take the base type of the prefix at that point with respect to the owning class
+ // 3) Solve for the type parameters through correspondence with the type args of the base type
+ //
+ // Only class type parameters (and not skolems) are considered, because other type parameters
+ // are not influenced by the prefix through which they are seen. Note that type params of
+ // anonymous type functions, which currently can only arise from normalising type aliases, are
+ // owned by the type alias of which they are the eta-expansion.
+ private def classParameterAsSeen(classParam: Type): Type = {
+ val TypeRef(_, tparam, _) = classParam
+
+ def loop(pre: Type, clazz: Symbol): Type = {
+ // have to deconst because it may be a Class[T]
+ def nextBase = (pre baseType clazz).deconst
+ //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
+ if (skipPrefixOf(pre, clazz))
+ mapOver(classParam)
+ else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
+ loop(nextBase.prefix, clazz.owner)
+ else nextBase match {
+ case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied)
+ case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
+ case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
+ }
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ // Does the candidate symbol match the given prefix and class?
+ // Since pre may be something like ThisType(A) where trait A { self: B => },
+ // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
+ // B will not be considered.
+ private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match {
+ case _: TypeVar => false
+ case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz)
+ }
+
+ // Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
+ private[this] var wroteAnnotation = false
+ private object annotationArgRewriter extends TypeMapTransformer {
+ private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz)
+
+ // what symbol should really be used?
+ private def newThis(): Tree = {
+ wroteAnnotation = true
+ val presym = seenFromPrefix.widen.typeSymbol
+ val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix
+ gen.mkAttributedQualifier(seenFromPrefix, thisSym)
+ }
+
+ /** Rewrite `This` trees in annotation argument trees */
+ override def transform(tree: Tree): Tree = super.transform(tree) match {
+ case This(_) if matchesThis(tree.symbol) => newThis()
+ case tree => tree
+ }
+ }
+
+ // This becomes considerably cheaper if we optimize for the common cases:
+ // where the prefix is stable and where no This nodes are rewritten. If
+ // either is true, then we don't need to worry about calling giveup. So if
+ // the prefix is unstable, use a stack variable to indicate whether the tree
+ // was touched. This takes us to one allocation per AsSeenFromMap rather
+ // than an allocation on every call to mapOver, and no extra work when the
+ // tree only has its types remapped.
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ if (isStablePrefix)
+ annotationArgRewriter transform tree
+ else {
+ val saved = wroteAnnotation
+ wroteAnnotation = false
+ try annotationArgRewriter transform tree
+ finally if (wroteAnnotation) giveup() else wroteAnnotation = saved
+ }
+ }
+
+ private def thisTypeAsSeen(tp: ThisType): Type = {
+ def loop(pre: Type, clazz: Symbol): Type = {
+ val pre1 = pre match {
+ case SuperType(thistpe, _) => thistpe
+ case _ => pre
+ }
+ if (skipPrefixOf(pre, clazz))
+ mapOver(tp) // TODO - is mapOver necessary here?
+ else if (!matchesPrefixAndClass(pre, clazz)(tp.sym))
+ loop((pre baseType clazz).prefix, clazz.owner)
+ else if (pre1.isStable)
+ pre1
+ else
+ captureThis(pre1, clazz)
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ private def singleTypeAsSeen(tp: SingleType): Type = {
+ val SingleType(pre, sym) = tp
+
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else if (pre1.isStable) singleType(pre1, sym)
+ else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
+ }
+
+ override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)"
+ }
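The substitution performed by this map is what `asSeenFrom` exposes; with the runtime reflection API the effect can be observed through `typeSignatureIn` (a minimal sketch, assuming a Scala 2.11-style reflection API):

    import scala.reflect.runtime.universe._

    class Outer[A] { def a: A = ??? }

    val site = typeOf[Outer[Int]]
    val aSym = site.member(TermName("a"))
    // the member's signature as seen from the prefix Outer[Int]:
    // the class type parameter A is replaced by Int
    println(aSym.typeSignatureIn(site))   // prints roughly: => Int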
+
+ /** A base class to compute all substitutions */
+ abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
+
+ /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
+ protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
+
+ /** Map target to type, can be tuned by subclasses */
+ protected def toType(fromtp: Type, tp: T): Type
+
+ protected def renameBoundSyms(tp: Type): Type = tp match {
+ case MethodType(ps, restp) =>
+ createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
+ case PolyType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
+ case ExistentialType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)(newExistentialType)
+ case _ =>
+ tp
+ }
+
+ @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = (
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
+ )
+
+ def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+ val boundSyms = tp0.boundSyms
+ val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
+ val tp = mapOver(tp1)
+ def substFor(sym: Symbol) = subst(tp, sym, from, to)
+
+ tp match {
+ // @M
+ // 1) arguments must also be substituted (even when the "head" of the
+ // applied type has already been substituted)
+ // example: (subst RBound[RT] from [type RT,type RBound] to
+ // [type RT&,type RBound&]) = RBound&[RT&]
+ // 2) avoid loops (which occur because alpha-conversion is
+ // not performed properly imo)
+ // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
+ // we must replace the a in Iterable[a] by (a,b)
+ // (must not recurse --> loops)
+ // 3) replacing m by List in m[Int] should yield List[Int], not just List
+ case TypeRef(NoPrefix, sym, args) =>
+ val tcon = substFor(sym)
+ if ((tp eq tcon) || args.isEmpty) tcon
+ else appliedType(tcon.typeConstructor, args)
+ case SingleType(NoPrefix, sym) =>
+ substFor(sym)
+ case _ =>
+ tp
+ }
+ }
+ }
+
+ /** A map to implement the `substSym` method. */
+ class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
+ protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
+ case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
+ case SingleType(pre, _) => singleType(pre, sym)
+ }
+ @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = (
+ if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
+ else if (matches(from.head, sym)) to.head
+ else subst(sym, from.tail, to.tail)
+ )
+ private def substFor(sym: Symbol) = subst(sym, from, to)
+
+ override def apply(tp: Type): Type = (
+ if (from.isEmpty) tp
+ else tp match {
+ case TypeRef(pre, sym, args) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ // mapOver takes care of subst'ing in args
+ mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
+ // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
+ case SingleType(pre, sym) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
+ case _ =>
+ super.apply(tp)
+ }
+ )
+
+ object mapTreeSymbols extends TypeMapTransformer {
+ val strictCopy = newStrictTreeCopier
+
+ def termMapsTo(sym: Symbol) = from indexOf sym match {
+ case -1 => None
+ case idx => Some(to(idx))
+ }
+
+ // if tree.symbol is mapped to another symbol, passes the new symbol into the
+ // constructor `trans` and sets the symbol and the type on the resulting tree.
+ def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
+ case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
+ case None => tree
+ }
+
+ // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
+ override def transform(tree: Tree) = {
+ // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
+ super.transform(tree) match {
+ case id @ Ident(_) =>
+ transformIfMapped(id)(toSym =>
+ strictCopy.Ident(id, toSym.name))
+
+ case sel @ Select(qual, name) =>
+ transformIfMapped(sel)(toSym =>
+ strictCopy.Select(sel, qual, toSym.name))
+
+ case tree => tree
+ }
+ }
+ }
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ mapTreeSymbols.transform(tree)
+ }
+ }
+
+ /** A map to implement the `subst` method. */
+ class SubstTypeMap(from: List[Symbol], to: List[Type])
+ extends SubstMap(from, to) {
+ protected def toType(fromtp: Type, tp: Type) = tp
+
+ override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
+ object trans extends TypeMapTransformer {
+ override def transform(tree: Tree) = tree match {
+ case Ident(name) =>
+ from indexOf tree.symbol match {
+ case -1 => super.transform(tree)
+ case idx =>
+ val totpe = to(idx)
+ if (totpe.isStable) tree.duplicate setType totpe
+ else giveup()
+ }
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ trans.transform(tree)
+ }
+ }
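SubstTypeMap is the machinery behind `substituteTypes` in the public reflection API; a small sketch of the observable behaviour (again assuming a Scala 2.11-style reflection API):

    import scala.reflect.runtime.universe._

    class Box[A] { def get: A = ??? }

    val boxClass = typeOf[Box[Int]].typeSymbol.asClass
    val aParam   = boxClass.typeParams.head                               // the symbol of A
    val getSig   = typeOf[Box[Int]].member(TermName("get")).typeSignature // => A
    // substituting Int for the symbol A corresponds to SubstTypeMap(List(A), List(Int))
    println(getSig.substituteTypes(List(aParam), List(typeOf[Int])))      // prints roughly: => Int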
+
+ /** A map to implement the `substThis` method. */
+ class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) if (sym == from) => to
+ case _ => mapOver(tp)
+ }
+ }
+
+ class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
+ def apply(tp: Type): Type = try {
+ tp match {
+ case TypeRef(_, sym, _) if from contains sym =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
+ }
+ } catch {
+ case ex: MalformedType =>
+ WildcardType
+ }
+ }
+
+ // dependent method types
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (tp.isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp)
+ }
+ }
+
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ if (tp.isImmediatelyDependent) WildcardType
+ else mapOver(tp)
+ }
+
+ /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+ */
+ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+ private val actuals = actuals0.toIndexedSeq
+ private val existentials = new Array[Symbol](actuals.size)
+ def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+
+ private object StableArg {
+ def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
+ tp.isStable && (tp.typeSymbol != NothingClass)
+ )
+ }
+ private object Arg {
+ def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
+ }
+
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // unsound to replace args by unstable actual #3873
+ case SingleType(NoPrefix, StableArg(arg)) => arg
+ // (soundly) expand type alias selections on implicit arguments,
+ // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
+ val arg = actuals(pid)
+ val res = typeRef(arg, sym, targs)
+ if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
+ // don't return the original `tp`, which may be different from `tp1`,
+ // due to dropping annotations
+ case tp1 => tp1
+ }
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ private def existentialFor(pid: Int) = {
+ if (existentials(pid) eq null) {
+ val param = params(pid)
+ existentials(pid) = (
+ param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
+ setInfo singletonBounds(actuals(pid))
+ )
+ }
+ existentials(pid)
+ }
+
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+ override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ // TODO: this should be simplified; in the stable case, one can
+ // probably just use an Ident to the tree.symbol.
+ //
+ // @PP: That leads to failure here, where stuff no longer has type
+ // 'String @Annot("stuff")' but 'String @Annot(x)'.
+ //
+ // def m(x: String): String @Annot(x) = x
+ // val stuff = m("stuff")
+ //
+ // (TODO cont.) Why an existential in the non-stable case?
+ //
+ // @PP: In the following:
+ //
+ // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+ //
+ // m is typed as 'String @Annot(x) forSome { val x: String }'.
+ //
+ // Both examples are from run/constrained-types.scala.
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = tree.symbol match {
+ case StableArg(actual) =>
+ gen.mkAttributedQualifier(actual, tree.symbol)
+ case Arg(pid) =>
+ val sym = existentialFor(pid)
+ Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ treeTrans transform arg
+ }
+ }
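The dependent method types this map instantiates look like the following at the language level (illustrative names only):

    class Graph { class Node; def newNode: Node = new Node }

    def firstNode(g: Graph): g.Node = g.newNode   // result type depends on the parameter g

    val g1 = new Graph
    val n1: g1.Node = firstNode(g1)               // stable argument: g.Node instantiates to g1.Node
    // with an unstable argument such as firstNode(new Graph), the result can only be
    // approximated existentially, roughly x.Node forSome { val x: Graph }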
+
+ /** A map to convert every occurrence of a wildcard type to a fresh
+ * type variable */
+ object wildcardToTypeVarMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case WildcardType =>
+ TypeVar(tp, new TypeConstraint)
+ case BoundedWildcardType(bounds) =>
+ TypeVar(tp, new TypeConstraint(bounds))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** A map to convert every occurrence of a type variable to a wildcard type. */
+ object typeVarToOriginMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeVar(origin, _) => origin
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ tp.normalize match {
+ case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+ case SingleType(_, sym1) if (sym == sym1) => result = true
+ case _ => mapOver(tp)
+ }
+ }
+ }
+
+ override def mapOver(arg: Tree) = {
+ for (t <- arg) {
+ traverse(t.tpe)
+ if (t.symbol == sym)
+ result = true
+ }
+ arg
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ if (tp eq t) result = true
+ else mapOver(tp)
+ }
+ }
+ override def mapOver(arg: Tree) = {
+ for (t <- arg)
+ traverse(t.tpe)
+
+ arg
+ }
+ }
+
+ /** A map to implement the `filter` method. */
+ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (p(tp)) result ::= tp
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `collect` method. */
+ class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (pf.isDefinedAt(tp)) result ::= pf(tp)
+ mapOver(tp)
+ }
+ }
+
+ class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
+ def traverse(tp: Type) {
+ f(tp)
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `find` method. */
+ class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
+ def traverse(tp: Type) {
+ if (result.isEmpty) {
+ if (p(tp)) result = Some(tp)
+ mapOver(tp)
+ }
+ }
+ }
+
+ /** A map to implement the `isErroneous` check. */
+ object ErroneousCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ result = tp.isError
+ mapOver(tp)
+ }
+ }
+ }
+
+ object adaptToNewRunMap extends TypeMap {
+
+ private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
+ if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
+ sym
+ else if (sym.isModuleClass) {
+ val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
+
+ sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
+ }
+ else {
+ var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse {
+ if (sym.isAliasType) throw missingAliasException
+ devWarning(s"$pre.$sym no longer exists at phase $phase")
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
+ }
+ /** Whether the two symbols have the same fully qualified name. */
+ def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
+ if (!corresponds(sym.owner, rebind0.owner)) {
+ debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
+ val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
+ if (bcs.isEmpty)
+ assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinement class it might be a structural type => OK to leave it in.
+ else
+ rebind0 = pre.baseType(bcs.head).member(sym.name)
+ debuglog(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym.fullLocationString +
+ ", rebind = " + rebind0.fullLocationString
+ )
+ }
+ rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
+ debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ throw new MalformedType(pre, sym.nameString)
+ }
+ }
+ }
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp
+ }
+ case SingleType(pre, sym) =>
+ if (sym.isPackage) tp
+ else {
+ val pre1 = this(pre)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym)) tp
+ else singleType(pre1, sym1)
+ } catch {
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isPackageClass) tp
+ else {
+ val pre1 = this(pre)
+ val args1 = args mapConserve (this)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+ tp
+ } else if (sym1 == NoSymbol) {
+ devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym")
+ tp
+ } else {
+ copyTypeRef(tp, pre1, sym1, args1)
+ }
+ } catch {
+ case ex: MissingAliasControl =>
+ apply(tp.dealias)
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case MethodType(params, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else copyMethodType(tp, params, restp1)
+ case NullaryMethodType(restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else NullaryMethodType(restp1)
+ case PolyType(tparams, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else PolyType(tparams, restp1)
+
+ // Lukas: we need to check (together) whether we should also include parameter types
+ // of PolyType and MethodType in adaptToNewRun
+
+ case ClassInfoType(parents, decls, clazz) =>
+ if (clazz.isPackageClass) tp
+ else {
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else ClassInfoType(parents1, decls, clazz)
+ }
+ case RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
+ case SuperType(_, _) => mapOver(tp)
+ case TypeBounds(_, _) => mapOver(tp)
+ case TypeVar(_, _) => mapOver(tp)
+ case AnnotatedType(_,_,_) => mapOver(tp)
+ case NotNullType(_) => mapOver(tp)
+ case ExistentialType(_, _) => mapOver(tp)
+ case _ => tp
+ }
+ }
+
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
new file mode 100644
index 0000000000..263b0f5a3e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+package internal
+package tpe
+
+private[internal] trait TypeToStrings {
+ self: SymbolTable =>
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+
+ protected def typeToString(tpe: Type): String =
+ if (tostringRecursions >= maxTostringRecursions) {
+ devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe))
+ if (settings.debug.value)
+ (new Throwable).printStackTrace
+
+ "..."
+ }
+ else
+ try {
+ tostringRecursions += 1
+ tpe.safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+}
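The guard pattern used above, stripped of compiler specifics, can be sketched on its own (names here are illustrative, not from the patch):

    object RenderGuard {
      final val MaxRecursions = 50
      private var recursions = 0

      // render via `body`, but give up with "..." once the nesting limit is hit,
      // so a pathologically recursive structure cannot overflow the stack while printing
      def guarded(body: => String): String =
        if (recursions >= MaxRecursions) "..."
        else try { recursions += 1; body } finally recursions -= 1
    }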
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 5467d70cea..a130013398 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -11,8 +11,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def inform(msg: String): Unit = log(msg)
def picklerPhase = internal.SomePhase
- def forInteractive = false
- def forScaladoc = false
lazy val settings = new Settings
private val isLogging = sys.props contains "scala.debug.reflect"
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala
index 434f19f21b..434f19f21b 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/repl/scala/tools/nsc/Interpreter.scala
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala
index a0be3f4fdb..a0be3f4fdb 100644
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ b/src/repl/scala/tools/nsc/InterpreterLoop.scala
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index adb03ca374..7195424cf9 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
-import interpreter.{ ILoop }
import GenericRunnerCommand._
object JarRunner extends CommonRunner {
@@ -78,7 +77,7 @@ class MainGenericRunner {
Right(false)
case _ =>
// We start the repl when no arguments are given.
- Right(new ILoop process settings)
+ Right(new interpreter.ILoop process settings)
}
/** If -e and -i were both given, we want to execute the -e code after the
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
new file mode 100644
index 0000000000..712219533d
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.io.AbstractFile
+
+@deprecated("Use `scala.tools.nsc.util.AbstractFileClassLoader`", "2.11.0")
+class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends util.AbstractFileClassLoader(root, parent)
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index e66e4eff29..e66e4eff29 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
index 48890a21c6..e1e3678837 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
@@ -28,15 +28,5 @@ object ByteCode {
method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
}
- /** Scala sig bytes.
- */
- def scalaSigBytesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
- }
- yield names
-
def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
index 0ab92ab769..0ab92ab769 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala
index 84a5cb49ae..84a5cb49ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
index 3dd5d93390..3dd5d93390 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
index d24ad60974..d24ad60974 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index 48af261937..48af261937 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
index e88a044931..e88a044931 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 9edd54b939..9edd54b939 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
index 43e653edfd..43e653edfd 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 2ea255319d..599a061984 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -11,7 +11,6 @@ import java.io.{ BufferedReader, FileReader }
import session._
import scala.annotation.tailrec
import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
-import scala.tools.util.{ Javap }
import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import io.{ File, Directory }
import util.ScalaClassLoader
@@ -19,7 +18,7 @@ import ScalaClassLoader._
import scala.tools.util._
import scala.language.{implicitConversions, existentials}
import scala.reflect.classTag
-import scala.tools.reflect.StdRuntimeTags._
+import StdReplTags._
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index c54b01dbb0..cb2e3c32b8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -15,6 +15,7 @@ import scala.tools.nsc.io.AbstractFile
import reporters._
import scala.tools.util.PathResolver
import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
import scala.collection.{ mutable, immutable }
@@ -22,7 +23,7 @@ import IMain._
import java.util.concurrent.Future
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
-import scala.tools.reflect.StdRuntimeTags._
+import StdReplTags._
/** An interpreter for Scala code.
*
@@ -80,8 +81,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* use a lazy val to ensure that any attempt to use the compiler object waits
* on the future.
*/
- private var _classLoader: AbstractFileClassLoader = null // active classloader
- private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
+ private var _classLoader: util.AbstractFileClassLoader = null // active classloader
+ private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
def compilerClasspath: Seq[URL] = (
if (isInitializeComplete) global.classPath.asURLs
@@ -271,7 +272,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (_classLoader == null)
_classLoader = makeClassLoader()
}
- def classLoader: AbstractFileClassLoader = {
+ def classLoader: util.AbstractFileClassLoader = {
ensureClassLoader()
_classLoader
}
@@ -318,7 +319,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
}
- private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) {
+ private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
* class name if the original attempt fails. This method is used by
* getResourceAsStream as well as findClass.
@@ -329,7 +330,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
case file => file
}
}
- private def makeClassLoader(): AbstractFileClassLoader =
+ private def makeClassLoader(): util.AbstractFileClassLoader =
new TranslatingClassLoader(parentClassLoader match {
case null => ScalaClassLoader fromURLs compilerClasspath
case p => new URLClassLoader(compilerClasspath, p)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
index 9541d08db1..9541d08db1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index ff7bfd432c..ff7bfd432c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 28ddf2939c..28ddf2939c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 19fa562234..19fa562234 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
index 5d41f1bbb4..5d41f1bbb4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
new file mode 100644
index 0000000000..a895944c15
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -0,0 +1,693 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.nsc.util.ScalaClassLoader
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.util.{ Locale }
+import java.util.regex.Pattern
+import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
+ ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
+ SimpleJavaFileObject, StandardLocation }
+import scala.reflect.io.{ AbstractFile, Directory, File, Path }
+import scala.io.Source
+import scala.util.{ Try, Success, Failure }
+import scala.util.Properties.lineSeparator
+import scala.collection.JavaConverters
+import scala.collection.generic.Clearable
+import java.net.URL
+import scala.language.reflectiveCalls
+import Javap._
+
+class JavapClass(
+ val loader: ScalaClassLoader,
+ val printWriter: PrintWriter,
+ intp: Option[IMain] = None
+) extends scala.tools.util.Javap {
+ import JavapTool.ToolArgs
+ import JavapClass._
+
+ lazy val tool = JavapTool()
+
+ /** Run the tool. Option args start with "-".
+ * The default options are "-protected -verbose".
+ * Byte data for filename args is retrieved with findBytes.
+ */
+ def apply(args: Seq[String]): List[JpResult] = {
+ val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
+ val (flags, upgraded) = upgrade(options)
+ import flags.{ app, fun, help, raw }
+ val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
+ if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter)))
+ else if (targets.isEmpty) List(JpResult("No anonfuns found."))
+ else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app)))
+ }
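In the REPL this entry point backs the :javap command; a hypothetical session (the flags are the ones documented in the helps table further down): the first line passes -private and -c through to the underlying tool, the second lists anonfuns generated for Foo.bar, and the third disassembles the most recent REPL result.

    scala> :javap -p -c scala.Option
    scala> :javap -fun Foo#bar
    scala> :javap -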
+
+ /** Cull our tool options. */
+ private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match {
+ case (t,s) if s.nonEmpty => (t,s)
+ case (t,s) => (t, JavapTool.DefaultOptions)
+ }
+
+ /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
+ private def bytesFor(path: String, app: Boolean) = Try {
+ def last = intp.get.mostRecentVar // fail if no intp
+ def req = if (path == "-") last else {
+ val s = path.splitHashMember
+ if (s._1.nonEmpty) s._1
+ else s._2 getOrElse "#"
+ }
+ def asAppBody(s: String) = {
+ val (cls, fix) = s.splitSuffix
+ s"${cls}$$delayedInit$$body${fix}"
+ }
+ def todo = if (app) asAppBody(req) else req
+ val bytes = findBytes(todo)
+ if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'")
+ else bytes
+ }
+
+ def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
+
+ /** Assume the string is a path and try to find the classfile
+ * it represents.
+ */
+ def tryFile(path: String): Option[Array[Byte]] =
+ (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
+
+ /** Assume the string is a fully qualified class name and try to
+ * find the class object it represents.
+ * There are other symbols of interest, too:
+ * - a definition that is wrapped in an enclosing class
+ * - a synthetic that is not in scope but its associated class is
+ */
+ def tryClass(path: String): Array[Byte] = {
+ def load(name: String) = loader classBytes name
+ def loadable(name: String) = loader resourceable name
+ // if path has an interior dollar, take it as a synthetic
+ // if the prefix up to the dollar is a symbol in scope,
+ // result is the translated prefix + suffix
+ def desynthesize(s: String) = {
+ val i = s indexOf '$'
+ if (0 until s.length - 1 contains i) {
+ val name = s substring (0, i)
+ val sufx = s substring i
+ val tran = intp flatMap (_ translatePath name)
+ def loadableOrNone(strip: Boolean) = {
+ def suffix(strip: Boolean)(x: String) =
+ (if (strip && (x endsWith "$")) x.init else x) + sufx
+ val res = tran map (suffix(strip) _)
+ if (res.isDefined && loadable(res.get)) res else None
+ }
+ // try loading translated+suffix
+ val res = loadableOrNone(strip = false)
+ // some synthetics lack a dollar (e.g., suffix = delayedInit$body)
+ // so as a hack, if prefix$$suffix fails, also try prefix$suffix
+ if (res.isDefined) res else loadableOrNone(strip = true)
+ } else None
+ }
+ val p = path.asClassName // scrub any suffix
+ // if repl, translate the name to something replish
+ // (for translate, would be nicer to get the sym and ask .isClass,
+ // instead of translatePath and then asking did I get a class back)
+ val q = if (intp.isEmpty) p else (
+ // only simple names get the scope treatment
+ Some(p) filter (_ contains '.')
+ // take path as a Name in scope
+ orElse (intp flatMap (_ translatePath p) filter loadable)
+ // take path as a Name in scope and find its enclosing class
+ orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
+ // take path as a synthetic derived from some Name in scope
+ orElse desynthesize(p)
+ // just try it plain
+ getOrElse p
+ )
+ load(q)
+ }
+
+ /** Base class for javap tool adapters for java 6 and 7. */
+ abstract class JavapTool {
+ type ByteAry = Array[Byte]
+ type Input = Pair[String, Try[ByteAry]]
+
+ /** Run the tool. */
+ def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
+
+ // Since the tool is loaded by reflection, check for catastrophic failure.
+ protected def failed: Boolean
+ implicit protected class Failer[A](a: =>A) {
+ def orFailed[B >: A](b: =>B) = if (failed) b else a
+ }
+ protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.")
+
+ // output filtering support
+ val writer = new CharArrayWriter
+ def written = {
+ writer.flush()
+ val w = writer.toString
+ writer.reset()
+ w
+ }
+
+ /** Create a Showable with output massage.
+ * @param raw show ugly repl names
+ * @param target attempt to filter output to show region of interest
+ * @param preamble other messages to output
+ */
+ def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
+ // ReplStrippingWriter clips and scrubs on write(String)
+ // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+ def show() =
+ if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
+ else writeLines()
+ private def writeLines() {
+ // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+ // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+ var filtering = false // true if in region matching filter
+ // true to output
+ def checkFilter(line: String) = if (filterOn.isEmpty) true else {
+ // cheap heuristic, todo maybe parse for the java sig.
+ // method sigs end in paren semi
+ def isAnyMethod = line.endsWith(");")
+ def isOurMethod = {
+ val lparen = line.lastIndexOf('(')
+ val blank = line.lastIndexOf(' ', lparen)
+ (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
+ }
+ filtering = if (filtering) {
+ // next blank line terminates section
+ // for -public, next line is next method, more or less
+ line.trim.nonEmpty && !isAnyMethod
+ } else {
+ isAnyMethod && isOurMethod
+ }
+ filtering
+ }
+ for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
+ printWriter write line+lineSeparator
+ printWriter.flush()
+ }
+ }
+ }
+
+ class JavapTool6 extends JavapTool {
+ import JavapTool._
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
+ val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
+ override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
+ val printWrapper = new PrintWriter(writer)
+ def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
+ PrinterCtr.newInstance(in, printWrapper, env) orFailed null
+ def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
+ fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer
+ printWrapper.flush() // just in case
+ showWithPreamble(raw, target)
+ }
+
+ lazy val parser = new JpOptions
+ def newEnv(opts: Seq[String]): FakeEnvironment = {
+ def result = {
+ val env: FakeEnvironment = EnvClass.newInstance()
+ parser(opts) foreach { case (name, value) =>
+ val field = EnvClass getDeclaredField name
+ field setAccessible true
+ field.set(env, value.asInstanceOf[AnyRef])
+ }
+ env
+ }
+ result orFailed null
+ }
+
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
+ (inputs map {
+ case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ class JavapTool7 extends JavapTool {
+
+ import JavapTool._
+ type Task = {
+ def call(): Boolean // true = ok
+ //def run(args: Array[String]): Int // all args
+ //def handleOptions(args: Array[String]): Unit // options, then run() or call()
+ }
+ // result of Task.run
+ //object TaskResult extends Enumeration {
+ // val Ok, Error, CmdErr, SysErr, Abnormal = Value
+ //}
+ val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
+ override protected def failed = TaskClaas eq null
+
+ val TaskCtor = TaskClaas.getConstructor(
+ classOf[Writer],
+ classOf[JavaFileManager],
+ classOf[DiagnosticListener[_]],
+ classOf[JIterable[String]],
+ classOf[JIterable[String]]
+ ) orFailed null
+
+ class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
+ import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
+ type D = Diagnostic[_ <: JavaFileObject]
+ val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D]
+ override def report(d: Diagnostic[_ <: JavaFileObject]) {
+ diagnostics += d
+ }
+ override def clear() = diagnostics.clear()
+ /** All diagnostic messages.
+ * @param locale Locale for diagnostic messages, null by default.
+ */
+ def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList
+
+ def reportable(raw: Boolean): String = {
+ // don't filter this message if raw, since the names are likely to differ
+ val container = "Binary file .* contains .*".r
+ val m = if (raw) messages
+ else messages filter (_ match { case container() => false case _ => true })
+ clear()
+ if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
+ else ""
+ }
+ }
+ val reporter = new JavaReporter
+
+ // DisassemblerTool.getStandardFileManager(reporter,locale,charset)
+ val defaultFileManager: JavaFileManager =
+ (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod (
+ "create",
+ classOf[DiagnosticListener[_]],
+ classOf[PrintWriter]
+ ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null
+
+ // manages named arrays of bytes, which might have failed to load
+ class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager)
+ extends ForwardingJavaFileManager[JavaFileManager](delegate) {
+ import JavaFileObject.Kind
+ import Kind._
+ import StandardLocation._
+ import JavaFileManager.Location
+ import java.net.URI
+ def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
+
+ def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
+ def managedFile(name: String, kind: Kind) = kind match {
+ case CLASS => fileObjectForInput(name, inputNamed(name), kind)
+ case _ => null
+ }
+ // todo: just wrap it as scala abstractfile and adapt it uniformly
+ def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject =
+ new SimpleJavaFileObject(uri(name), kind) {
+ override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get)
+ // if non-null, ClassWriter wrongly requires scheme non-null
+ override def toUri: URI = null
+ override def getName: String = name
+ // suppress
+ override def getLastModified: Long = -1L
+ }
+ override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject =
+ location match {
+ case CLASS_PATH => managedFile(className, kind)
+ case _ => null
+ }
+ override def hasLocation(location: Location): Boolean =
+ location match {
+ case CLASS_PATH => true
+ case _ => false
+ }
+ }
+ def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
+
+ // show tool messages and tool output, with output massage
+ def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
+
+ // eventually, use the tool interface
+ def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
+ //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
+ //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
+ import JavaConverters.asJavaIterableConverter
+ TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
+ .orFailed (throw new IllegalStateException)
+ }
+ // a result per input
+ private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
+ Try {
+ task(options, Seq(claas), inputs).call()
+ } map {
+ case true => JpResult(showable(raw, claas))
+ case _ => JpResult(reporter.reportable(raw))
+ } recoverWith {
+ case e: java.lang.reflect.InvocationTargetException => e.getCause match {
+ case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
+ case x => Failure(x)
+ }
+ } lastly {
+ reporter.clear()
+ }
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
+ case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ object JavapTool {
+ // >= 1.7
+ val Tool = "com.sun.tools.javap.JavapTask"
+
+ // < 1.7
+ val Env = "sun.tools.javap.JavapEnvironment"
+ val Printer = "sun.tools.javap.JavapPrinter"
+ // "documentation"
+ type FakeEnvironment = AnyRef
+ type FakePrinter = AnyRef
+
+ // support JavapEnvironment
+ class JpOptions {
+ private object Access {
+ final val PRIVATE = 0
+ final val PROTECTED = 1
+ final val PACKAGE = 2
+ final val PUBLIC = 3
+ }
+ private val envActionMap: Map[String, (String, Any)] = {
+ val map = Map(
+ "-l" -> (("showLineAndLocal", true)),
+ "-c" -> (("showDisassembled", true)),
+ "-s" -> (("showInternalSigs", true)),
+ "-verbose" -> (("showVerbose", true)),
+ "-private" -> (("showAccess", Access.PRIVATE)),
+ "-package" -> (("showAccess", Access.PACKAGE)),
+ "-protected" -> (("showAccess", Access.PROTECTED)),
+ "-public" -> (("showAccess", Access.PUBLIC)),
+ "-all" -> (("showallAttr", true))
+ )
+ map ++ List(
+ "-v" -> map("-verbose"),
+ "-p" -> map("-private")
+ )
+ }
+ def apply(opts: Seq[String]): Seq[(String, Any)] = {
+ opts flatMap { opt =>
+ envActionMap get opt match {
+ case Some(pair) => List(pair)
+ case _ =>
+ val charOpts = opt.tail.toSeq map ("-" + _)
+ if (charOpts forall (envActionMap contains _))
+ charOpts map envActionMap
+ else Nil
+ }
+ }
+ }
+ }
+
+ case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
+
+ object ToolArgs {
+ def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
+ case ((t,others), s) => s match {
+ case "-fun" => (t copy (fun=true), others)
+ case "-app" => (t copy (app=true), others)
+ case "-help" => (t copy (help=true), others)
+ case "-raw" => (t copy (raw=true), others)
+ case _ => (t, others :+ s)
+ }
+ }
+ }
+
+ val helps = List(
+ "usage" -> ":javap [opts] [path or class or -]...",
+ "-help" -> "Prints this help message",
+ "-raw" -> "Don't unmangle REPL names",
+ "-app" -> "Show the DelayedInit body of Apps",
+ "-fun" -> "Show anonfuns for class or Class#method",
+ "-verbose/-v" -> "Stack size, number of locals, method args",
+ "-private/-p" -> "Private classes and members",
+ "-package" -> "Package-private classes and members",
+ "-protected" -> "Protected classes and members",
+ "-public" -> "Public classes and members",
+ "-l" -> "Line and local variable tables",
+ "-c" -> "Disassembled code",
+ "-s" -> "Internal type signatures",
+ "-sysinfo" -> "System info of class",
+ "-constants" -> "Static final constants"
+ )
+
+ // match prefixes and unpack opts, or -help on failure
+ def massage(arg: String): Seq[String] = {
+ require(arg startsWith "-")
+ // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
+ val r = """(-[^/]*)(/(-.))?""".r
+ def maybe(opt: String, s: String): Option[String] = opt match {
+ // disambiguate by preferring short form
+ case r(lf,_,sf) if s == sf => Some(sf)
+ case r(lf,_,sf) if lf startsWith s => Some(lf)
+ case _ => None
+ }
+ def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
+ // one candidate or one single-char candidate
+ def uniqueOf(maybes: Seq[String]) = {
+ def single(s: String) = s.length == 2
+ if (maybes.length == 1) maybes
+ else if ((maybes count single) == 1) maybes filter single
+ else Nil
+ }
+ // each optchar must decode to exactly one option
+ def unpacked(s: String): Try[Seq[String]] = {
+ val ones = (s drop 1) map { c =>
+ val maybes = uniqueOf(candidates(s"-$c"))
+ if (maybes.length == 1) Some(maybes.head) else None
+ }
+ Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
+ }
+ val res = uniqueOf(candidates(arg))
+ if (res.nonEmpty) res
+ else (unpacked(arg)
+ getOrElse (Seq("-help"))) // or else someone needs help
+ }
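A few worked examples of how massage resolves input against the helps table above (expected results only, not tests from the repo):

    massage("-pro")     // => Seq("-protected")   unique prefix of a long form
    massage("-p")       // => Seq("-p")           several long forms match, so the single-char form wins
    massage("-pv")      // => Seq("-p", "-v")     packed single-char options are unpacked
    massage("-bogus")   // => Seq("-help")        nothing matches, so fall back to -help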
+
+ def helper(pw: PrintWriter) = new Showable {
+ def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
+ }
+
+ val DefaultOptions = List("-protected", "-verbose")
+
+ def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
+
+ private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
+
+ private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+
+ def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+ }
+}
+
+object JavapClass {
+ def apply(
+ loader: ScalaClassLoader = ScalaClassLoader.appLoader,
+ printWriter: PrintWriter = new PrintWriter(System.out, true),
+ intp: Option[IMain] = None
+ ) = new JavapClass(loader, printWriter, intp)
+
+ // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
+ // or a resource path com/acme/Widget.class; but not widget.out
+ implicit class MaybeClassLike(val s: String) extends AnyVal {
+ /* private[this] final val suffix = ".class" */
+ private def suffix = ".class"
+ def asClassName = (s stripSuffix suffix).replace('/', '.')
+ def asClassResource = if (s endsWith suffix) s else s.replace('.', '/') + suffix
+ def splitSuffix: (String, String) = if (s endsWith suffix) (s dropRight suffix.length, suffix) else (s, "")
+ def strippingSuffix(f: String => String): String =
+ if (s endsWith suffix) f(s dropRight suffix.length) else s
+ // e.g. Foo#bar. Foo# yields zero-length member part.
+ def splitHashMember: (String, Option[String]) = {
+ val i = s lastIndexOf '#'
+ if (i < 0) (s, None)
+ //else if (i >= s.length - 1) (s.init, None)
+ else (s take i, Some(s drop i+1))
+ }
+ }
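The string helpers above behave as follows; a small sketch (assuming the repl module is on the classpath so the implicit class is importable):

    import scala.tools.nsc.interpreter.JavapClass.MaybeClassLike

    assert("com/acme/Widget.class".asClassName == "com.acme.Widget")
    assert("com.acme.Widget".asClassResource   == "com/acme/Widget.class")
    assert("Foo#bar".splitHashMember == (("Foo", Some("bar"))))
    assert("Foo#".splitHashMember    == (("Foo", Some(""))))   // zero-length member part
    assert("Foo".splitHashMember     == (("Foo", None)))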
+ implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+ private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
+ def parents: List[ClassLoader] = parentsOf(cl)
+ /* all file locations */
+ def locations = {
+ def alldirs = parents flatMap (_ match {
+ case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
+ case jcl: java.net.URLClassLoader => jcl.getURLs
+ case _ => Nil
+ })
+ val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
+ dirs
+ }
+ /* only the file location from which the given class is loaded */
+ def locate(k: String): Option[Path] = {
+ Try {
+ val claas = try cl loadClass k catch {
+ case _: NoClassDefFoundError => null // let it snow
+ }
+ // cf ScalaClassLoader.originOfClass
+ claas.getProtectionDomain.getCodeSource.getLocation
+ } match {
+ case Success(null) => None
+ case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
+ case _ => None
+ }
+ }
+ /* would classBytes succeed with a nonempty array */
+ def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+ }
+ implicit class PathOps(val p: Path) extends AnyVal {
+ import scala.tools.nsc.io.Jar
+ def isJar = Jar isJarOrZip p
+ }
+ implicit class URLOps(val url: URL) extends AnyVal {
+ def isFile: Boolean = url.getProtocol == "file"
+ }
+ object FunFinder {
+ def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
+ }
+ class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+
+ // class k, candidate f without prefix
+ def isFunOfClass(k: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun").r
+ (p findPrefixOf f).nonEmpty
+ }
+ // class k, candidate f without prefix, method m
+ def isFunOfMethod(k: String, m: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun\\$$${Pattern quote m}\\$$").r
+ (p findPrefixOf f).nonEmpty
+ }
+ def isFunOfTarget(k: String, m: Option[String], f: String) =
+ if (m.isEmpty) isFunOfClass(k, f)
+ else isFunOfMethod(k, m.get, f)
+ def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
+ for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
+ }
+ // path prefix p, class k, dir d
+ def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
+ val subdir = Path(p)
+ for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+ yield f.name
+ }
+ // path prefix p, class k, jar file f
+ def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+ import java.util.jar.JarEntry
+ import scala.tools.nsc.io.Jar
+ def maybe(e: JarEntry) = {
+ val (path, name) = {
+ val parts = e.getName split "/"
+ if (parts.length < 2) ("", e.getName)
+ else (parts.init mkString "/", parts.last)
+ }
+ if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+ }
+ (new Jar(f) map maybe).flatten
+ }
+ def loadable(name: String) = loader resourceable name
+ // translated class, optional member, opt member to filter on, whether it is repl output
+ def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+ val (k0, m0) = s.splitHashMember
+ val k = k0.asClassName
+ val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
+ val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
+ // class is either something replish or available to loader
+ // $line.$read$$etc$Foo#member
+ ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+ // s = "f" and $line.$read$$etc$#f is what we're after,
+ // ignoring any #member (except take # as filter on #apply)
+ orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
+ getOrElse (k, member, filter, false))
+ }
+ /** Find the classnames of anonfuns associated with k,
+ * where k may be an available class or a symbol in scope.
+ */
+ def funsOf(k0: String): Seq[String] = {
+ // class is either something replish or available to loader
+ val (k, member, filter, isReplish) = translate(k0)
+ val splat = k split "\\."
+ val name = splat.last
+ val prefix = if (splat.length > 1) splat.init mkString "/" else ""
+ val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ // reconstitute an anonfun with a package
+ // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
+ def packaged(s: String) = {
+ val p = if (pkg.isEmpty) s else s"$pkg.$s"
+ val pm = filter map (p + "#" + _)
+ pm getOrElse p
+ }
+ // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
+ val fs = if (isReplish) {
+ def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
+ if (p.isEmpty) Option(d)
+ else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
+ }
+ outed(intp.get.replOutput.dir, splat.init) map { d =>
+ listFunsInAbsFile(name, member, d) map packaged
+ }
+ } else {
+ loader locate k map { w =>
+ if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
+ else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
+ else Nil
+ }
+ }
+ fs match {
+ case Some(xs) => xs.to[Seq] // maybe empty
+ case None => Seq() // nothing found, e.g., junk input
+ }
+ }
+ def funs(ks: Seq[String]) = ks flatMap funsOf _
+ }
+}
+
+object Javap {
+ def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
+
+ def apply(path: String): Unit = apply(Seq(path))
+ def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
+
+ trait Showable {
+ def show(): Unit
+ }
+
+ sealed trait JpResult extends scala.tools.util.JpResult {
+ type ResultType
+ def isError: Boolean
+ def value: ResultType
+ def show(): Unit
+ // todo
+ // def header(): String
+ // def fields(): List[String]
+ // def methods(): List[String]
+ // def signatures(): List[String]
+ }
+ object JpResult {
+ def apply(msg: String) = new JpError(msg)
+ def apply(res: Showable) = new JpSuccess(res)
+ }
+ class JpError(msg: String) extends JpResult {
+ type ResultType = String
+ def isError = true
+ def value = msg
+ def show() = println(msg) // makes sense for :javap, less for -Ygen-javap
+ }
+ class JpSuccess(val value: Showable) extends JpResult {
+ type ResultType = AnyRef
+ def isError = false
+ def show() = value.show() // output to tool's PrintWriter
+ }
+ implicit class Lastly[A](val t: Try[A]) extends AnyVal {
+ private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/repl/scala/tools/nsc/interpreter/Logger.scala
index 7407daf8d0..7407daf8d0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Logger.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 4bba27b714..4bba27b714 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index 84a47311e2..84a47311e2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
index 627a881cae..a0af72940a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
@@ -10,6 +10,7 @@ import NamedParam._
import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
+import scala.tools.nsc.typechecker.{ TypeStrings }
trait NamedParamCreator {
protected def freshName: () => String
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 57f3675ada..57f3675ada 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
index 672a6fd28f..672a6fd28f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
index f5db3d9e3a..f5db3d9e3a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
index f625124e70..f625124e70 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index e517a16b32..e517a16b32 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
index 3392ea0b5e..3392ea0b5e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplDir.scala b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
index 5d386b47b7..5d386b47b7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplDir.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
index 0eabd84234..51fab3082e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -23,6 +23,14 @@ trait ReplGlobal extends Global {
override lazy val analyzer = new {
val global: ReplGlobal.this.type = ReplGlobal.this
} with Analyzer {
+
+ override protected def findMacroClassLoader(): ClassLoader = {
+ val loader = super.findMacroClassLoader
+ macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
+ val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get
+ new util.AbstractFileClassLoader(virtualDirectory, loader) {}
+ }
+
override def newTyper(context: Context): Typer = new Typer(context) {
override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
val res = super.typed(tree, mode, pt)
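
Editor's note, illustrative only and not part of the patch: the `findMacroClassLoader` override above layers a classloader over the REPL's in-memory output directory so macro implementations defined at the prompt can be loaded in the same session. A rough standalone sketch of that layering, assuming a `VirtualDirectory` stands in for `settings.outputDirs.getSingleOutput.get`:

import scala.reflect.io.VirtualDirectory
import scala.tools.nsc.util.AbstractFileClassLoader

val replOutput = new VirtualDirectory("(memory)", None)        // stand-in for the REPL's single output dir
val parent     = getClass.getClassLoader                        // stand-in for the compiler's macro classloader
val macroLoader: ClassLoader = new AbstractFileClassLoader(replOutput, parent) {}
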
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index 2364918494..2364918494 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index b20166d070..b20166d070 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index 08472bbc64..08472bbc64 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
index ea100b25f2..ea100b25f2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
index e400906a58..e400906a58 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
index 36cdf65510..36cdf65510 100644
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index 2d0917d91f..2d0917d91f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
new file mode 100644
index 0000000000..ebbb397a0c
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.tools.reflect.StdTags
+import scala.reflect.runtime.{ universe => ru }
+
+trait StdReplTags extends StdTags {
+ lazy val tagOfStdReplVals = tagOfStaticClass[StdReplVals]
+ lazy val tagOfIMain = tagOfStaticClass[IMain]
+}
+
+object StdReplTags extends StdTags with StdReplTags {
+ val u: ru.type = ru
+ val m = u.runtimeMirror(getClass.getClassLoader)
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 52a085080b..52a085080b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
index dddfb1b8f6..dddfb1b8f6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala
index 794d41adc7..794d41adc7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
index 18e0ee7c85..18e0ee7c85 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 89998e438a..89998e438a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
index c62cf21151..c62cf21151 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index fd6d637212..fd6d637212 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index 52a0c20a11..52a0c20a11 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index b4d2adaad4..b4d2adaad4 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index b753e84426..6dc3e5a62b 100644
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -42,7 +42,7 @@ class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(setti
*/
def docUnit(code: String) = {
val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
+ val scanner = newUnitParser(unit)
scanner.compilationUnit()
}
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index f9b9eecdb3..f9b9eecdb3 100644
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
new file mode 100644
index 0000000000..5ad50445a8
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -0,0 +1,234 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocAnalyzer extends Analyzer {
+ val global : Global // generally, a ScaladocGlobal
+ import global._
+
+ override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper
+
+ trait ScaladocTyper extends Typer {
+ private def unit = context.unit
+
+ override def canAdaptConstantTypeToLiteral = false
+
+ override protected def macroImplementationNotFoundMessage(name: Name): String = (
+ super.macroImplementationNotFoundMessage(name)
+ + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ )
+
+ override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
+ val sym = docDef.symbol
+
+ if ((sym ne null) && (sym ne NoSymbol)) {
+ val comment = docDef.comment
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_ => typer1 defineUseCases useCase) match {
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ }
+ }
+ }
+
+ super.typedDocDef(docDef, mode, pt)
+ }
+
+ def defineUseCases(useCase: UseCase): List[Symbol] = {
+ def stringParser(str: String): syntaxAnalyzer.Parser = {
+ val file = new BatchSourceFile(context.unit.source.file, str) {
+ override def positionInUltimateSource(pos: Position) = {
+ pos.withSource(context.unit.source, useCase.pos.start)
+ }
+ }
+ newUnitParser(new CompilationUnit(file))
+ }
+
+ val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
+ val enclClass = context.enclClass.owner
+
+ def defineAlias(name: Name) = (
+ if (context.scope.lookup(name) == NoSymbol) {
+ lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
+ silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
+ val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
+ val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
+ val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ alias setInfo newInfo
+ context.scope.enter(alias)
+ }
+ }
+ }
+ )
+
+ for (tree <- trees; t <- tree)
+ t match {
+ case Ident(name) if name startsWith '$' => defineAlias(name)
+ case _ =>
+ }
+
+ useCase.aliases = context.scope.toList
+ namer.enterSyms(trees)
+ typedStats(trees, NoSymbol)
+ useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
+
+ if (settings.debug.value)
+ useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
+
+ useCase.defined
+ }
+ }
+}
+
+abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
+ import global._
+
+ class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
+ override val in = new ScaladocJavaUnitScanner(unit)
+ } with JavaUnitParser(unit) { }
+
+ class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
+ /** buffer for the documentation comment
+ */
+ var docBuffer: StringBuilder = null
+
+ /** add the given character to the documentation buffer
+ */
+ protected def putDocChar(c: Char) {
+ if (docBuffer ne null) docBuffer.append(c)
+ }
+
+ override protected def skipComment(): Boolean = {
+ if (in.ch == '/') {
+ do {
+ in.next
+ } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
+ true
+ } else if (in.ch == '*') {
+ docBuffer = null
+ in.next
+ val scalaDoc = ("/**", "*/")
+ if (in.ch == '*')
+ docBuffer = new StringBuilder(scalaDoc._1)
+ do {
+ do {
+ if (in.ch != '*' && in.ch != SU) {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '*' && in.ch != SU)
+ while (in.ch == '*') {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '/' && in.ch != SU)
+ if (in.ch == '/') in.next
+ else incompleteInputError("unclosed comment")
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
+
+ private var docBuffer: StringBuilder = null // buffer for comments
+ private var docPos: Position = NoPosition // last doc comment position
+ private var inDocComment = false
+
+ override def discardDocBuffer() = {
+ val doc = flushDoc
+ if (doc ne null)
+ unit.warning(docPos, "discarding unmoored doc comment")
+ }
+
+ override def flushDoc(): DocComment = {
+ if (docBuffer eq null) null
+ else try DocComment(docBuffer.toString, docPos) finally docBuffer = null
+ }
+
+ override protected def putCommentChar() {
+ if (inDocComment)
+ docBuffer append ch
+
+ nextChar()
+ }
+ override def skipDocComment(): Unit = {
+ inDocComment = true
+ docBuffer = new StringBuilder("/**")
+ super.skipDocComment()
+ }
+ override def skipBlockComment(): Unit = {
+ inDocComment = false
+ docBuffer = new StringBuilder("/*")
+ super.skipBlockComment()
+ }
+ override def skipComment(): Boolean = {
+ super.skipComment() && {
+ if (docBuffer ne null) {
+ if (inDocComment)
+ foundDocComment(docBuffer.toString, offset, charOffset - 2)
+ else
+ try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null
+ }
+ true
+ }
+ }
+ def foundComment(value: String, start: Int, end: Int) {
+ val pos = new RangePosition(unit.source, start, start, end)
+ unit.comment(pos, value)
+ }
+ def foundDocComment(value: String, start: Int, end: Int) {
+ docPos = new RangePosition(unit.source, start, start, end)
+ unit.comment(docPos, value)
+ }
+ }
+ class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
+ override def newScanner() = new ScaladocUnitScanner(unit, patches)
+ override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches)
+
+ override def joinComment(trees: => List[Tree]): List[Tree] = {
+ val doc = in.flushDoc
+ if ((doc ne null) && doc.raw.length > 0) {
+ log(s"joinComment(doc=$doc)")
+ val joined = trees map {
+ t =>
+ DocDef(doc, t) setPos {
+ if (t.pos.isDefined) {
+ val pos = doc.pos.withEnd(t.pos.endOrPoint)
+ // always make the position transparent
+ pos.makeTransparent
+ } else {
+ t.pos
+ }
+ }
+ }
+ joined.find(_.pos.isOpaqueRange) foreach {
+ main =>
+ val mains = List(main)
+ joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+ }
+ joined
+ }
+ else trees
+ }
+ }
+}
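
Editor's note, illustrative only and not part of the patch: the kind of source the use-case machinery above consumes. `defineUseCases` re-parses the `@usecase` body via `stringParser` and types it against the enclosing class, while `defineAlias` resolves `$`-prefixed names such as `$Coll` from the enclosing `@define` variables. The trait and comment text below are hypothetical.

import scala.collection.generic.CanBuildFrom

/** A toy collection.
 *  @define Coll MyColl
 */
trait MyColl[+A] {
  type Repr <: MyColl[A]

  /** Builds a new collection by applying a function to every element.
   *
   *  @usecase def map[B](f: A => B): $Coll[B]
   */
  def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
}
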
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
new file mode 100644
index 0000000000..20f24dc753
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocGlobalTrait extends Global {
+ outer =>
+
+ override val useOffsetPositions = false
+ override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil)
+
+ override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) {
+ val runsAfter = List[String]()
+ val runsRightAfter = None
+ }
+ override lazy val loaders = new SymbolLoaders {
+ val global: outer.type = outer
+
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ override protected def signalError(root: Symbol, ex: Throwable) {
+ log(s"Suppressing error involving $root: $ex")
+ }
+ }
+}
+
+class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
+ override protected def computeInternalPhases() {
+ phasesSet += syntaxAnalyzer
+ phasesSet += analyzer.namerFactory
+ phasesSet += analyzer.packageObjects
+ phasesSet += analyzer.typerFactory
+ }
+ override def forScaladoc = true
+ override lazy val analyzer = new {
+ val global: ScaladocGlobal.this.type = ScaladocGlobal.this
+ } with ScaladocAnalyzer
+}
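
Editor's note, illustrative only and not part of the patch: `computeInternalPhases` above trims the pipeline to parser, namer, packageobjects and typer, which is all scaladoc needs. A hedged sketch of driving this compiler directly (file name and error handling are placeholders; the usual entry point is `DocFactory`):

import scala.tools.nsc.doc
import scala.tools.nsc.reporters.ConsoleReporter

val docSettings = new doc.Settings(msg => Console.err.println(msg))
docSettings.usejavacp.value = true                     // reuse the JVM classpath for this sketch
val reporter = new ConsoleReporter(docSettings)
val compiler = new doc.ScaladocGlobal(docSettings, reporter)
(new compiler.Run).compile(List("Example.scala"))      // runs only the phases registered above
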
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 90b94e1336..90b94e1336 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
index 9447e36610..9447e36610 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
index 11520c810e..11520c810e 100644
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index 2064d86860..2064d86860 100755
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
index c11179800c..c11179800c 100755
--- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index 8d80333195..8d80333195 100755
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
index 2a07547de2..2a07547de2 100755
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
index a3d05ae50b..a3d05ae50b 100644
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
index 42b56aa927..42b56aa927 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
index 0cdd47182f..0cdd47182f 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
index ee8b7809e5..ee8b7809e5 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
index 21c5f6bb67..21c5f6bb67 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index d721a96ad7..d721a96ad7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 229e26d699..229e26d699 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index 91939cf3de..91939cf3de 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index 5781e680dd..5781e680dd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index c034647320..c034647320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
index e3c94505ab..e3c94505ab 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index a74c2eedbd..a74c2eedbd 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
index 37145756d9..37145756d9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 63509de4b5..63509de4b5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
index 61c1819d11..61c1819d11 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
index ec00cace75..ec00cace75 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 837d9e6f21..837d9e6f21 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
index 2fa1bf62f3..2fa1bf62f3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
index 7229603ae5..7229603ae5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
index b2f2935dc9..b2f2935dc9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
index 97edbd49db..97edbd49db 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
index cb1f638a58..cb1f638a58 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
index 9d7aec792b..9d7aec792b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
index 5dd6e38d2e..5dd6e38d2e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
index 2e3f5ea530..2e3f5ea530 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
index 4be145d0af..4be145d0af 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
index 69038337a7..69038337a7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
index 36c43be3a2..36c43be3a2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
index 5fe33f72f5..5fe33f72f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 478f2e38ac..478f2e38ac 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index 0e8c893315..0e8c893315 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
index 4d740f3b17..4d740f3b17 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
index b9b49076a6..b9b49076a6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index f127e35b48..f127e35b48 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
index 63a1ae8349..63a1ae8349 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
index 542ba4aa5a..542ba4aa5a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
index b5075c16cd..b5075c16cd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
index d613cf5633..d613cf5633 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
index ae2f85823b..ae2f85823b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
index a0d93f4844..a0d93f4844 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
index 55fb370a41..55fb370a41 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index 70073b272a..70073b272a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
index faab0cf1a3..faab0cf1a3 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
index bc3fbc81b2..bc3fbc81b2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4dd48675b7..4dd48675b7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
index 4688d633fe..4688d633fe 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
index 9b32288e04..9b32288e04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
index fd0ad06e81..fd0ad06e81 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
index ad312793ea..ad312793ea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
index 67ffca79de..67ffca79de 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
index 6e9f2f743f..6e9f2f743f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
index c777bfce8d..c777bfce8d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
index 848dd5963a..848dd5963a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
index 34a04249ee..34a04249ee 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
index 2ed33b0aa4..2ed33b0aa4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
index 6ea17ac320..6ea17ac320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
index 529aa93188..529aa93188 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
index 00c3378a2a..00c3378a2a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
index d30dbad858..d30dbad858 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
index 7d64b9c5c5..7d64b9c5c5 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
index 4625f9df74..4625f9df74 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
index 3764f82ccb..3764f82ccb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
index 4417f5b438..4417f5b438 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
index bc29efb3e6..bc29efb3e6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
index 8313f4975b..8313f4975b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
index 04eda2f307..04eda2f307 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
index c89765239e..c89765239e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
index bf984ef0ba..bf984ef0ba 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
index a790bb1169..a790bb1169 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
index b6ac4415e4..b6ac4415e4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
index 9aae5ba0aa..9aae5ba0aa 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index b066027f04..b066027f04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 6d1caf6d50..6d1caf6d50 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
index 0af34eca4c..0af34eca4c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
index fb961a2eda..fb961a2eda 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
index 625d9251cb..625d9251cb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
index 88983254ce..88983254ce 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
index d0cd7fd512..d0cd7fd512 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
index 6c6e1fe2f5..6c6e1fe2f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
index 04c8794e92..04c8794e92 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
index d8152529fd..d8152529fd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
index 3b5c47c9e3..3b5c47c9e3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
index ef2615bacc..ef2615bacc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
index 2fcc77b2e8..2fcc77b2e8 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
index d5ac639405..d5ac639405 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
index 2a949311d7..2a949311d7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
index 17d1caeb66..17d1caeb66 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
index 574d6b04f8..574d6b04f8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index 924f203a59..924f203a59 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 1272906df5..1272906df5 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 23259a4ae8..23259a4ae8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 1df725636a..1df725636a 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index 868c2fc3a4..868c2fc3a4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 99e9059d79..99e9059d79 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
index 5b4ec4a40b..5b4ec4a40b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index b972649194..b972649194 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
index cf5c1fb3fb..cf5c1fb3fb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
index f712869a4b..f712869a4b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
index 22580805aa..22580805aa 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
index 150b293b81..150b293b81 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index 6395446d3b..6395446d3b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index ebac25bbe4..ebac25bbe4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
index 3db9f18484..f0a9caac15 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -5,9 +5,10 @@
package scala.tools.partest
+import scala.tools.nsc
import scala.tools.nsc._
import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
+import scala.tools.nsc.doc.{ DocFactory, Universe }
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.diagram._
import scala.tools.nsc.doc.base.comment._
@@ -78,11 +79,11 @@ abstract class ScaladocModelTest extends DirectTest {
System.setErr(prevErr)
}
- private[this] var settings: Settings = null
+ private[this] var settings: doc.Settings = null
// create a new scaladoc compiler
private[this] def newDocFactory: DocFactory = {
- settings = new Settings(_ => ())
+ settings = new doc.Settings(_ => ())
settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think
@@ -96,7 +97,6 @@ abstract class ScaladocModelTest extends DirectTest {
// so we don't get the newSettings warning
override def isDebug = false
-
// finally, enable easy navigation inside the entities
object access {
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index c7b58d70d2..621d318ceb 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,4 +1,5 @@
-Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(2) + Macros.bar(2) * new Macros().quux(4))
^
one error found
diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check
index 76602de17d..379416c179 100644
--- a/test/files/neg/t5753.check
+++ b/test/files/neg/t5753.check
@@ -1,4 +1,5 @@
-Test_2.scala:9: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Test_2.scala:9: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(42))
^
one error found
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index d198f4c324..7a2eb9a588 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -37,17 +37,23 @@ object Test extends InteractiveTest {
prepre + docComment(nTags) + prepost + post
}
-
-
override lazy val compiler = {
prepareSettings(settings)
- new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+ new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait {
outer =>
+
val global: this.type = this
override lazy val analyzer = new {
val global: outer.type = outer
- } with doc.ScaladocAnalyzer
+ } with doc.ScaladocAnalyzer with InteractiveAnalyzer {
+ override def newTyper(context: Context): InteractiveTyper with ScaladocTyper =
+ new Typer(context) with InteractiveTyper with ScaladocTyper
+ }
+
+ override lazy val loaders = new scala.tools.nsc.symtab.SymbolLoaders {
+ val global: outer.type = outer
+ }
def chooseLink(links: List[LinkTo]): LinkTo = links.head
def internalLink(sym: Symbol, site: Symbol) = None
@@ -125,7 +131,7 @@ object Test extends InteractiveTest {
case s: Seq[_] => s exists (existsText(_, text))
case p: Product => p.productIterator exists (existsText(_, text))
}
- val (derived, base) = compiler.ask { () =>
+ val (derived, base) = compiler.ask { () =>
val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
(derived, derived.ancestors(0))
}
diff --git a/test/files/run/t5527.check b/test/scaladoc/run/t5527.check
index 1518168c51..ab2aeb2d67 100644
--- a/test/files/run/t5527.check
+++ b/test/scaladoc/run/t5527.check
@@ -1,3 +1,12 @@
+newSource1:17: warning: discarding unmoored doc comment
+ /** Testing 123 */
+ ^
+newSource1:27: warning: discarding unmoored doc comment
+ /** Calculate this result. */
+ ^
+newSource1:34: warning: discarding unmoored doc comment
+ /** Another digit is a giveaway. */
+ ^
[[syntax trees at end of parser]] // newSource1
package <empty> {
object UselessComments extends scala.AnyRef {
diff --git a/test/files/run/t5527.scala b/test/scaladoc/run/t5527.scala
index 2449ff60c3..2449ff60c3 100644
--- a/test/files/run/t5527.scala
+++ b/test/scaladoc/run/t5527.scala
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
index 5aef38e00a..37f6947aaa 100644
--- a/test/scaladoc/scalacheck/IndexScriptTest.scala
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -35,7 +35,7 @@ object Test extends Properties("IndexScript") {
}
property("allPackages") = {
- createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.allPackages.map(_.toString) == List(
"scala",
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index bf385898fc..dc4ab126d4 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -56,7 +56,7 @@ object Test extends Properties("Index") {
}
property("path") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.path == List("index.html")
case None => false
@@ -64,7 +64,7 @@ object Test extends Properties("Index") {
}
property("title") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.title == ""
@@ -72,7 +72,7 @@ object Test extends Properties("Index") {
}
}
property("browser contants a script element") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
(index.browser \ "script").size == 1
diff --git a/tools/stability-test.sh b/tools/stability-test.sh
new file mode 100755
index 0000000000..f017ac0842
--- /dev/null
+++ b/tools/stability-test.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+
+declare failed
+
+echo "Comparing build/quick/classes and build/strap/classes"
+for dir in library reflect compiler; do
+ # feel free to replace by a more elegant approach -- don't know how
+ if diff -rw -x '*.css' \
+ -x '*.custom' \
+ -x '*.gif' \
+ -x '*.js' \
+ -x '*.layout' \
+ -x '*.png' \
+ -x '*.properties' \
+ -x '*.tmpl' \
+ -x '*.tooltip' \
+ -x '*.txt' \
+ -x '*.xml' \
+ build/{quick,strap}/classes/$dir
+ then
+ classes=$(find build/quick/classes/$dir -name '*.class' | wc -l)
+ printf "%8s: %5d classfiles verified identical\n" $dir $classes
+ else
+ failed=true
+ fi
+done
+
+[[ -z $failed ]] || exit 127