Diffstat (limited to 'src')
-rw-r--r--src/build/genprod.scala27
-rw-r--r--src/build/maven/maven-deploy.xml9
-rw-r--r--src/build/pack.xml3
-rw-r--r--src/compiler/rootdoc.txt6
-rw-r--r--src/compiler/scala/reflect/internal/AnnotationCheckers.scala1
-rw-r--r--src/compiler/scala/reflect/internal/AnnotationInfos.scala25
-rw-r--r--src/compiler/scala/reflect/internal/BaseTypeSeqs.scala51
-rw-r--r--src/compiler/scala/reflect/internal/ClassfileConstants.scala81
-rw-r--r--src/compiler/scala/reflect/internal/Constants.scala4
-rw-r--r--src/compiler/scala/reflect/internal/Definitions.scala706
-rw-r--r--src/compiler/scala/reflect/internal/Flags.scala77
-rw-r--r--src/compiler/scala/reflect/internal/HasFlags.scala3
-rw-r--r--src/compiler/scala/reflect/internal/Importers.scala342
-rw-r--r--src/compiler/scala/reflect/internal/InfoTransformers.scala2
-rw-r--r--src/compiler/scala/reflect/internal/Kinds.scala221
-rw-r--r--src/compiler/scala/reflect/internal/NameManglers.scala70
-rw-r--r--src/compiler/scala/reflect/internal/Names.scala217
-rw-r--r--src/compiler/scala/reflect/internal/Scopes.scala52
-rw-r--r--src/compiler/scala/reflect/internal/StdNames.scala546
-rw-r--r--src/compiler/scala/reflect/internal/SymbolTable.scala33
-rw-r--r--src/compiler/scala/reflect/internal/Symbols.scala820
-rw-r--r--src/compiler/scala/reflect/internal/TreeInfo.scala66
-rw-r--r--src/compiler/scala/reflect/internal/TreePrinters.scala45
-rw-r--r--src/compiler/scala/reflect/internal/Trees.scala53
-rw-r--r--src/compiler/scala/reflect/internal/Types.scala2170
-rw-r--r--src/compiler/scala/reflect/internal/pickling/UnPickler.scala26
-rw-r--r--src/compiler/scala/reflect/internal/settings/MutableSettings.scala1
-rw-r--r--src/compiler/scala/reflect/internal/transform/Erasure.scala4
-rw-r--r--src/compiler/scala/reflect/internal/util/Collections.scala158
-rw-r--r--src/compiler/scala/reflect/internal/util/Origins.scala (renamed from src/compiler/scala/tools/nsc/util/Origins.scala)16
-rw-r--r--src/compiler/scala/reflect/runtime/ConversionUtil.scala48
-rw-r--r--src/compiler/scala/reflect/runtime/JavaToScala.scala155
-rw-r--r--src/compiler/scala/reflect/runtime/Loaders.scala8
-rw-r--r--src/compiler/scala/reflect/runtime/Mirror.scala28
-rw-r--r--src/compiler/scala/reflect/runtime/ScalaToJava.scala14
-rw-r--r--src/compiler/scala/reflect/runtime/Settings.scala1
-rw-r--r--src/compiler/scala/reflect/runtime/SymbolTable.scala2
-rw-r--r--src/compiler/scala/reflect/runtime/SynchronizedOps.scala51
-rw-r--r--src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala119
-rw-r--r--src/compiler/scala/reflect/runtime/SynchronizedTypes.scala87
-rw-r--r--src/compiler/scala/reflect/runtime/ToolBoxes.scala135
-rw-r--r--src/compiler/scala/reflect/runtime/TreeBuildUtil.scala14
-rw-r--r--src/compiler/scala/reflect/runtime/Universe.scala2
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala4
-rw-r--r--src/compiler/scala/tools/ant/Scaladoc.scala27
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala3
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl4
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala30
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala3
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala8
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala157
-rw-r--r--src/compiler/scala/tools/nsc/MacroContext.scala10
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ScalaDoc.scala8
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala55
-rw-r--r--src/compiler/scala/tools/nsc/ast/Reifiers.scala761
-rw-r--r--src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala75
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala57
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreePrinters.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala181
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala116
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala253
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala156
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala46
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala17
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala63
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala29
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala43
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala76
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala20
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala139
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala17
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala20
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala215
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala100
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala8
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala37
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Dossiers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala33
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala75
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala96
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Imports.scala17
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala76
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Naming.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala246
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala5
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala75
-rw-r--r--src/compiler/scala/tools/nsc/io/AbstractFile.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/MsilFile.scala4
-rw-r--r--src/compiler/scala/tools/nsc/io/NoAbstractFile.scala30
-rw-r--r--src/compiler/scala/tools/nsc/io/Path.scala2
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala24
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala3
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala10
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala2
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala15
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala23
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala7
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala178
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala60
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala9
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala92
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala223
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala88
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala57
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala62
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala155
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/LiftCode.scala587
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala51
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala12
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala366
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala70
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala1
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala1056
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala162
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala33
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala208
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala516
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala223
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala221
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala172
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala254
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala1370
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala187
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala90
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala181
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala124
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1420
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala17
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala26
-rw-r--r--src/compiler/scala/tools/nsc/util/Position.scala23
-rw-r--r--src/compiler/scala/tools/nsc/util/ProxyReport.scala14
-rw-r--r--src/compiler/scala/tools/nsc/util/SourceFile.scala4
-rw-r--r--src/compiler/scala/tools/nsc/util/Statistics.scala4
-rw-r--r--src/compiler/scala/tools/reflect/Mock.scala3
-rw-r--r--src/compiler/scala/tools/util/EditDistance.scala40
-rw-r--r--src/compiler/scala/tools/util/Javap.scala22
-rw-r--r--src/compiler/scala/tools/util/StringOps.scala7
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala45
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala22
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala30
-rw-r--r--src/detach/plugin/scala/tools/detach/Detach.scala4
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java10
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE3
-rw-r--r--src/intellij/scala.iml.SAMPLE10
-rw-r--r--src/library/rootdoc.txt (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt)0
-rw-r--r--src/library/scala/AnyValCompanion.scala2
-rw-r--r--src/library/scala/Function0.scala14
-rw-r--r--src/library/scala/Function1.scala14
-rw-r--r--src/library/scala/Function10.scala4
-rw-r--r--src/library/scala/Function11.scala4
-rw-r--r--src/library/scala/Function12.scala4
-rw-r--r--src/library/scala/Function13.scala4
-rw-r--r--src/library/scala/Function14.scala4
-rw-r--r--src/library/scala/Function15.scala4
-rw-r--r--src/library/scala/Function16.scala4
-rw-r--r--src/library/scala/Function17.scala4
-rw-r--r--src/library/scala/Function18.scala4
-rw-r--r--src/library/scala/Function19.scala4
-rw-r--r--src/library/scala/Function2.scala14
-rw-r--r--src/library/scala/Function20.scala4
-rw-r--r--src/library/scala/Function21.scala4
-rw-r--r--src/library/scala/Function22.scala4
-rw-r--r--src/library/scala/Function3.scala4
-rw-r--r--src/library/scala/Function4.scala4
-rw-r--r--src/library/scala/Function5.scala4
-rw-r--r--src/library/scala/Function6.scala4
-rw-r--r--src/library/scala/Function7.scala4
-rw-r--r--src/library/scala/Function8.scala4
-rw-r--r--src/library/scala/Function9.scala4
-rw-r--r--src/library/scala/MatchingStrategy.scala27
-rw-r--r--src/library/scala/Option.scala7
-rw-r--r--src/library/scala/PartialFunction.scala30
-rw-r--r--src/library/scala/Predef.scala5
-rw-r--r--src/library/scala/Product1.scala4
-rw-r--r--src/library/scala/Product10.scala4
-rw-r--r--src/library/scala/Product11.scala4
-rw-r--r--src/library/scala/Product12.scala4
-rw-r--r--src/library/scala/Product13.scala4
-rw-r--r--src/library/scala/Product14.scala4
-rw-r--r--src/library/scala/Product15.scala4
-rw-r--r--src/library/scala/Product16.scala4
-rw-r--r--src/library/scala/Product17.scala4
-rw-r--r--src/library/scala/Product18.scala4
-rw-r--r--src/library/scala/Product19.scala4
-rw-r--r--src/library/scala/Product2.scala4
-rw-r--r--src/library/scala/Product20.scala4
-rw-r--r--src/library/scala/Product21.scala4
-rw-r--r--src/library/scala/Product22.scala4
-rw-r--r--src/library/scala/Product3.scala4
-rw-r--r--src/library/scala/Product4.scala4
-rw-r--r--src/library/scala/Product5.scala4
-rw-r--r--src/library/scala/Product6.scala4
-rw-r--r--src/library/scala/Product7.scala4
-rw-r--r--src/library/scala/Product8.scala4
-rw-r--r--src/library/scala/Product9.scala4
-rw-r--r--src/library/scala/Specializable.scala29
-rw-r--r--src/library/scala/SpecializableCompanion.scala1
-rw-r--r--src/library/scala/StringContext.scala191
-rw-r--r--src/library/scala/Symbol.scala4
-rw-r--r--src/library/scala/Tuple1.scala2
-rw-r--r--src/library/scala/Tuple10.scala2
-rw-r--r--src/library/scala/Tuple11.scala2
-rw-r--r--src/library/scala/Tuple12.scala2
-rw-r--r--src/library/scala/Tuple13.scala2
-rw-r--r--src/library/scala/Tuple14.scala2
-rw-r--r--src/library/scala/Tuple15.scala2
-rw-r--r--src/library/scala/Tuple16.scala2
-rw-r--r--src/library/scala/Tuple17.scala2
-rw-r--r--src/library/scala/Tuple18.scala2
-rw-r--r--src/library/scala/Tuple19.scala2
-rw-r--r--src/library/scala/Tuple2.scala2
-rw-r--r--src/library/scala/Tuple20.scala2
-rw-r--r--src/library/scala/Tuple21.scala2
-rw-r--r--src/library/scala/Tuple22.scala2
-rw-r--r--src/library/scala/Tuple3.scala2
-rw-r--r--src/library/scala/Tuple4.scala2
-rw-r--r--src/library/scala/Tuple5.scala2
-rw-r--r--src/library/scala/Tuple6.scala2
-rw-r--r--src/library/scala/Tuple7.scala2
-rw-r--r--src/library/scala/Tuple8.scala2
-rw-r--r--src/library/scala/Tuple9.scala2
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala23
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala2
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala2
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala5
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala6
-rw-r--r--src/library/scala/collection/SeqLike.scala5
-rw-r--r--src/library/scala/collection/TraversableLike.scala5
-rw-r--r--src/library/scala/collection/TraversableOnce.scala2
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala2
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala14
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala33
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala2
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala30
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala6
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala2
-rw-r--r--src/library/scala/collection/immutable/List.scala39
-rw-r--r--src/library/scala/collection/immutable/Range.scala36
-rw-r--r--src/library/scala/collection/immutable/Stream.scala10
-rw-r--r--src/library/scala/collection/interfaces/TraversableOnceMethods.scala2
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala241
-rw-r--r--src/library/scala/collection/mutable/BasicNode.java20
-rw-r--r--src/library/scala/collection/mutable/CNodeBase.java35
-rw-r--r--src/library/scala/collection/mutable/Ctrie.scala1075
-rw-r--r--src/library/scala/collection/mutable/Gen.java (renamed from src/partest-alternative/scala/tools/partest/Properties.scala)17
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala32
-rw-r--r--src/library/scala/collection/mutable/INodeBase.java35
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala50
-rw-r--r--src/library/scala/collection/mutable/MainNode.java40
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala49
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala123
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala10
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala371
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala2
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala82
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala65
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala57
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala36
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala20
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala18
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala7
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala12
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala23
-rw-r--r--src/library/scala/collection/parallel/mutable/ParCtrie.scala194
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala25
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala9
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala2
-rw-r--r--src/library/scala/collection/parallel/package.scala31
-rw-r--r--src/library/scala/io/Codec.scala20
-rw-r--r--src/library/scala/math/BigDecimal.scala4
-rw-r--r--src/library/scala/math/BigInt.scala6
-rw-r--r--src/library/scala/package.scala8
-rw-r--r--src/library/scala/reflect/ClassManifest.scala4
-rw-r--r--src/library/scala/reflect/Code.scala23
-rw-r--r--src/library/scala/reflect/Manifest.scala68
-rw-r--r--src/library/scala/reflect/ReflectionUtils.scala12
-rw-r--r--src/library/scala/reflect/api/Mirror.scala34
-rw-r--r--src/library/scala/reflect/api/Modifier.scala83
-rwxr-xr-xsrc/library/scala/reflect/api/Names.scala44
-rwxr-xr-xsrc/library/scala/reflect/api/StandardDefinitions.scala26
-rw-r--r--src/library/scala/reflect/api/StandardNames.scala21
-rwxr-xr-xsrc/library/scala/reflect/api/Symbols.scala172
-rw-r--r--src/library/scala/reflect/api/TreeBuildUtil.scala14
-rw-r--r--src/library/scala/reflect/api/TreePrinters.scala29
-rw-r--r--src/library/scala/reflect/api/Trees.scala63
-rwxr-xr-xsrc/library/scala/reflect/api/Types.scala59
-rwxr-xr-xsrc/library/scala/reflect/api/Universe.scala3
-rw-r--r--src/library/scala/reflect/macro/Context.scala36
-rw-r--r--src/library/scala/reflect/package.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction1.scala2
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala4
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java18
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala11
-rw-r--r--src/library/scala/specialized.scala13
-rw-r--r--src/library/scala/sys/process/BasicIO.scala129
-rw-r--r--src/library/scala/sys/process/Process.scala67
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala306
-rw-r--r--src/library/scala/sys/process/ProcessIO.scala49
-rw-r--r--src/library/scala/sys/process/ProcessLogger.scala26
-rw-r--r--src/library/scala/sys/process/package.scala212
-rw-r--r--src/library/scala/util/Properties.scala5
-rw-r--r--src/library/scala/xml/Elem.scala4
-rw-r--r--src/library/scala/xml/MetaData.scala4
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala15
-rw-r--r--src/library/scala/xml/UnprefixedAttribute.scala2
-rw-r--r--src/library/scala/xml/Utility.scala2
-rw-r--r--src/library/scala/xml/include/sax/Main.scala2
-rw-r--r--src/partest-alternative/README50
-rw-r--r--src/partest-alternative/scala/tools/partest/Actions.scala189
-rw-r--r--src/partest-alternative/scala/tools/partest/Alarms.scala86
-rw-r--r--src/partest-alternative/scala/tools/partest/BuildContributors.scala102
-rw-r--r--src/partest-alternative/scala/tools/partest/Categories.scala70
-rw-r--r--src/partest-alternative/scala/tools/partest/Compilable.scala106
-rw-r--r--src/partest-alternative/scala/tools/partest/Config.scala115
-rw-r--r--src/partest-alternative/scala/tools/partest/Dispatcher.scala162
-rw-r--r--src/partest-alternative/scala/tools/partest/Entities.scala74
-rw-r--r--src/partest-alternative/scala/tools/partest/Housekeeping.scala187
-rw-r--r--src/partest-alternative/scala/tools/partest/Partest.scala81
-rw-r--r--src/partest-alternative/scala/tools/partest/PartestSpec.scala104
-rw-r--r--src/partest-alternative/scala/tools/partest/Results.scala121
-rw-r--r--src/partest-alternative/scala/tools/partest/Runner.scala36
-rw-r--r--src/partest-alternative/scala/tools/partest/Statistics.scala46
-rw-r--r--src/partest-alternative/scala/tools/partest/Universe.scala96
-rw-r--r--src/partest-alternative/scala/tools/partest/ant/JavaTask.scala57
-rw-r--r--src/partest-alternative/scala/tools/partest/antlib.xml3
-rw-r--r--src/partest-alternative/scala/tools/partest/category/AllCategories.scala20
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Analysis.scala64
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Compiler.scala140
-rw-r--r--src/partest-alternative/scala/tools/partest/category/Runner.scala108
-rw-r--r--src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala58
-rw-r--r--src/partest-alternative/scala/tools/partest/io/Diff.java873
-rw-r--r--src/partest-alternative/scala/tools/partest/io/DiffPrint.java606
-rw-r--r--src/partest-alternative/scala/tools/partest/io/JUnitReport.scala38
-rw-r--r--src/partest-alternative/scala/tools/partest/io/Logging.scala137
-rw-r--r--src/partest-alternative/scala/tools/partest/package.scala45
-rw-r--r--src/partest-alternative/scala/tools/partest/util/package.scala61
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala27
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala32
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala50
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala5
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala4
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala11
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala2
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala14
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala23
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala32
-rw-r--r--src/partest/scala/tools/partest/nest/TestFile.scala9
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala6
-rw-r--r--src/partest/scala/tools/partest/utils/CodeTest.scala35
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala2
-rw-r--r--src/scalap/scala/tools/scalap/Classfiles.scala26
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala20
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Names.scala96
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala7
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala3
-rw-r--r--src/swing/scala/swing/test/ButtonApp.scala25
-rw-r--r--src/swing/scala/swing/test/CelsiusConverter.scala43
-rw-r--r--src/swing/scala/swing/test/CelsiusConverter2.scala37
-rw-r--r--src/swing/scala/swing/test/ComboBoxes.scala87
-rw-r--r--src/swing/scala/swing/test/CountButton.scala31
-rw-r--r--src/swing/scala/swing/test/Dialogs.scala177
-rw-r--r--src/swing/scala/swing/test/GridBagDemo.scala65
-rw-r--r--src/swing/scala/swing/test/HelloWorld.scala14
-rw-r--r--src/swing/scala/swing/test/LabelTest.scala20
-rw-r--r--src/swing/scala/swing/test/LinePainting.scala54
-rw-r--r--src/swing/scala/swing/test/ListViewDemo.scala18
-rw-r--r--src/swing/scala/swing/test/SimpleApplet.scala19
-rw-r--r--src/swing/scala/swing/test/SwingApp.scala30
-rw-r--r--src/swing/scala/swing/test/TableSelection.scala97
-rw-r--r--src/swing/scala/swing/test/UIDemo.scala148
-rw-r--r--src/swing/scala/swing/test/images/banana.jpgbin6000 -> 0 bytes
-rw-r--r--src/swing/scala/swing/test/images/margarita1.jpgbin14770 -> 0 bytes
-rw-r--r--src/swing/scala/swing/test/images/margarita2.jpgbin17310 -> 0 bytes
-rw-r--r--src/swing/scala/swing/test/images/rose.jpgbin13808 -> 0 bytes
413 files changed, 15531 insertions, 13369 deletions
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 9e5b6810c1..a43b5e02c7 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -97,7 +97,7 @@ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object FunctionZero extends Function(0) {
override def genprodString = "\n// genprod generated these sources at: " + new java.util.Date()
override def covariantSpecs = "@specialized "
- override def descriptiveComment = functionNTemplate.format("javaVersion", "anonfun0",
+ override def descriptiveComment = " " + functionNTemplate.format("javaVersion", "anonfun0",
"""
* val javaVersion = () => sys.props("java.version")
*
@@ -111,10 +111,10 @@ object FunctionZero extends Function(0) {
object FunctionOne extends Function(1) {
override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n"
- override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
- override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) "
- override def descriptiveComment = functionNTemplate.format("succ", "anonfun1",
+ override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1",
"""
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
@@ -146,7 +146,7 @@ object FunctionTwo extends Function(2) {
override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) "
override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
- override def descriptiveComment = functionNTemplate.format("max", "anonfun2",
+ override def descriptiveComment = " " + functionNTemplate.format("max", "anonfun2",
"""
* val max = (x: Int, y: Int) => if (x < y) y else x
*
@@ -175,14 +175,20 @@ class Function(val i: Int) extends Group("Function") with Arity {
*
* {{{
* object Main extends App { %s }
- * }}}"""
+ * }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+ """
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
<file name={fileName}>{header}
/** A function of {i} parameter{s}.
- * {descriptiveComment}
+ *{descriptiveComment}
*/
{classAnnotation}trait {className}{contraCoArgs} extends AnyRef {{ self =>
/** Apply the body of this function to the argument{s}.
@@ -211,12 +217,11 @@ class Function(val i: Int) extends Group("Function") with Arity {
)
// f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
- def curryComment = { """
- /** Creates a curried version of this function.
+ def curryComment = {
+"""/** Creates a curried version of this function.
*
* @return a function `f` such that `f%s == apply%s`
- */
-""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
+ */""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
}
def tupleMethod = {
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 9fddca3c73..679f45ab54 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -9,15 +9,12 @@
<!-- Pull in properties from build -->
<property file="build.properties" />
<!-- Initialize specific properties -->
- <!--<property name="remote.snapshot.repository" value="http://scala-tools.org:8081/nexus/content/repositories/snapshots" />
- <property name="remote.release.repository" value="http://scala-tools.org:8081/nexus/content/repositories/releases" />-->
-
- <property name="remote.snapshot.repository" value="http://nexus.scala-tools.org/content/repositories/snapshots" />
- <property name="remote.release.repository" value="http://nexus.scala-tools.org/content/repositories/releases" />
+ <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
+ <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
<property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
<property name="local.release.repository" value="${user.home}/.m2/repository" />
- <property name="repository.credentials.id" value="scala-tools.org" />
+ <property name="repository.credentials.id" value="sonatype-nexus" />
<property name="settings.file" value="${user.home}/.m2/settings.xml" />
<echo>Using server[${repository.credentials.id}] for maven repository credentials.
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 05e49e5e81..d022ac3f05 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -11,6 +11,9 @@ PROPERTIES
============================================================================ -->
<property name="sbaz.universe" value="http://www.scala-lang.org/downloads/packages"/>
+ <property file="${basedir}/build.number.maven"/>
+ <!-- the maven stuff requires version.major, version.minor and version.patch properties.
+ the "get-scala-revision" script only returns "version.number" -->
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt
new file mode 100644
index 0000000000..173f604098
--- /dev/null
+++ b/src/compiler/rootdoc.txt
@@ -0,0 +1,6 @@
+The Scala compiler API.
+
+The following resources are useful for Scala plugin/compiler development:
+ - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]]
+ - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]]
+ - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel
diff --git a/src/compiler/scala/reflect/internal/AnnotationCheckers.scala b/src/compiler/scala/reflect/internal/AnnotationCheckers.scala
index 666c1d74cb..449b0ca0bc 100644
--- a/src/compiler/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/compiler/scala/reflect/internal/AnnotationCheckers.scala
@@ -49,6 +49,7 @@ trait AnnotationCheckers {
def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
}
+ // Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary.
/** The list of annotation checkers that have been registered */
private var annotationCheckers: List[AnnotationChecker] = Nil
diff --git a/src/compiler/scala/reflect/internal/AnnotationInfos.scala b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
index 665e33e783..9a7c79d856 100644
--- a/src/compiler/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/compiler/scala/reflect/internal/AnnotationInfos.scala
@@ -116,6 +116,11 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
// Classfile annot: args empty. Scala annot: assocs empty.
assert(args.isEmpty || assocs.isEmpty, atp)
+ // @xeno.by: necessary for reification, see Reifiers.scala for more info
+ private var orig: Tree = EmptyTree
+ def original = orig
+ def setOriginal(t: Tree): this.type = { orig = t; this }
+
override def toString = (
atp +
(if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
@@ -130,7 +135,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
private var forced = false
private lazy val forcedInfo =
try {
- val result = lazyInfo
+ val result = lazyInfo
if (result.pos == NoPosition) result setPos pos
result
} finally forced = true
@@ -138,10 +143,12 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
def atp: Type = forcedInfo.atp
def args: List[Tree] = forcedInfo.args
def assocs: List[(Name, ClassfileAnnotArg)] = forcedInfo.assocs
+ def original: Tree = forcedInfo.original
+ def setOriginal(t: Tree): this.type = { forcedInfo.setOriginal(t); this }
// We should always be able to print things without forcing them.
override def toString = if (forced) forcedInfo.toString else "@<?>"
-
+
override def pos: Position = if (forced) forcedInfo.pos else NoPosition
}
@@ -166,10 +173,16 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
def args: List[Tree]
def assocs: List[(Name, ClassfileAnnotArg)]
+ // @xeno.by: necessary for reification, see Reifiers.scala for more info
+ def original: Tree
+ def setOriginal(t: Tree): this.type
+
/** Hand rolling Product. */
def _1 = atp
def _2 = args
def _3 = assocs
+ // @xeno.by: original hasn't become a product member for backward compatibility purposes
+ // def _4 = original
def canEqual(other: Any) = other.isInstanceOf[AnnotationInfo]
override def productPrefix = "AnnotationInfo"
@@ -178,7 +191,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
private var rawpos: Position = NoPosition
def pos = rawpos
- def setPos(pos: Position): this.type = {
+ def setPos(pos: Position): this.type = { // Syncnote: Setpos inaccessible to reflection, so no sync in rawpos necessary.
rawpos = pos
this
}
@@ -230,10 +243,8 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable =>
def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
/** Change all ident's with Symbol "from" to instead use symbol "to" */
- def substIdentSyms(from: Symbol, to: Symbol) = {
- val subs = new TreeSymSubstituter(List(from), List(to))
- AnnotationInfo(atp, args.map(subs(_)), assocs).setPos(pos)
- }
+ def substIdentSyms(from: Symbol, to: Symbol) =
+ AnnotationInfo(atp, args map (_ substTreeSyms (from -> to)), assocs) setPos pos
def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
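The new original/setOriginal pair follows the same fluent-setter idiom as setPos: the setter returns this.type so it can be chained on a freshly built annotation. A self-contained sketch of the idiom with placeholder types (Tree and AnnotationInfo are compiler internals and are not reproduced here):

    class Info {
      private var orig: String = ""                               // stands in for the saved original Tree
      def original: String = orig
      def setOriginal(s: String): this.type = { orig = s; this }
    }

    val info = (new Info).setOriginal("@deprecated")              // chaining works because the setter returns this.type
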
diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
index 38277b5a09..9e5c93753f 100644
--- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala
@@ -29,7 +29,14 @@ trait BaseTypeSeqs {
this: SymbolTable =>
import definitions._
- class BaseTypeSeq(parents: List[Type], elems: Array[Type]) {
+ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ new BaseTypeSeq(parents, elems)
+
+ /** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
+ * This is necessary because when run from reflection every base type sequence needs to have a
+ * SynchronizedBaseTypeSeq as mixin.
+ */
+ class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
incCounter(baseTypeSeqCount)
incCounter(baseTypeSeqLenTotal, elems.length)
@@ -41,7 +48,7 @@ trait BaseTypeSeqs {
// (while NoType is in there to indicate a cycle in this BTS, during the execution of
// the mergePrefixAndArgs below, the elems get copied without the pending map,
// so that NoType's are seen instead of the original type --> spurious compile error)
- val pending = new mutable.BitSet(length)
+ private val pending = new mutable.BitSet(length)
/** The type at i'th position in this sequence; lazy types are returned evaluated. */
def apply(i: Int): Type =
@@ -89,11 +96,11 @@ trait BaseTypeSeqs {
/** Return all evaluated types in this sequence as a list */
def toList: List[Type] = elems.toList
- protected def copy(head: Type, offset: Int): BaseTypeSeq = {
+ def copy(head: Type, offset: Int): BaseTypeSeq = {
val arr = new Array[Type](elems.length + offset)
compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
arr(0) = head
- new BaseTypeSeq(parents, arr)
+ newBaseTypeSeq(parents, arr)
}
/** Compute new base type sequence with `tp` prepended to this sequence */
@@ -113,21 +120,10 @@ trait BaseTypeSeqs {
arr(i) = f(elems(i))
i += 1
}
- new BaseTypeSeq(parents, arr)
+ newBaseTypeSeq(parents, arr)
}
- def lateMap(f: Type => Type): BaseTypeSeq = new BaseTypeSeq(parents map f, elems) {
- override def apply(i: Int) = f(self.apply(i))
- override def rawElem(i: Int) = f(self.rawElem(i))
- override def typeSymbol(i: Int) = self.typeSymbol(i)
- override def toList = self.toList map f
- override protected def copy(head: Type, offset: Int) = (self map f).copy(head, offset)
- override def map(g: Type => Type) = lateMap(g)
- override def lateMap(g: Type => Type) = self.lateMap(x => g(f(x)))
- override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
- override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max
- override def toString = elems.mkString("MBTS(", ",", ")")
- }
+ def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f)
def exists(p: Type => Boolean): Boolean = elems exists p
@@ -177,10 +173,10 @@ trait BaseTypeSeqs {
/** A merker object for a base type sequence that's no yet computed.
* used to catch inheritance cycles
*/
- val undetBaseTypeSeq: BaseTypeSeq = new BaseTypeSeq(List(), Array())
+ val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array())
/** Create a base type sequence consisting of a single type */
- def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = new BaseTypeSeq(List(), Array(tp))
+ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp))
/** Create the base type sequence of a compound type wuth given tp.parents */
def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = {
@@ -244,8 +240,21 @@ trait BaseTypeSeqs {
val elems = new Array[Type](btsSize)
buf.copyToArray(elems, 0)
// Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG
- new BaseTypeSeq(parents, elems)
+ newBaseTypeSeq(parents, elems)
}
-
+
+ class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) {
+ override def apply(i: Int) = f(orig.apply(i))
+ override def rawElem(i: Int) = f(orig.rawElem(i))
+ override def typeSymbol(i: Int) = orig.typeSymbol(i)
+ override def toList = orig.toList map f
+ override def copy(head: Type, offset: Int) = (orig map f).copy(head, offset)
+ override def map(g: Type => Type) = lateMap(g)
+ override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x)))
+ override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
+ override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max
+ override def toString = elems.mkString("MBTS(", ",", ")")
+ }
+
val CyclicInheritance = new Throwable
}
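The constructor is hidden behind the newBaseTypeSeq factory so that the runtime-reflection layer (see SynchronizedOps.scala in the diffstat above) can override the factory and mix in its synchronized variant without touching any call site. A sketch of that pattern under hypothetical names, not the compiler's actual classes:

    trait Cells {                                           // plays the role of BaseTypeSeqs
      class Cell(private var v: Int) {
        def get: Int = v
        def set(x: Int): Unit = v = x
      }
      protected def newCell(v: Int): Cell = new Cell(v)     // every Cell is built through the factory
      def zero: Cell = newCell(0)
    }

    trait SynchronizedCells extends Cells {                 // what a thread-safe universe would mix in
      override protected def newCell(v: Int): Cell = new Cell(v) {
        override def get: Int = synchronized(super.get)
        override def set(x: Int): Unit = synchronized(super.set(x))
      }
    }
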
diff --git a/src/compiler/scala/reflect/internal/ClassfileConstants.scala b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
index 136350ebbb..1c4c007de0 100644
--- a/src/compiler/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/compiler/scala/reflect/internal/ClassfileConstants.scala
@@ -6,6 +6,8 @@
package scala.reflect
package internal
+import annotation.switch
+
object ClassfileConstants {
final val JAVA_MAGIC = 0xCAFEBABE
@@ -86,6 +88,7 @@ object ClassfileConstants {
final val ARRAY_TAG = '['
final val VOID_TAG = 'V'
final val TVAR_TAG = 'T'
+ final val OBJECT_TAG = 'L'
final val ANNOTATION_TAG = '@'
final val SCALA_NOTHING = "scala.runtime.Nothing$"
final val SCALA_NULL = "scala.runtime.Null$"
@@ -326,28 +329,62 @@ object ClassfileConstants {
final val impdep1 = 0xfe
final val impdep2 = 0xff
- def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = {
+ abstract class FlagTranslation {
import Flags._
- var res = 0l
- if ((flags & JAVA_ACC_PRIVATE) != 0)
- res = res | PRIVATE
- else if ((flags & JAVA_ACC_PROTECTED) != 0)
- res = res | PROTECTED
- if ((flags & JAVA_ACC_ABSTRACT) != 0 && (flags & JAVA_ACC_ANNOTATION) == 0)
- res = res | DEFERRED
- if ((flags & JAVA_ACC_FINAL) != 0)
- res = res | FINAL
- if (((flags & JAVA_ACC_INTERFACE) != 0) &&
- ((flags & JAVA_ACC_ANNOTATION) == 0))
- res = res | TRAIT | INTERFACE | ABSTRACT
- if ((flags & JAVA_ACC_SYNTHETIC) != 0)
- res = res | SYNTHETIC
- if ((flags & JAVA_ACC_STATIC) != 0)
- res = res | STATIC
- if (isClass && ((res & DEFERRED) != 0L))
- res = res & ~DEFERRED | ABSTRACT
- if (isField && (res & FINAL) == 0L)
- res = res | MUTABLE
- res | JAVA
+
+ private var isAnnotation = false
+ private var isClass = false
+ private def initFields(flags: Int) = {
+ isAnnotation = (flags & JAVA_ACC_ANNOTATION) != 0
+ isClass = false
+ }
+ private def translateFlag(jflag: Int): Long = (jflag: @switch) match {
+ case JAVA_ACC_PRIVATE => PRIVATE
+ case JAVA_ACC_PROTECTED => PROTECTED
+ case JAVA_ACC_FINAL => FINAL
+ case JAVA_ACC_SYNTHETIC => SYNTHETIC
+ case JAVA_ACC_STATIC => STATIC
+ case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
+ case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
+ case _ => 0L
+ }
+ private def translateFlags(jflags: Int, baseFlags: Long): Long = {
+ var res: Long = JAVA | baseFlags
+ /** fast, elegant, maintainable, pick any two... */
+ res |= translateFlag(jflags & JAVA_ACC_PRIVATE)
+ res |= translateFlag(jflags & JAVA_ACC_PROTECTED)
+ res |= translateFlag(jflags & JAVA_ACC_FINAL)
+ res |= translateFlag(jflags & JAVA_ACC_SYNTHETIC)
+ res |= translateFlag(jflags & JAVA_ACC_STATIC)
+ res |= translateFlag(jflags & JAVA_ACC_ABSTRACT)
+ res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
+ res
+ }
+
+ def classFlags(jflags: Int): Long = {
+ initFields(jflags)
+ isClass = true
+ translateFlags(jflags, 0)
+ }
+ def fieldFlags(jflags: Int): Long = {
+ initFields(jflags)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0)
+ }
+ def methodFlags(jflags: Int): Long = {
+ initFields(jflags)
+ translateFlags(jflags, 0)
+ }
}
+ object FlagTranslation extends FlagTranslation { }
+
+ def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
+ def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
+ def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
+
+ @deprecated("Use another method in this object", "2.10.0")
+ def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
+ if (isClass) toScalaClassFlags(flags)
+ else if (isField) toScalaFieldFlags(flags)
+ else toScalaMethodFlags(flags)
+ )
}
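For callers this replaces the boolean-switch entry point with one method per symbol kind. A sketch of the migration, using only the toScala*Flags methods and JAVA_ACC_* constants defined in this object (and assuming the compiler/reflect jar is on the classpath):

    import scala.reflect.internal.ClassfileConstants._

    val jflags = JAVA_ACC_STATIC | JAVA_ACC_FINAL
    val before = toScalaFlags(jflags, isField = true)   // deprecated catch-all
    val after  = toScalaFieldFlags(jflags)              // dedicated per-kind translation
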
diff --git a/src/compiler/scala/reflect/internal/Constants.scala b/src/compiler/scala/reflect/internal/Constants.scala
index 2edb0d1fe6..c328cc49cb 100644
--- a/src/compiler/scala/reflect/internal/Constants.scala
+++ b/src/compiler/scala/reflect/internal/Constants.scala
@@ -45,7 +45,7 @@ trait Constants extends api.Constants {
case x: Char => CharTag
case x: Type => ClassTag
case x: Symbol => EnumTag
- case _ => throw new Error("bad constant value: " + value)
+ case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass)
}
def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
@@ -217,7 +217,7 @@ trait Constants extends api.Constants {
}
def escapedStringValue: String = {
- def escape(text: String): String = (text map escapedChar) mkString ""
+ def escape(text: String): String = text flatMap escapedChar
tag match {
case NullTag => "null"
case StringTag => "\"" + escape(stringValue) + "\""
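The escapedStringValue change leans on the fact that, for f: Char => String, `text flatMap f` already concatenates the per-character results, so the intermediate map/mkString is redundant. A small check with a stand-in for escapedChar (whose real definition is outside this hunk):

    def escape(c: Char): String = c match {
      case '\n'  => "\\n"
      case '\t'  => "\\t"
      case other => other.toString
    }

    assert("a\tb".flatMap(escape) == ("a\tb" map escape).mkString(""))   // both produce the escaped form a\tb
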
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 15f89e1382..b4b0a7335d 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -6,6 +6,7 @@
package scala.reflect
package internal
+import annotation.{ switch }
import scala.collection.{ mutable, immutable }
import Flags._
import PartialFunction._
@@ -13,13 +14,36 @@ import PartialFunction._
trait Definitions extends reflect.api.StandardDefinitions {
self: SymbolTable =>
+ /** Since both the value parameter types and the result type may
+ * require access to the type parameter symbols, we model polymorphic
+ * creation as a function from those symbols to (formal types, result type).
+ * The Option is to distinguish between nullary methods and empty-param-list
+ * methods.
+ */
+ private type PolyMethodCreator = List[Symbol] => (Option[List[Type]], Type)
+
+ private def newClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): Symbol = {
+ val clazz = owner.newClassSymbol(name, NoPosition, flags)
+ clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
+ }
+ private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): Symbol = {
+ val msym = owner.newMethod(name.encode, NoPosition, flags)
+ val params = msym.newSyntheticValueParams(formals)
+ msym setInfoAndEnter MethodType(params, restpe)
+ }
+
// the scala value classes
trait ValueClassDefinitions {
self: definitions.type =>
private[Definitions] def valueCache(name: Name) = {
- if (name.isTypeName) ScalaPackageClass.info member name
- else ScalaPackageClass.info member name suchThat (_ hasFlag MODULE)
+ val res = (
+ if (name.isTypeName) ScalaPackageClass.info member name
+ else ScalaPackageClass.info member name suchThat (_ hasFlag MODULE)
+ )
+ if (res eq NoSymbol)
+ abort("Could not find value classes! This is a catastrophic failure. scala " + scala.util.Properties.versionString)
+ else res
}
private[Definitions] def valueModuleMethod(className: Name, methodName: Name): Symbol = {
valueCache(className.toTermName).moduleClass.tpe member methodName
@@ -47,7 +71,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
tpnme.Double -> DOUBLE_TAG,
tpnme.Boolean -> BOOL_TAG,
tpnme.Unit -> VOID_TAG,
- tpnme.Object -> TVAR_TAG
+ tpnme.Object -> OBJECT_TAG
)
private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
@@ -60,18 +84,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
- lazy val refClass = classesMap(x => getClass("scala.runtime." + x + "Ref"))
- lazy val volatileRefClass = classesMap(x => getClass("scala.runtime.Volatile" + x + "Ref"))
+ lazy val refClass = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
+ lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
lazy val boxMethod = classesMap(x => valueModuleMethod(x, nme.box))
lazy val unboxMethod = classesMap(x => valueModuleMethod(x, nme.unbox))
- private def newClass(owner: Symbol, name: TypeName, parents: List[Type]): Symbol = {
- val clazz = owner.newClass(NoPosition, name)
- clazz.setInfo(ClassInfoType(parents, new Scope, clazz))
- owner.info.decls.enter(clazz)
- clazz
- }
-
def isNumericSubClass(sub: Symbol, sup: Symbol) = (
(numericWeight contains sub)
&& (numericWeight contains sup)
@@ -85,12 +102,6 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isGetClass(sym: Symbol) =
(sym.name == nme.getClass_) && (sym.paramss.isEmpty || sym.paramss.head.isEmpty)
- private[Definitions] def fullNameStrings: List[String] = nme.ScalaValueNames map ("scala." + _)
- private[Definitions] lazy val fullValueName: Set[Name] = {
- val values = nme.ScalaValueNames flatMap (x => List(newTypeName("scala." + x), newTermName("scala." + x)))
- values.toSet + newTypeName("scala.AnyVal")
- }
-
lazy val AnyValClass = valueCache(tpnme.AnyVal)
lazy val UnitClass = valueCache(tpnme.Unit)
lazy val ByteClass = valueCache(tpnme.Byte)
@@ -101,9 +112,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val FloatClass = valueCache(tpnme.Float)
lazy val DoubleClass = valueCache(tpnme.Double)
lazy val BooleanClass = valueCache(tpnme.Boolean)
- def Boolean_and = getMember(BooleanClass, nme.ZAND)
- def Boolean_or = getMember(BooleanClass, nme.ZOR)
- def Boolean_not = getMember(BooleanClass, nme.UNARY_!)
+ lazy val Boolean_and = getMember(BooleanClass, nme.ZAND)
+ lazy val Boolean_or = getMember(BooleanClass, nme.ZOR)
+ lazy val Boolean_not = getMember(BooleanClass, nme.UNARY_!)
def ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
def ScalaValueClasses: List[Symbol] = List(
@@ -117,6 +128,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
FloatClass,
DoubleClass
)
+ def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
}
object definitions extends AbsDefinitions with ValueClassDefinitions {
@@ -129,21 +141,21 @@ trait Definitions extends reflect.api.StandardDefinitions {
// This is the package _root_. The actual root cannot be referenced at
// the source level, but _root_ is essentially a function () => <root>.
lazy val RootPackage: Symbol = {
- val rp = NoSymbol.newValue(NoPosition, nme.ROOTPKG)
- .setFlag(FINAL | MODULE | PACKAGE | JAVA)
- .setInfo(NullaryMethodType(RootClass.tpe))
+ val rp = (
+ NoSymbol.newValue(nme.ROOTPKG, NoPosition, FINAL | MODULE | PACKAGE | JAVA)
+ setInfo NullaryMethodType(RootClass.tpe)
+ )
RootClass.sourceModule = rp
rp
}
// This is the actual root of everything, including the package _root_.
lazy val RootClass: ModuleClassSymbol = (
- NoSymbol.newModuleClass(NoPosition, tpnme.ROOT)
- setFlag (FINAL | MODULE | PACKAGE | JAVA)
+ NoSymbol.newModuleClassSymbol(tpnme.ROOT, NoPosition, FINAL | MODULE | PACKAGE | JAVA)
setInfo rootLoader
)
// The empty package, which holds all top level types without given packages.
- lazy val EmptyPackage = RootClass.newPackage(NoPosition, nme.EMPTY_PACKAGE_NAME).setFlag(FINAL)
+ lazy val EmptyPackage = RootClass.newPackage(nme.EMPTY_PACKAGE_NAME, NoPosition, FINAL)
lazy val EmptyPackageClass = EmptyPackage.moduleClass
lazy val JavaLangPackage = getModule(sn.JavaLang)
@@ -151,8 +163,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val ScalaPackage = getModule(nme.scala_)
lazy val ScalaPackageClass = ScalaPackage.moduleClass
- lazy val RuntimePackage = getModule("scala.runtime")
+ lazy val RuntimePackage = getRequiredModule("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass
+
+ lazy val JavaLangEnumClass = getRequiredClass("java.lang.Enum")
// convenient one-argument parameter lists
lazy val anyparam = List(AnyClass.typeConstructor)
@@ -163,23 +177,54 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def booltype = BooleanClass.typeConstructor
private def inttype = IntClass.typeConstructor
private def stringtype = StringClass.typeConstructor
+
+ // Java types
+ def javaTypeName(jclazz: Class[_]): TypeName = newTypeName(jclazz.getName)
+
+ def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
+ case java.lang.Void.TYPE => UnitClass
+ case java.lang.Byte.TYPE => ByteClass
+ case java.lang.Character.TYPE => CharClass
+ case java.lang.Short.TYPE => ShortClass
+ case java.lang.Integer.TYPE => IntClass
+ case java.lang.Long.TYPE => LongClass
+ case java.lang.Float.TYPE => FloatClass
+ case java.lang.Double.TYPE => DoubleClass
+ case java.lang.Boolean.TYPE => BooleanClass
+ case _ => NoSymbol
+ }
+ def valueClassToJavaType(sym: Symbol): Class[_] = sym match {
+ case UnitClass => java.lang.Void.TYPE
+ case ByteClass => java.lang.Byte.TYPE
+ case CharClass => java.lang.Character.TYPE
+ case ShortClass => java.lang.Short.TYPE
+ case IntClass => java.lang.Integer.TYPE
+ case LongClass => java.lang.Long.TYPE
+ case FloatClass => java.lang.Float.TYPE
+ case DoubleClass => java.lang.Double.TYPE
+ case BooleanClass => java.lang.Boolean.TYPE
+ case _ => null
+ }
// top types
- lazy val AnyClass = newClass(ScalaPackageClass, tpnme.Any, Nil) setFlag (ABSTRACT)
+ lazy val AnyClass = newClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.typeConstructor)
lazy val ObjectClass = getClass(sn.Object)
- lazy val AnyCompanionClass = getClass("scala.AnyCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
- lazy val AnyValCompanionClass = getClass("scala.AnyValCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
+
+ // Note: this is not the type alias AnyRef, it's a companion-like
+ // object used by the @specialize annotation.
+ def AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef)
+ @deprecated("Use AnyRefModule", "2.10.0")
+ def Predef_AnyRef = AnyRefModule
// bottom types
- lazy val RuntimeNothingClass = getClass(ClassfileConstants.SCALA_NOTHING)
- lazy val RuntimeNullClass = getClass(ClassfileConstants.SCALA_NULL)
+ lazy val RuntimeNothingClass = getClass(fulltpnme.RuntimeNothing)
+ lazy val RuntimeNullClass = getClass(fulltpnme.RuntimeNull)
sealed abstract class BottomClassSymbol(name: TypeName, parent: Symbol) extends ClassSymbol(ScalaPackageClass, NoPosition, name) {
locally {
- this setFlag ABSTRACT | TRAIT | FINAL
- this setInfo ClassInfoType(List(parent.tpe), new Scope, this)
- owner.info.decls enter this
+ this initFlags ABSTRACT | TRAIT | FINAL
+ this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
}
final override def isBottomClass = true
}
@@ -194,25 +239,25 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
// exceptions and other throwables
- lazy val ClassCastExceptionClass = getClass("java.lang.ClassCastException")
+ lazy val ClassCastExceptionClass = getRequiredClass("java.lang.ClassCastException")
lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
lazy val InvocationTargetExceptionClass = getClass(sn.InvTargetException)
- lazy val MatchErrorClass = getClass("scala.MatchError")
- lazy val NonLocalReturnControlClass = getClass("scala.runtime.NonLocalReturnControl")
+ lazy val MatchErrorClass = getRequiredClass("scala.MatchError")
+ lazy val NonLocalReturnControlClass = getRequiredClass("scala.runtime.NonLocalReturnControl")
lazy val NullPointerExceptionClass = getClass(sn.NPException)
lazy val ThrowableClass = getClass(sn.Throwable)
- lazy val UninitializedErrorClass = getClass("scala.UninitializedFieldError")
+ lazy val UninitializedErrorClass = getRequiredClass("scala.UninitializedFieldError")
// fundamental reference classes
lazy val ScalaObjectClass = getMember(ScalaPackageClass, tpnme.ScalaObject)
- lazy val PartialFunctionClass = getClass("scala.PartialFunction")
- lazy val AbstractPartialFunctionClass = getClass("scala.runtime.AbstractPartialFunction")
- lazy val SymbolClass = getClass("scala.Symbol")
+ lazy val PartialFunctionClass = getRequiredClass("scala.PartialFunction")
+ lazy val AbstractPartialFunctionClass = getRequiredClass("scala.runtime.AbstractPartialFunction")
+ lazy val SymbolClass = getRequiredClass("scala.Symbol")
lazy val StringClass = getClass(sn.String)
lazy val StringModule = StringClass.linkedClassOfClass
lazy val ClassClass = getClass(sn.Class)
def Class_getMethod = getMember(ClassClass, nme.getMethod_)
- lazy val DynamicClass = getClass("scala.Dynamic")
+ lazy val DynamicClass = getRequiredClass("scala.Dynamic")
// fundamental modules
lazy val SysPackage = getPackageObject("scala.sys")
@@ -223,11 +268,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Those modules and their module classes
lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
- lazy val PredefModule: Symbol = getModule("scala.Predef")
+ lazy val PredefModule: Symbol = getRequiredModule("scala.Predef")
lazy val PredefModuleClass = PredefModule.moduleClass
- // Note: this is not the type alias AnyRef, it's a val defined in Predef
- // used by the @specialize annotation.
- def Predef_AnyRef = getMember(PredefModule, nme.AnyRef)
+
def Predef_classOf = getMember(PredefModule, nme.classOf)
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
@@ -241,27 +284,33 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isPredefMemberNamed(sym: Symbol, name: Name) = (
(sym.name == name) && (sym.owner == PredefModule.moduleClass)
)
+
+ /** Specialization.
+ */
+ lazy val SpecializableModule = getRequiredModule("scala.Specializable")
+ lazy val GroupOfSpecializable = SpecializableModule.info.member(newTypeName("Group"))
+
+ lazy val ConsoleModule: Symbol = getRequiredModule("scala.Console")
+ lazy val ScalaRunTimeModule: Symbol = getRequiredModule("scala.runtime.ScalaRunTime")
+ lazy val SymbolModule: Symbol = getRequiredModule("scala.Symbol")
+ lazy val Symbol_apply = SymbolModule.info decl nme.apply
- lazy val ConsoleModule: Symbol = getModule("scala.Console")
- lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime")
- lazy val SymbolModule: Symbol = getModule("scala.Symbol")
- lazy val Symbol_apply = getMember(SymbolModule, nme.apply)
def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
- def arrayApplyMethod = getMember(ScalaRunTimeModule, "array_apply")
- def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
- def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
- def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
- def ensureAccessibleMethod = getMember(ScalaRunTimeModule, "ensureAccessible")
+ def arrayApplyMethod = getMember(ScalaRunTimeModule, nme.array_apply)
+ def arrayUpdateMethod = getMember(ScalaRunTimeModule, nme.array_update)
+ def arrayLengthMethod = getMember(ScalaRunTimeModule, nme.array_length)
+ def arrayCloneMethod = getMember(ScalaRunTimeModule, nme.array_clone)
+ def ensureAccessibleMethod = getMember(ScalaRunTimeModule, nme.ensureAccessible)
def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
// classes with special meanings
- lazy val StringAddClass = getClass("scala.runtime.StringAdd")
- lazy val ArrowAssocClass = getClass("scala.Predef.ArrowAssoc")
+ lazy val StringAddClass = getRequiredClass("scala.runtime.StringAdd")
+ lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc")
lazy val StringAdd_+ = getMember(StringAddClass, nme.PLUS)
- lazy val NotNullClass = getClass("scala.NotNull")
- lazy val ScalaNumberClass = getClass("scala.math.ScalaNumber")
- lazy val TraitSetterAnnotationClass = getClass("scala.runtime.TraitSetter")
- lazy val DelayedInitClass = getClass("scala.DelayedInit")
+ lazy val NotNullClass = getRequiredClass("scala.NotNull")
+ lazy val ScalaNumberClass = getRequiredClass("scala.math.ScalaNumber")
+ lazy val TraitSetterAnnotationClass = getRequiredClass("scala.runtime.TraitSetter")
+ lazy val DelayedInitClass = getRequiredClass("scala.DelayedInit")
def delayedInitMethod = getMember(DelayedInitClass, nme.delayedInit)
// a dummy value that communicates that a delayedInit call is compiler-generated
// from phase UnCurry to phase Constructors
@@ -269,26 +318,19 @@ trait Definitions extends reflect.api.StandardDefinitions {
// def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
// .setInfo(UnitClass.tpe)
- lazy val TypeConstraintClass = getClass("scala.annotation.TypeConstraint")
- lazy val SingletonClass = newClass(ScalaPackageClass, tpnme.Singleton, anyparam) setFlag (ABSTRACT | TRAIT | FINAL)
- lazy val SerializableClass = getClass("scala.Serializable")
+ lazy val TypeConstraintClass = getRequiredClass("scala.annotation.TypeConstraint")
+ lazy val SingletonClass = newClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
+ lazy val SerializableClass = getRequiredClass("scala.Serializable")
lazy val JavaSerializableClass = getClass(sn.JavaSerializable)
- lazy val ComparableClass = getClass("java.lang.Comparable")
- lazy val JavaCloneableClass = getClass("java.lang.Cloneable")
- lazy val RemoteInterfaceClass = getClass("java.rmi.Remote")
- lazy val RemoteExceptionClass = getClass("java.rmi.RemoteException")
-
- lazy val RepeatedParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.REPEATED_PARAM_CLASS_NAME,
- tparam => seqType(tparam.typeConstructor)
- )
+ lazy val ComparableClass = getRequiredClass("java.lang.Comparable")
+ lazy val JavaCloneableClass = getRequiredClass("java.lang.Cloneable")
+ lazy val RemoteInterfaceClass = getRequiredClass("java.rmi.Remote")
+ lazy val RemoteExceptionClass = getRequiredClass("java.rmi.RemoteException")
- lazy val JavaRepeatedParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.JAVA_REPEATED_PARAM_CLASS_NAME,
- tparam => arrayType(tparam.typeConstructor)
- )
+ lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.typeConstructor)
+ lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.typeConstructor)
+ lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.typeConstructor))
+ lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.typeConstructor))
def isByNameParamType(tp: Type) = tp.typeSymbol == ByNameParamClass
def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
@@ -296,6 +338,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
+ def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
def isJavaVarArgs(params: List[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe)
def isScalaVarArgs(params: List[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
def isVarArgsList(params: List[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
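// Illustration (ordinary user-level Scala, not part of the patch): roughly how
// the phantom parameter classes above surface in source code.
object ParamFormsSketch {
  def byName(x: => Int): Int  = x        // `=> Int` is represented via ByNameParamClass
  def repeated(xs: Int*): Int = xs.sum   // `Int*` is represented via RepeatedParamClass
}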
@@ -316,89 +359,103 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => false
}
- lazy val ByNameParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.BYNAME_PARAM_CLASS_NAME,
- tparam => AnyClass.typeConstructor
- )
- lazy val EqualsPatternClass = {
- val clazz = newClass(ScalaPackageClass, tpnme.EQUALS_PATTERN_NAME, Nil)
- clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, new Scope, clazz))
- }
+ lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
// collections classes
- lazy val ConsClass = getClass("scala.collection.immutable.$colon$colon")
- lazy val IterableClass = getClass("scala.collection.Iterable")
- lazy val IteratorClass = getClass("scala.collection.Iterator")
- lazy val ListClass = getClass("scala.collection.immutable.List")
- lazy val SeqClass = getClass("scala.collection.Seq")
- lazy val StringBuilderClass = getClass("scala.collection.mutable.StringBuilder")
- lazy val TraversableClass = getClass("scala.collection.Traversable")
-
- lazy val ListModule = getModule("scala.collection.immutable.List")
+ lazy val ConsClass = getRequiredClass("scala.collection.immutable.$colon$colon")
+ lazy val IterableClass = getRequiredClass("scala.collection.Iterable")
+ lazy val IteratorClass = getRequiredClass("scala.collection.Iterator")
+ lazy val ListClass = getRequiredClass("scala.collection.immutable.List")
+ lazy val SeqClass = getRequiredClass("scala.collection.Seq")
+ lazy val StringBuilderClass = getRequiredClass("scala.collection.mutable.StringBuilder")
+ lazy val TraversableClass = getRequiredClass("scala.collection.Traversable")
+
+ lazy val ListModule = getRequiredModule("scala.collection.immutable.List")
lazy val List_apply = getMember(ListModule, nme.apply)
- lazy val NilModule = getModule("scala.collection.immutable.Nil")
- lazy val SeqModule = getModule("scala.collection.Seq")
+ lazy val NilModule = getRequiredModule("scala.collection.immutable.Nil")
+ lazy val SeqModule = getRequiredModule("scala.collection.Seq")
+ lazy val IteratorModule = getRequiredModule("scala.collection.Iterator")
+ lazy val Iterator_apply = getMember(IteratorModule, nme.apply)
// arrays and their members
- lazy val ArrayModule = getModule("scala.Array")
- def ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
- lazy val ArrayClass = getClass("scala.Array")
- def Array_apply = getMember(ArrayClass, nme.apply)
- def Array_update = getMember(ArrayClass, nme.update)
- def Array_length = getMember(ArrayClass, nme.length)
- lazy val Array_clone = getMember(ArrayClass, nme.clone_)
+ lazy val ArrayModule = getRequiredModule("scala.Array")
+ lazy val ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
+ lazy val ArrayClass = getRequiredClass("scala.Array")
+ lazy val Array_apply = getMember(ArrayClass, nme.apply)
+ lazy val Array_update = getMember(ArrayClass, nme.update)
+ lazy val Array_length = getMember(ArrayClass, nme.length)
+ lazy val Array_clone = getMember(ArrayClass, nme.clone_)
// reflection / structural types
- lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
- lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
+ lazy val SoftReferenceClass = getRequiredClass("java.lang.ref.SoftReference")
+ lazy val WeakReferenceClass = getRequiredClass("java.lang.ref.WeakReference")
lazy val MethodClass = getClass(sn.MethodAsObject)
def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
- lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
- lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
+ lazy val EmptyMethodCacheClass = getRequiredClass("scala.runtime.EmptyMethodCache")
+ lazy val MethodCacheClass = getRequiredClass("scala.runtime.MethodCache")
def methodCache_find = getMember(MethodCacheClass, nme.find_)
def methodCache_add = getMember(MethodCacheClass, nme.add_)
// scala.reflect
- lazy val ReflectApiUniverse = getClass("scala.reflect.api.Universe")
- lazy val ReflectRuntimeMirror = getModule("scala.reflect.runtime.Mirror")
- def freeValueMethod = getMember(ReflectRuntimeMirror, "freeValue")
+ lazy val ReflectApiUniverse = getRequiredClass("scala.reflect.api.Universe")
+ lazy val ReflectMacroContext = getRequiredClass("scala.reflect.macro.Context")
+ lazy val ReflectRuntimeMirror = getRequiredModule("scala.reflect.runtime.Mirror")
+ def freeValueMethod = getMember(ReflectRuntimeMirror, nme.freeValue)
lazy val ReflectPackage = getPackageObject("scala.reflect")
- def Reflect_mirror = getMember(ReflectPackage, "mirror")
+ def Reflect_mirror = getMember(ReflectPackage, nme.mirror)
+ lazy val PartialManifestClass = getRequiredClass("scala.reflect.ClassManifest")
+ lazy val PartialManifestModule = getRequiredModule("scala.reflect.ClassManifest")
+ lazy val FullManifestClass = getRequiredClass("scala.reflect.Manifest")
+ lazy val FullManifestModule = getRequiredModule("scala.reflect.Manifest")
+ lazy val OptManifestClass = getRequiredClass("scala.reflect.OptManifest")
+ lazy val NoManifest = getRequiredModule("scala.reflect.NoManifest")
- lazy val PartialManifestClass = getClass("scala.reflect.ClassManifest")
- lazy val PartialManifestModule = getModule("scala.reflect.ClassManifest")
- lazy val FullManifestClass = getClass("scala.reflect.Manifest")
- lazy val FullManifestModule = getModule("scala.reflect.Manifest")
- lazy val OptManifestClass = getClass("scala.reflect.OptManifest")
- lazy val NoManifest = getModule("scala.reflect.NoManifest")
- lazy val CodeClass = getClass(sn.Code)
- lazy val CodeModule = getModule(sn.Code)
- def Code_lift = getMember(CodeModule, nme.lift_)
-
- lazy val ScalaSignatureAnnotation = getClass("scala.reflect.ScalaSignature")
- lazy val ScalaLongSignatureAnnotation = getClass("scala.reflect.ScalaLongSignature")
+ lazy val ScalaSignatureAnnotation = getRequiredClass("scala.reflect.ScalaSignature")
+ lazy val ScalaLongSignatureAnnotation = getRequiredClass("scala.reflect.ScalaLongSignature")
// Option classes
- lazy val OptionClass: Symbol = getClass("scala.Option")
- lazy val SomeClass: Symbol = getClass("scala.Some")
- lazy val NoneModule: Symbol = getModule("scala.None")
- lazy val SomeModule: Symbol = getModule("scala.Some")
+ lazy val OptionClass: Symbol = getRequiredClass("scala.Option")
+ lazy val SomeClass: Symbol = getRequiredClass("scala.Some")
+ lazy val NoneModule: Symbol = getRequiredModule("scala.None")
+ lazy val SomeModule: Symbol = getRequiredModule("scala.Some")
+
+ /** Note: don't use this manifest/type function for anything important,
+ * as it is incomplete. It would be good to have existential types
+ * working as well, but unfortunately the manifests only encode the
+ * relevant information in their toString method.
+ */
+ def manifestToType(m: OptManifest[_]): Type = m match {
+ case m: ClassManifest[_] =>
+ val sym = manifestToSymbol(m)
+ val args = m.typeArguments
+
+ if ((sym eq NoSymbol) || args.isEmpty) sym.tpe
+ else appliedType(sym.typeConstructor, args map manifestToType)
+ case _ =>
+ NoType
+ }
+
+ def manifestToSymbol(m: ClassManifest[_]): Symbol = m match {
+ case x: scala.reflect.AnyValManifest[_] =>
+ getMember(ScalaPackageClass, newTypeName("" + x))
+ case _ =>
+ val name = m.erasure.getName
+ if (name endsWith nme.MODULE_SUFFIX_STRING)
+ getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING)
+ else
+ getClassIfDefined(name)
+ }
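// Illustration (not part of the patch): a user-level sketch of the manifest
// values these helpers consume, using plain library calls of that era rather
// than compiler internals.
object ManifestInputsSketch {
  val lm   = manifest[List[Int]]
  val name = lm.erasure.getName     // "scala.collection.immutable.List"
  val args = lm.typeArguments       // List(Int manifest), the element-type manifest
}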
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym
- // The given symbol is a method with the right signature to be a runnable java program.
- def isJavaMainMethod(sym: Symbol) = sym.tpe match {
- case MethodType(param :: Nil, restpe) if restpe.typeSymbol == UnitClass =>
- param.tpe match {
- case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == StringClass
- case _ => false
- }
- case _ => false
- }
+ // The given symbol is a method with the right name and signature to be a runnable Java program.
+ def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match {
+ case MethodType(p :: Nil, restpe) => isArrayOfSymbol(p.tpe, StringClass) && restpe.typeSymbol == UnitClass
+ case _ => false
+ })
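// Illustration (not part of the patch): the shape isJavaMainMethod accepts,
// written as ordinary Scala source.
object MainShapeSketch {
  def main(args: Array[String]): Unit = ()   // name `main`, Array[String] => Unit
}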
// The given class has a main method.
def hasJavaMainMethod(sym: Symbol): Boolean =
(sym.tpe member nme.main).alternatives exists isJavaMainMethod
@@ -416,7 +473,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Product, Tuple, Function
private def mkArityArray(name: String, arity: Int, countFrom: Int = 1): Array[Symbol] = {
- val list = countFrom to arity map (i => getClass("scala." + name + i))
+ val list = countFrom to arity map (i => getRequiredClass("scala." + name + i))
if (countFrom == 0) list.toArray
else (NoSymbol +: list).toArray
}
@@ -432,10 +489,24 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
lazy val isProductNClass = ProductClass.toSet
+ def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match {
+ case ByteClass => nme.wrapByteArray
+ case ShortClass => nme.wrapShortArray
+ case CharClass => nme.wrapCharArray
+ case IntClass => nme.wrapIntArray
+ case LongClass => nme.wrapLongArray
+ case FloatClass => nme.wrapFloatArray
+ case DoubleClass => nme.wrapDoubleArray
+ case BooleanClass => nme.wrapBooleanArray
+ case UnitClass => nme.wrapUnitArray
+ case _ =>
+ if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+ else nme.genericWrapArray
+ }
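// Illustration (not part of the patch): the names returned above refer to the
// array-wrapping methods provided by Predef (via LowPriorityImplicits) in this
// era of the library; a user-level sketch under that assumption.
object WrapArraySketch {
  val ints = scala.Predef.wrapIntArray(Array(1, 2, 3))    // nme.wrapIntArray for IntClass
  val refs = scala.Predef.wrapRefArray(Array("a", "b"))   // nme.wrapRefArray for reference elements
}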
- def tupleField(n: Int, j: Int) = getMember(TupleClass(n), "_" + j)
- def isTupleType(tp: Type): Boolean = isTupleType(tp, false)
- def isTupleTypeOrSubtype(tp: Type): Boolean = isTupleType(tp, true)
+ def tupleField(n: Int, j: Int) = getMember(TupleClass(n), nme.productAccessorName(j))
+ def isTupleType(tp: Type): Boolean = isTupleType(tp, false)
+ def isTupleTypeOrSubtype(tp: Type): Boolean = isTupleType(tp, true)
private def isTupleType(tp: Type, subtypeOK: Boolean) = tp.normalize match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val len = args.length
@@ -454,7 +525,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
} else NoType
}
- lazy val ProductRootClass: Symbol = getClass("scala.Product")
+ lazy val ProductRootClass: Symbol = getRequiredClass("scala.Product")
def Product_productArity = getMember(ProductRootClass, nme.productArity)
def Product_productElement = getMember(ProductRootClass, nme.productElement)
// def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
@@ -523,24 +594,51 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => NoType
}
- def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg))
- def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg))
- def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
+ def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg))
+ def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg))
+ def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
+ def iteratorOfType(tp: Type) = appliedType(IteratorClass.typeConstructor, List(tp))
+
+ lazy val StringArray = arrayType(StringClass.tpe)
+ lazy val ObjectArray = arrayType(ObjectClass.tpe)
def ClassType(arg: Type) =
if (phase.erasedTypes || forMSIL) ClassClass.tpe
else appliedType(ClassClass.typeConstructor, List(arg))
+
+ def vmClassType(arg: Type): Type = ClassType(arg)
+ def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
+
+ /** Given a class symbol C with type parameters T1, T2, ... Tn
+ * which have lower/upper bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
+ * returns an existential type of the form
+ *
+ * C[E1, ..., En] forSome { E1 >: LB1 <: UB1, ..., En >: LBn <: UBn }.
+ */
+ def classExistentialType(clazz: Symbol): Type =
+ newExistentialType(clazz.typeParams, clazz.tpe)
+
+ /** Given type U, creates a Type representing Class[_ <: U].
+ */
+ def boundedClassType(upperBound: Type) =
+ appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
+
+ /** To avoid unchecked warnings on polymorphic classes, translate
+ * a Foo[T] into a Foo[_] for use in the pattern matcher.
+ */
+ @deprecated("Use classExistentialType", "2.10.0")
+ def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
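// Illustration (not part of the patch): the existential forms these helpers
// denote, written in ordinary Scala type syntax with example classes.
object ExistentialFormsSketch {
  type ClassExistential = List[A] forSome { type A }   // shape of classExistentialType(ListClass)
  type BoundedClass     = Class[_ <: String]           // shape of boundedClassType(StringClass.tpe)
}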
//
// .NET backend
//
- lazy val ComparatorClass = getClass("scala.runtime.Comparator")
+ lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
// System.ValueType
lazy val ValueTypeClass: Symbol = getClass(sn.ValueType)
// System.MulticastDelegate
lazy val DelegateClass: Symbol = getClass(sn.Delegate)
- var Delegate_scalaCallers: List[Symbol] = List()
+ var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: no synchronization necessary yet, as this is only used for .NET, where reflection is not supported.
// Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
// var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap()
@@ -560,31 +658,40 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => false
})
}
-
+
// members of class scala.Any
- var Any_== : Symbol = _
- var Any_!= : Symbol = _
- var Any_equals : Symbol = _
- var Any_hashCode : Symbol = _
- var Any_toString : Symbol = _
- var Any_getClass : Symbol = _
- var Any_isInstanceOf: Symbol = _
- var Any_asInstanceOf: Symbol = _
- var Any_## : Symbol = _
-
- // members of class java.lang.{Object, String}
- var Object_eq : Symbol = _
- var Object_ne : Symbol = _
- var Object_== : Symbol = _
- var Object_!= : Symbol = _
- var Object_## : Symbol = _
- var Object_synchronized: Symbol = _
- lazy val Object_isInstanceOf = newPolyMethod(
- ObjectClass, "$isInstanceOf",
- tparam => MethodType(List(), booltype)) setFlag (FINAL | SYNTHETIC)
- lazy val Object_asInstanceOf = newPolyMethod(
- ObjectClass, "$asInstanceOf",
- tparam => MethodType(List(), tparam.typeConstructor)) setFlag (FINAL | SYNTHETIC)
+ lazy val Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
+ lazy val Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
+ lazy val Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
+ lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
+ lazy val Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
+ lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL)
+
+ // Any_getClass requires special handling. The return type is determined on
+ // a per-call-site basis as if the function being called were actually:
+ //
+ // // Assuming `target.getClass()`
+ // def getClass[T](target: T): Class[_ <: T]
+ //
+ // Since getClass is not actually a polymorphic method, this requires compiler
+ // participation. At the "Any" level, the return type is Class[_] as it is in
+ // java.lang.Object. Java also special-cases the return type.
+ lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED)
+ lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
+ lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
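// Illustration (not part of the patch): what the special-cased typing of
// getClass buys at call sites, in ordinary user code.
object GetClassSketch {
  val s = "abc"
  val refined: Class[_ <: String] = s.getClass   // refined per call site
  val any: Any = s
  val erasedView: Class[_] = any.getClass        // at the Any/Object level: Class[_]
}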
+
+ // members of class java.lang.{ Object, String }
+ lazy val Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
+ lazy val Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
+ lazy val Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
+ lazy val Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
+ lazy val Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
+ lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype)
+ lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor)
+ lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
+ (Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
+ )
+ lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL)
def Object_getClass = getMember(ObjectClass, nme.getClass_)
def Object_clone = getMember(ObjectClass, nme.clone_)
@@ -595,60 +702,58 @@ trait Definitions extends reflect.api.StandardDefinitions {
def Object_hashCode = getMember(ObjectClass, nme.hashCode_)
def Object_toString = getMember(ObjectClass, nme.toString_)
- var String_+ : Symbol = _
-
// boxed classes
- lazy val ObjectRefClass = getClass("scala.runtime.ObjectRef")
- lazy val VolatileObjectRefClass = getClass("scala.runtime.VolatileObjectRef")
- lazy val BoxesRunTimeClass = getModule("scala.runtime.BoxesRunTime")
+ lazy val ObjectRefClass = getRequiredClass("scala.runtime.ObjectRef")
+ lazy val VolatileObjectRefClass = getRequiredClass("scala.runtime.VolatileObjectRef")
+ lazy val BoxesRunTimeClass = getRequiredModule("scala.runtime.BoxesRunTime")
lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
- lazy val BoxedByteClass = getClass("java.lang.Byte")
- lazy val BoxedShortClass = getClass("java.lang.Short")
- lazy val BoxedIntClass = getClass("java.lang.Integer")
- lazy val BoxedLongClass = getClass("java.lang.Long")
- lazy val BoxedFloatClass = getClass("java.lang.Float")
- lazy val BoxedDoubleClass = getClass("java.lang.Double")
-
- lazy val BoxedUnitClass = getClass("scala.runtime.BoxedUnit")
- lazy val BoxedUnitModule = getModule("scala.runtime.BoxedUnit")
- def BoxedUnit_UNIT = getMember(BoxedUnitModule, "UNIT")
- def BoxedUnit_TYPE = getMember(BoxedUnitModule, "TYPE")
+ lazy val BoxedByteClass = getRequiredClass("java.lang.Byte")
+ lazy val BoxedShortClass = getRequiredClass("java.lang.Short")
+ lazy val BoxedIntClass = getRequiredClass("java.lang.Integer")
+ lazy val BoxedLongClass = getRequiredClass("java.lang.Long")
+ lazy val BoxedFloatClass = getRequiredClass("java.lang.Float")
+ lazy val BoxedDoubleClass = getRequiredClass("java.lang.Double")
+
+ lazy val BoxedUnitClass = getRequiredClass("scala.runtime.BoxedUnit")
+ lazy val BoxedUnitModule = getRequiredModule("scala.runtime.BoxedUnit")
+ def BoxedUnit_UNIT = getMember(BoxedUnitModule, nme.UNIT)
+ def BoxedUnit_TYPE = getMember(BoxedUnitModule, nme.TYPE_)
// Annotation base classes
- lazy val AnnotationClass = getClass("scala.annotation.Annotation")
- lazy val ClassfileAnnotationClass = getClass("scala.annotation.ClassfileAnnotation")
- lazy val StaticAnnotationClass = getClass("scala.annotation.StaticAnnotation")
+ lazy val AnnotationClass = getRequiredClass("scala.annotation.Annotation")
+ lazy val ClassfileAnnotationClass = getRequiredClass("scala.annotation.ClassfileAnnotation")
+ lazy val StaticAnnotationClass = getRequiredClass("scala.annotation.StaticAnnotation")
// Annotations
- lazy val BridgeClass = getClass("scala.annotation.bridge")
- lazy val ElidableMethodClass = getClass("scala.annotation.elidable")
- lazy val ImplicitNotFoundClass = getClass("scala.annotation.implicitNotFound")
- lazy val MigrationAnnotationClass = getClass("scala.annotation.migration")
- lazy val ScalaStrictFPAttr = getClass("scala.annotation.strictfp")
- lazy val SerializableAttr = getClass("scala.annotation.serializable") // @serializable is deprecated
- lazy val SwitchClass = getClass("scala.annotation.switch")
- lazy val TailrecClass = getClass("scala.annotation.tailrec")
- lazy val VarargsClass = getClass("scala.annotation.varargs")
- lazy val uncheckedStableClass = getClass("scala.annotation.unchecked.uncheckedStable")
- lazy val uncheckedVarianceClass = getClass("scala.annotation.unchecked.uncheckedVariance")
-
- lazy val BeanPropertyAttr = getClass("scala.beans.BeanProperty")
- lazy val BooleanBeanPropertyAttr = getClass("scala.beans.BooleanBeanProperty")
- lazy val CloneableAttr = getClass("scala.cloneable")
- lazy val DeprecatedAttr = getClass("scala.deprecated")
- lazy val DeprecatedNameAttr = getClass("scala.deprecatedName")
- lazy val NativeAttr = getClass("scala.native")
- lazy val RemoteAttr = getClass("scala.remote")
- lazy val ScalaInlineClass = getClass("scala.inline")
- lazy val ScalaNoInlineClass = getClass("scala.noinline")
- lazy val SerialVersionUIDAttr = getClass("scala.SerialVersionUID")
- lazy val SpecializedClass = getClass("scala.specialized")
- lazy val ThrowsClass = getClass("scala.throws")
- lazy val TransientAttr = getClass("scala.transient")
- lazy val UncheckedClass = getClass("scala.unchecked")
- lazy val VolatileAttr = getClass("scala.volatile")
+ lazy val BridgeClass = getRequiredClass("scala.annotation.bridge")
+ lazy val ElidableMethodClass = getRequiredClass("scala.annotation.elidable")
+ lazy val ImplicitNotFoundClass = getRequiredClass("scala.annotation.implicitNotFound")
+ lazy val MigrationAnnotationClass = getRequiredClass("scala.annotation.migration")
+ lazy val ScalaStrictFPAttr = getRequiredClass("scala.annotation.strictfp")
+ lazy val SerializableAttr = getRequiredClass("scala.annotation.serializable") // @serializable is deprecated
+ lazy val SwitchClass = getRequiredClass("scala.annotation.switch")
+ lazy val TailrecClass = getRequiredClass("scala.annotation.tailrec")
+ lazy val VarargsClass = getRequiredClass("scala.annotation.varargs")
+ lazy val uncheckedStableClass = getRequiredClass("scala.annotation.unchecked.uncheckedStable")
+ lazy val uncheckedVarianceClass = getRequiredClass("scala.annotation.unchecked.uncheckedVariance")
+
+ lazy val BeanPropertyAttr = getRequiredClass("scala.beans.BeanProperty")
+ lazy val BooleanBeanPropertyAttr = getRequiredClass("scala.beans.BooleanBeanProperty")
+ lazy val CloneableAttr = getRequiredClass("scala.cloneable")
+ lazy val DeprecatedAttr = getRequiredClass("scala.deprecated")
+ lazy val DeprecatedNameAttr = getRequiredClass("scala.deprecatedName")
+ lazy val NativeAttr = getRequiredClass("scala.native")
+ lazy val RemoteAttr = getRequiredClass("scala.remote")
+ lazy val ScalaInlineClass = getRequiredClass("scala.inline")
+ lazy val ScalaNoInlineClass = getRequiredClass("scala.noinline")
+ lazy val SerialVersionUIDAttr = getRequiredClass("scala.SerialVersionUID")
+ lazy val SpecializedClass = getRequiredClass("scala.specialized")
+ lazy val ThrowsClass = getRequiredClass("scala.throws")
+ lazy val TransientAttr = getRequiredClass("scala.transient")
+ lazy val UncheckedClass = getRequiredClass("scala.unchecked")
+ lazy val VolatileAttr = getRequiredClass("scala.volatile")
// Meta-annotations
lazy val BeanGetterTargetClass = getMetaAnnotation("beanGetter")
@@ -659,7 +764,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val SetterTargetClass = getMetaAnnotation("setter")
// TODO: module, moduleClass? package, packageObject?
- private def getMetaAnnotation(name: String) = getClass("scala.annotation.meta." + name)
+ private def getMetaAnnotation(name: String) = getRequiredClass("scala.annotation.meta." + name)
def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || (
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
@@ -677,11 +782,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
attr
}
- def getPackageObjectClass(fullname: Name): Symbol =
+ def getPackageObjectClass(fullname: String): Symbol =
getPackageObject(fullname).companionClass
- def getPackageObject(fullname: Name): Symbol =
- getModule(fullname).info member nme.PACKAGE
+ def getPackageObject(fullname: String): Symbol =
+ getModule(newTermName(fullname)).info member nme.PACKAGE
def getModule(fullname: Name): Symbol =
getModuleOrClass(fullname.toTermName)
@@ -691,6 +796,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
while (result.isAliasType) result = result.info.typeSymbol
result
}
+
+ def getRequiredModule(fullname: String): Symbol =
+ getModule(newTermNameCached(fullname))
+ def getRequiredClass(fullname: String): Symbol =
+ getClass(newTypeNameCached(fullname))
def getClassIfDefined(fullname: String): Symbol =
getClassIfDefined(newTypeName(fullname))
@@ -736,65 +846,37 @@ trait Definitions extends reflect.api.StandardDefinitions {
*/
private def getModuleOrClass(path: Name): Symbol = getModuleOrClass(path, path.length)
- private def newClass(owner: Symbol, name: TypeName, parents: List[Type]): Symbol = {
- val clazz = owner.newClass(NoPosition, name)
- clazz.setInfo(ClassInfoType(parents, new Scope, clazz))
- owner.info.decls.enter(clazz)
- clazz
- }
-
- private def newCovariantPolyClass(owner: Symbol, name: TypeName, parent: Symbol => Type): Symbol = {
- val clazz = newClass(owner, name, List())
- val tparam = newTypeParam(clazz, 0) setFlag COVARIANT
- val p = parent(tparam)
-/* p.typeSymbol.initialize
- println(p.typeSymbol + " flags: " + Flags.flagsToString(p.typeSymbol.flags))
- val parents = /*if (p.typeSymbol.isTrait)
- List(definitions.AnyRefClass.tpe, p)
- else*/ List(p)
- println("creating " + name + " with parents " + parents) */
- clazz.setInfo(
- polyType(
- List(tparam),
- ClassInfoType(List(AnyRefClass.tpe, p), new Scope, clazz)))
- }
-
- private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol = {
- val tpsym = owner.newAliasType(NoPosition, name)
- tpsym.setInfo(alias)
- owner.info.decls.enter(tpsym)
- tpsym
+ private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol =
+ owner.newAliasType(name) setInfoAndEnter alias
+
+ private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): Symbol = {
+ val clazz = newClass(ScalaPackageClass, name, Nil)
+ val tparam = clazz.newSyntheticTypeParam("T0", flags)
+ val parents = List(AnyRefClass.tpe, parentFn(tparam))
+
+ clazz setInfo polyType(List(tparam), ClassInfoType(parents, newScope, clazz))
}
+
+ def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): Symbol = {
+ val msym = owner.newMethod(name.encode, NoPosition, flags)
+ val tparams = msym.newSyntheticTypeParams(typeParamCount)
+ val mtpe = createFn(tparams) match {
+ case (Some(formals), restpe) => MethodType(msym.newSyntheticValueParams(formals), restpe)
+ case (_, restpe) => NullaryMethodType(restpe)
+ }
- private def newMethod(owner: Symbol, name: TermName): Symbol = {
- val msym = owner.newMethod(NoPosition, name.encode)
- owner.info.decls.enter(msym)
- msym
+ msym setInfoAndEnter polyType(tparams, mtpe)
}
-
- private[Definitions] def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type): Symbol = {
- val msym = newMethod(owner, name)
- val params = msym.newSyntheticValueParams(formals)
- msym.setInfo(MethodType(params, restpe))
+
+ /** T1 means one type parameter.
+ */
+ def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = {
+ newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head)))
}
-
- /** tcon receives the type parameter symbol as argument */
- private def newPolyMethod(owner: Symbol, name: TermName, tcon: Symbol => Type): Symbol =
- newPolyMethodCon(owner, name, tparam => msym => tcon(tparam))
-
- /** tcon receives the type parameter symbol and the method symbol as arguments */
- private def newPolyMethodCon(owner: Symbol, name: TermName, tcon: Symbol => Symbol => Type): Symbol = {
- val msym = newMethod(owner, name)
- val tparam = newTypeParam(msym, 0)
- msym.setInfo(polyType(List(tparam), tcon(tparam)(msym)))
+ def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = {
+ newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
}
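// Illustration (not part of the patch): the distinction the (None, ...) and
// (Some(Nil), ...) cases of newPolyMethod encode, in user-level syntax.
object ArityFormsSketch {
  def nullary[T]: Boolean    = true   // NullaryMethodType: no parameter list at all
  def noParams[T](): Boolean = true   // MethodType(Nil, ...): an empty parameter list
}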
- private def newParameterlessMethod(owner: Symbol, name: TermName, restpe: Type) =
- newMethod(owner, name).setInfo(NullaryMethodType(restpe))
-
- private def newTypeParam(owner: Symbol, index: Int): Symbol =
- owner.newTypeParameter(NoPosition, newTypeName("T" + index)) setInfo TypeBounds.empty
-
lazy val boxedClassValues = boxedClass.values.toSet
lazy val isUnbox = unboxMethod.values.toSet
lazy val isBox = boxMethod.values.toSet
@@ -809,8 +891,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
private lazy val boxedValueClassesSet = boxedClass.values.toSet + BoxedUnitClass
/** Is symbol a value class? */
- def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
- def isNonUnitValueClass(sym: Symbol) = (sym != UnitClass) && isValueClass(sym)
+ def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
+ def isNonUnitValueClass(sym: Symbol) = isValueClass(sym) && (sym != UnitClass)
+ def isSpecializableClass(sym: Symbol) = isValueClass(sym) || (sym == AnyRefClass)
def isScalaValueType(tp: Type) = scalaValueClassesSet(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
@@ -891,46 +974,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
-
- // members of class scala.Any
- Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype) setFlag FINAL
- Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype) setFlag FINAL
- Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
- Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
- Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
- Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
-
- // Any_getClass requires special handling. The return type is determined on
- // a per-call-site basis as if the function being called were actually:
- //
- // // Assuming `target.getClass()`
- // def getClass[T](target: T): Class[_ <: T]
- //
- // Since getClass is not actually a polymorphic method, this requires compiler
- // participation. At the "Any" level, the return type is Class[_] as it is in
- // java.lang.Object. Java also special cases the return type.
- Any_getClass = (
- newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType)
- setFlag DEFERRED
- )
- Any_isInstanceOf = newPolyMethod(
- AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL
- Any_asInstanceOf = newPolyMethod(
- AnyClass, nme.asInstanceOf_, tparam => NullaryMethodType(tparam.typeConstructor)) setFlag FINAL
-
- // members of class java.lang.{ Object, String }
- Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
- Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype) setFlag FINAL
- Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype) setFlag FINAL
- Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype) setFlag FINAL
- Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype) setFlag FINAL
- Object_synchronized = newPolyMethodCon(
- ObjectClass, nme.synchronized_,
- tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL
-
- String_+ = newMethod(
- StringClass, "+", anyparam, stringtype) setFlag FINAL
-
+
val forced = List( // force initialization of every symbol that is entered as a side effect
AnnotationDefaultAttr, // #2264
RepeatedParamClass,
@@ -943,8 +987,24 @@ trait Definitions extends reflect.api.StandardDefinitions {
NothingClass,
SingletonClass,
EqualsPatternClass,
+ Any_==,
+ Any_!=,
+ Any_equals,
+ Any_hashCode,
+ Any_toString,
+ Any_getClass,
+ Any_isInstanceOf,
+ Any_asInstanceOf,
+ Any_##,
+ Object_eq,
+ Object_ne,
+ Object_==,
+ Object_!=,
+ Object_##,
+ Object_synchronized,
Object_isInstanceOf,
- Object_asInstanceOf
+ Object_asInstanceOf,
+ String_+
)
/** Removing the anyref parent they acquire from having a source file.
@@ -962,11 +1022,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
assert(forMSIL, "scalaCallers can only be created if target is .NET")
// object: reference to object on which to call (scala-)method
val paramTypes: List[Type] = List(ObjectClass.tpe)
- val name: String = "$scalaCaller$$" + nbScalaCallers
+ val name = newTermName("$scalaCaller$$" + nbScalaCallers)
// tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
// type parameter =-> a MethodType in this case
// TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
- val newCaller = newMethod(DelegateClass, name, paramTypes, delegateType) setFlag (FINAL | STATIC)
+ val newCaller = newMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC)
// val newCaller = newPolyMethod(DelegateClass, name,
// tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 8366c6d63a..aa696bc6e8 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -7,12 +7,13 @@ package scala.reflect
package internal
import api.Modifier
+import scala.collection.{ mutable, immutable }
// Flags at each index of a flags Long. Those marked with /M are used in
// Parsers/JavaParsers and therefore definitely appear on Modifiers; but the
// absence of /M on the other flags does not imply they aren't.
//
-// Generated by mkFlagsTable() at Mon Oct 11 10:01:09 PDT 2010
+// Generated by mkFlagsTable() at Thu Feb 02 20:31:52 PST 2012
//
// 0: PROTECTED/M
// 1: OVERRIDE/M
@@ -29,7 +30,7 @@ import api.Modifier
// 12: MUTABLE/M
// 13: PARAM/M
// 14: PACKAGE
-// 15:
+// 15: MACRO/M
// 16: BYNAMEPARAM/M CAPTURED COVARIANT/M
// 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
// 18: ABSOVERRIDE/M
@@ -58,13 +59,13 @@ import api.Modifier
// 41: DEFAULTINIT/M
// 42: VBRIDGE
// 43: VARARGS
-// 44:
+// 44: TRIEDCOOKING
// 45:
// 46:
// 47:
// 48:
-// 49: latePRIVATE (eliminated)
-// 50: lateABSTRACT (eliminated)
+// 49:
+// 50:
// 51: lateDEFERRED
// 52: lateFINAL
// 53: lateMETHOD
@@ -73,10 +74,10 @@ import api.Modifier
// 56: notPROTECTED
// 57: notOVERRIDE
// 58: notPRIVATE
-// 59: notABSTRACT (eliminated)
-// 60: notDEFERRED (eliminated)
-// 61: notFINAL (eliminated)
-// 62: notMETHOD (eliminated)
+// 59:
+// 60:
+// 61:
+// 62:
// 63:
/** Flags set on Modifiers instances in the parsing stage.
@@ -225,6 +226,7 @@ class Flags extends ModifierFlags {
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
+ final val BridgeAndPrivateFlags = BridgeFlags | PRIVATE
/** When a symbol for a field is created, only these flags survive
* from Modifiers. Others which may be applied at creation time are:
@@ -335,7 +337,7 @@ class Flags extends ModifierFlags {
// ------ displaying flags --------------------------------------------------------
- // Generated by mkFlagToStringMethod() at Mon Oct 11 10:12:36 PDT 2010
+ // Generated by mkFlagToStringMethod() at Thu Feb 02 20:31:52 PST 2012
@annotation.switch override def flagToString(flag: Long): String = flag match {
case PROTECTED => "protected" // (1L << 0)
case OVERRIDE => "override" // (1L << 1)
@@ -352,7 +354,7 @@ class Flags extends ModifierFlags {
case MUTABLE => "<mutable>" // (1L << 12)
case PARAM => "<param>" // (1L << 13)
case PACKAGE => "<package>" // (1L << 14)
- case MACRO => "macro" // (1L << 15)
+ case MACRO => "<macro>" // (1L << 15)
case BYNAMEPARAM => "<bynameparam/captured/covariant>" // (1L << 16)
case CONTRAVARIANT => "<contravariant/inconstructor/label>" // (1L << 17)
case ABSOVERRIDE => "absoverride" // (1L << 18)
@@ -381,13 +383,13 @@ class Flags extends ModifierFlags {
case DEFAULTINIT => "<defaultinit>" // (1L << 41)
case VBRIDGE => "<vbridge>" // (1L << 42)
case VARARGS => "<varargs>" // (1L << 43)
- case 0x100000000000L => "" // (1L << 44)
+ case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case 0x200000000000L => "" // (1L << 45)
case 0x400000000000L => "" // (1L << 46)
case 0x800000000000L => "" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
- // case `latePRIVATE` => "<lateprivate>" // (1L << 49)
- // case `lateABSTRACT` => "<lateabstract>" // (1L << 50)
+ case 0x2000000000000L => "" // (1L << 49)
+ case 0x4000000000000L => "" // (1L << 50)
case `lateDEFERRED` => "<latedeferred>" // (1L << 51)
case `lateFINAL` => "<latefinal>" // (1L << 52)
case `lateMETHOD` => "<latemethod>" // (1L << 53)
@@ -396,10 +398,10 @@ class Flags extends ModifierFlags {
case `notPROTECTED` => "<notprotected>" // (1L << 56)
case `notOVERRIDE` => "<notoverride>" // (1L << 57)
case `notPRIVATE` => "<notprivate>" // (1L << 58)
- // case `notABSTRACT` => "<notabstract>" // (1L << 59)
- // case `notDEFERRED` => "<notdeferred>" // (1L << 60)
- // case `notFINAL` => "<notfinal>" // (1L << 61)
- // case `notMETHOD` => "<notmethod>" // (1L << 62)
+ case 0x800000000000000L => "" // (1L << 59)
+ case 0x1000000000000000L => "" // (1L << 60)
+ case 0x2000000000000000L => "" // (1L << 61)
+ case 0x4000000000000000L => "" // (1L << 62)
case 0x8000000000000000L => "" // (1L << 63)
case _ => ""
}
@@ -426,8 +428,29 @@ class Flags extends ModifierFlags {
List(flagsToString(f), pw) filterNot (_ == "") mkString " "
}
- def flagsToString(flags: Long): String =
- pickledListOrder map (mask => flagToString(flags & mask)) filterNot (_ == "") mkString " "
+ // List of the raw flags, in pickled order
+ protected final val MaxBitPosition = 62
+
+ def flagsToString(flags: Long): String = {
+ // Fast path for common case
+ if (flags == 0L) "" else {
+ var sb: StringBuilder = null
+ var i = 0
+ while (i <= MaxBitPosition) {
+ val mask = rawFlagPickledOrder(i)
+ if ((flags & mask) != 0L) {
+ val s = flagToString(mask)
+ if (s.length > 0) {
+ if (sb eq null) sb = new StringBuilder append s
+ else if (sb.length == 0) sb append s
+ else sb append " " append s
+ }
+ }
+ i += 1
+ }
+ if (sb eq null) "" else sb.toString
+ }
+ }
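// Illustration (not part of the patch): a standalone, functional-style sketch of
// the same bit-walking fast path; the patch uses a while loop to avoid the
// iterator and intermediate allocations.
object FlagStringSketch {
  def bitsToString(bits: Long, nameOf: Long => String): String =
    if (bits == 0L) ""
    else (0 to 62).iterator
      .map(i => 1L << i)
      .filter(mask => (bits & mask) != 0L)
      .map(nameOf)
      .filter(_.nonEmpty)
      .mkString(" ")
}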
def rawFlagsToPickled(flags: Long): Long =
(flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
@@ -435,15 +458,15 @@ class Flags extends ModifierFlags {
def pickledToRawFlags(pflags: Long): Long =
(pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
- // List of the raw flags, in pickled order
- protected val pickledListOrder: List[Long] = {
- val all = 0 to 62 map (1L << _)
+ protected final val pickledListOrder: List[Long] = {
+ val all = 0 to MaxBitPosition map (1L << _)
val front = rawFlags map (_.toLong)
front.toList ++ (all filterNot (front contains _))
}
+ protected final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
- def flagOfModifier(mod: Modifier.Value): Long = mod match {
+ def flagOfModifier(mod: Modifier): Long = mod match {
case Modifier.`protected` => PROTECTED
case Modifier.`private` => PRIVATE
case Modifier.`override` => OVERRIDE
@@ -473,13 +496,13 @@ class Flags extends ModifierFlags {
case Modifier.bynameParameter => BYNAMEPARAM
}
- def flagsOfModifiers(mods: List[Modifier.Value]): Long =
+ def flagsOfModifiers(mods: List[Modifier]): Long =
(mods :\ 0L) { (mod, curr) => curr | flagOfModifier(mod) }
- def modifierOfFlag(flag: Long): Option[Modifier.Value] =
+ def modifierOfFlag(flag: Long): Option[Modifier] =
Modifier.values find { mod => flagOfModifier(mod) == flag }
- def modifiersOfFlags(flags: Long): List[Modifier.Value] =
+ def modifiersOfFlags(flags: Long): List[Modifier] =
pickledListOrder map (mask => modifierOfFlag(flags & mask)) flatMap { mod => mod }
}
diff --git a/src/compiler/scala/reflect/internal/HasFlags.scala b/src/compiler/scala/reflect/internal/HasFlags.scala
index 46dca0940a..ec4e919bdc 100644
--- a/src/compiler/scala/reflect/internal/HasFlags.scala
+++ b/src/compiler/scala/reflect/internal/HasFlags.scala
@@ -136,6 +136,9 @@ trait HasFlags {
/** Whether this entity has NONE of the flags in the given mask.
*/
def hasNoFlags(mask: Long): Boolean = !hasFlag(mask)
+
+ protected def isSetting(f: Long, mask: Long) = !hasFlag(f) && ((mask & f) != 0L)
+ protected def isClearing(f: Long, mask: Long) = hasFlag(f) && ((mask & f) != 0L)
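// Illustration (not part of the patch): a self-contained restatement of the two
// predicates, with flag values chosen only for the example.
object SettingClearingSketch {
  val FINAL = 1L << 5; val SEALED = 1L << 10
  val current = FINAL                                  // flags already set on the entity
  def hasFlag(f: Long) = (current & f) != 0L
  def isSetting(f: Long, mask: Long)  = !hasFlag(f) && ((mask & f) != 0L)
  def isClearing(f: Long, mask: Long) =  hasFlag(f) && ((mask & f) != 0L)
  assert(isSetting(SEALED, FINAL | SEALED))            // SEALED would be newly set
  assert(isClearing(FINAL, FINAL | SEALED))            // FINAL is already set, so a clear would remove it
}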
// Tests which come through cleanly: both Symbol and Modifiers use these
// identically, testing for a single flag.
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index 60b353a7c4..1ae4f755ed 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -9,19 +9,38 @@ trait Importers { self: SymbolTable =>
val from: SymbolTable
lazy val symMap: WeakHashMap[from.Symbol, Symbol] = new WeakHashMap
+ lazy val tpeMap: WeakHashMap[from.Type, Type] = new WeakHashMap
+
+ // fixups and maps prevent stack overflows in the importer
+ var pendingSyms = 0
+ var pendingTpes = 0
+ lazy val fixups = collection.mutable.MutableList[Function0[Unit]]()
+ def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
+ def tryFixup(): Unit = {
+ if (pendingSyms == 0 && pendingTpes == 0) {
+ val fixups = this.fixups.toList
+ this.fixups.clear()
+ fixups foreach { _() }
+ }
+ }
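// Illustration (not part of the patch): the pending-counter/deferred-fixup
// pattern above, reduced to a standalone sketch with a simplified API.
class FixupQueueSketch {
  private var pending = 0
  private val fixups = scala.collection.mutable.ListBuffer.empty[() => Unit]
  def addFixup(f: => Unit): Unit = fixups += (() => f)
  def nested[A](body: => A): A = {
    pending += 1
    try body
    finally {
      pending -= 1
      if (pending == 0) {              // run fixups only once the outermost call completes
        val fs = fixups.toList
        fixups.clear()
        fs foreach (_())
      }
    }
  }
}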
object reverse extends from.Importer {
val from: self.type = self
for ((fromsym, mysym) <- Importer.this.symMap) symMap += ((mysym, fromsym))
+ for ((fromtpe, mytpe) <- Importer.this.tpeMap) tpeMap += ((mytpe, fromtpe))
}
def importPosition(pos: from.Position): Position = NoPosition
- def importSymbol(sym: from.Symbol): Symbol = {
+ def importSymbol(sym0: from.Symbol): Symbol = {
def doImport(sym: from.Symbol): Symbol = {
+ if (symMap.contains(sym))
+ return symMap(sym)
+
val myowner = importSymbol(sym.owner)
- val mypos = importPosition(sym.pos)
- val myname = importName(sym.name)
+ val mypos = importPosition(sym.pos)
+ val myname = importName(sym.name).toTermName
+ val myflags = sym.flags
def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
symMap(x) = mysym
mysym.referenced = op(x.referenced)
@@ -29,39 +48,40 @@ trait Importers { self: SymbolTable =>
}
val mysym = sym match {
case x: from.MethodSymbol =>
- linkReferenced(new MethodSymbol(myowner, mypos, myname), x, importSymbol)
+ linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
case x: from.ModuleSymbol =>
- linkReferenced(new ModuleSymbol(myowner, mypos, myname), x, doImport)
+ linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
case x: from.FreeVar =>
- new FreeVar(importName(x.name), importType(x.tpe), x.value)
+ newFreeVar(importName(x.name).toTermName, importType(x.tpe), x.value, myflags)
case x: from.TermSymbol =>
- linkReferenced(new TermSymbol(myowner, mypos, myname), x, importSymbol)
+ linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
case x: from.TypeSkolem =>
- new TypeSkolem(myowner, mypos, myname.toTypeName, x.unpackLocation match {
- case null => null
- case y: from.Tree => importTree(y)
+ val origin = x.unpackLocation match {
+ case null => null
+ case y: from.Tree => importTree(y)
case y: from.Symbol => importSymbol(y)
- })
- /*
- case x: from.ModuleClassSymbol =>
- val mysym = new ModuleClassSymbol(myowner, mypos, myname.toTypeName)
- mysym.sourceModule = importSymbol(x.sourceModule)
- mysym
-*/
+ }
+ myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+ case x: from.ModuleClassSymbol =>
+ val mysym = myowner.newModuleClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap(x) = mysym
+ mysym.sourceModule = importSymbol(x.sourceModule)
+ mysym
case x: from.ClassSymbol =>
- val mysym = new ClassSymbol(myowner, mypos, myname.toTypeName)
+ val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap(x) = mysym
if (sym.thisSym != sym) {
mysym.typeOfThis = importType(sym.typeOfThis)
mysym.thisSym.name = importName(sym.thisSym.name)
}
mysym
case x: from.TypeSymbol =>
- new TypeSymbol(myowner, mypos, myname.toTypeName)
+ myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
}
symMap(sym) = mysym
- mysym setFlag sym.flags | Flags.LOCKED
+ mysym setFlag Flags.LOCKED
mysym setInfo {
- val mytypeParams = sym.typeParams map doImport
+ val mytypeParams = sym.typeParams map importSymbol
new LazyPolyType(mytypeParams) {
override def complete(s: Symbol) {
val result = sym.info match {
@@ -76,7 +96,8 @@ trait Importers { self: SymbolTable =>
mysym resetFlag Flags.LOCKED
} // end doImport
- def importOrRelink: Symbol =
+ def importOrRelink: Symbol = {
+ val sym = sym0 // makes sym visible in the debugger
if (sym == null)
null
else if (sym == from.NoSymbol)
@@ -84,116 +105,160 @@ trait Importers { self: SymbolTable =>
else if (sym.isRoot)
definitions.RootClass
else {
- val myowner = importSymbol(sym.owner)
- val myname = importName(sym.name)
- if (sym.isModuleClass) {
- assert(sym.sourceModule != NoSymbol, sym)
- val mymodule = importSymbol(sym.sourceModule)
- assert(mymodule != NoSymbol, sym)
- assert(mymodule.moduleClass != NoSymbol, mymodule)
- mymodule.moduleClass
- } else if (myowner.isClass && !myowner.isRefinementClass && !(myowner hasFlag Flags.LOCKED) && sym.owner.info.decl(sym.name).exists) {
- // symbol is in class scope, try to find equivalent one in local scope
- if (sym.isOverloaded)
- myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
- else {
- var existing: Symbol = myowner.info.decl(myname)
- if (existing.isOverloaded) {
- existing =
- if (sym.isMethod) {
- val localCopy = doImport(sym)
- existing filter (_.tpe matches localCopy.tpe)
- } else {
- existing filter (!_.isMethod)
- }
- assert(!existing.isOverloaded,
- "import failure: cannot determine unique overloaded method alternative from\n "+
- (existing.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
+ val name = sym.name
+ val owner = sym.owner
+ var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
+ var existing = scope.decl(name)
+ if (sym.isPackageClass || sym.isModuleClass) existing = existing.moduleClass
+ if (!existing.exists) scope = from.NoType
+
+ val myname = importName(name)
+ val myowner = importSymbol(owner)
+ val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+ var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in the general case, because doing so creates cycles for methods
+ if (sym.isPackageClass || sym.isModuleClass) myexisting = importSymbol(sym.sourceModule).moduleClass
+ if (!sym.isOverloaded && myexisting.isOverloaded) {
+ myexisting =
+ if (sym.isMethod) {
+ val localCopy = doImport(sym)
+ myexisting filter (_.tpe matches localCopy.tpe)
+ } else {
+ myexisting filter (!_.isMethod)
}
- if (existing != NoSymbol) existing
- else {
+ assert(!myexisting.isOverloaded,
+ "import failure: cannot determine unique overloaded method alternative from\n "+
+ (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
+ }
+
+ val mysym = {
+ if (sym.isOverloaded) {
+ myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
+ } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
+ assert(myowner.typeParams.length > sym.paramPos,
+ "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
+ myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
+ sym.owner+from.typeParamsString(sym.owner.info))
+ myowner.typeParams(sym.paramPos)
+ } else {
+ if (myexisting != NoSymbol) {
+ myexisting
+ } else {
val mysym = doImport(sym)
- assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+existing)
- myowner.info.decls enter mysym
+
+ if (myscope != NoType) {
+ assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
+ myowner.info.decls enter mysym
+ }
+
mysym
}
}
- } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
- assert(myowner.typeParams.length > sym.paramPos,
- "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
- myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
- sym.owner+from.typeParamsString(sym.owner.info))
- myowner.typeParams(sym.paramPos)
- } else
- doImport(sym)
+ }
+
+ mysym
+ }
+ } // end importOrRelink
+
+ val sym = sym0
+ if (symMap contains sym) {
+ symMap(sym)
+ } else {
+ pendingSyms += 1
+
+ try {
+ symMap getOrElseUpdate (sym, importOrRelink)
+ } finally {
+ pendingSyms -= 1
+ tryFixup()
}
- symMap getOrElseUpdate (sym, importOrRelink)
+ }
}
- def importType(tpe: from.Type): Type = tpe match {
- case from.TypeRef(pre, sym, args) =>
- TypeRef(importType(pre), importSymbol(sym), args map importType)
- case from.ThisType(clazz) =>
- ThisType(importSymbol(clazz))
- case from.SingleType(pre, sym) =>
- SingleType(importType(pre), importSymbol(sym))
- case from.MethodType(params, restpe) =>
- MethodType(params map importSymbol, importType(restpe))
- case from.PolyType(tparams, restpe) =>
- PolyType(tparams map importSymbol, importType(restpe))
- case from.NullaryMethodType(restpe) =>
- NullaryMethodType(importType(restpe))
- case from.ConstantType(from.Constant(value)) =>
- ConstantType(Constant(value))
- case from.SuperType(thistpe, supertpe) =>
- SuperType(importType(thistpe), importType(supertpe))
- case from.TypeBounds(lo, hi) =>
- TypeBounds(importType(lo), importType(hi))
- case from.BoundedWildcardType(bounds) =>
- BoundedWildcardType(importTypeBounds(bounds))
- case from.ClassInfoType(parents, decls, clazz) =>
- val myclazz = importSymbol(clazz)
- val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
- val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
- myclazz setInfo polyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
- decls foreach importSymbol // will enter itself into myclazz
- myclazzTpe
- case from.RefinedType(parents, decls) =>
- RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
- case from.ExistentialType(tparams, restpe) =>
- ExistentialType(tparams map importSymbol, importType(restpe))
- case from.OverloadedType(pre, alts) =>
- OverloadedType(importType(pre), alts map importSymbol)
- case from.AntiPolyType(pre, targs) =>
- AntiPolyType(importType(pre), targs map importType)
- case x: from.TypeVar =>
- new TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
- case from.NotNullType(tpe) =>
- NotNullType(importType(tpe))
- case from.AnnotatedType(annots, tpe, selfsym) =>
- AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
- case from.ErrorType =>
- ErrorType
- case from.WildcardType =>
- WildcardType
- case from.NoType =>
- NoType
- case from.NoPrefix =>
- NoPrefix
- case null =>
- null
+ def importType(tpe: from.Type): Type = {
+ def doImport(tpe: from.Type): Type = tpe match {
+ case from.TypeRef(pre, sym, args) =>
+ TypeRef(importType(pre), importSymbol(sym), args map importType)
+ case from.ThisType(clazz) =>
+ ThisType(importSymbol(clazz))
+ case from.SingleType(pre, sym) =>
+ SingleType(importType(pre), importSymbol(sym))
+ case from.MethodType(params, restpe) =>
+ MethodType(params map importSymbol, importType(restpe))
+ case from.PolyType(tparams, restpe) =>
+ PolyType(tparams map importSymbol, importType(restpe))
+ case from.NullaryMethodType(restpe) =>
+ NullaryMethodType(importType(restpe))
+ case from.ConstantType(constant @ from.Constant(_)) =>
+ ConstantType(importConstant(constant))
+ case from.SuperType(thistpe, supertpe) =>
+ SuperType(importType(thistpe), importType(supertpe))
+ case from.TypeBounds(lo, hi) =>
+ TypeBounds(importType(lo), importType(hi))
+ case from.BoundedWildcardType(bounds) =>
+ BoundedWildcardType(importTypeBounds(bounds))
+ case from.ClassInfoType(parents, decls, clazz) =>
+ val myclazz = importSymbol(clazz)
+ val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+ val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+ myclazz setInfo polyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+ decls foreach importSymbol // will enter itself into myclazz
+ myclazzTpe
+ case from.RefinedType(parents, decls) =>
+ RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
+ case from.ExistentialType(tparams, restpe) =>
+ newExistentialType(tparams map importSymbol, importType(restpe))
+ case from.OverloadedType(pre, alts) =>
+ OverloadedType(importType(pre), alts map importSymbol)
+ case from.AntiPolyType(pre, targs) =>
+ AntiPolyType(importType(pre), targs map importType)
+ case x: from.TypeVar =>
+ TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
+ case from.NotNullType(tpe) =>
+ NotNullType(importType(tpe))
+ case from.AnnotatedType(annots, tpe, selfsym) =>
+ AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
+ case from.ErrorType =>
+ ErrorType
+ case from.WildcardType =>
+ WildcardType
+ case from.NoType =>
+ NoType
+ case from.NoPrefix =>
+ NoPrefix
+ case null =>
+ null
+ } // end doImport
+
+ def importOrRelink: Type =
+ doImport(tpe)
+
+ if (tpeMap contains tpe) {
+ tpeMap(tpe)
+ } else {
+ pendingTpes += 1
+
+ try {
+ tpeMap getOrElseUpdate (tpe, importOrRelink)
+ } finally {
+ pendingTpes -= 1
+ tryFixup()
+ }
+ }
}
def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
- def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo =
- AnnotationInfo(importType(ann.atp), ann.args map importTree, ann.assocs map {
- case (name, arg) => (importName(name), importAnnotArg(arg))
- })
+ def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
+ val atp1 = importType(ann.atp)
+ val args1 = ann.args map importTree
+ val assocs1 = ann.assocs map { case (name, arg) => (importName(name), importAnnotArg(arg)) }
+ val original1 = importTree(ann.original)
+ AnnotationInfo(atp1, args1, assocs1) setOriginal original1
+ }
def importAnnotArg(arg: from.ClassfileAnnotArg): ClassfileAnnotArg = arg match {
- case from.LiteralAnnotArg(from.Constant(value)) =>
- LiteralAnnotArg(Constant(value))
+ case from.LiteralAnnotArg(constant @ from.Constant(_)) =>
+ LiteralAnnotArg(importConstant(constant))
case from.ArrayAnnotArg(args) =>
ArrayAnnotArg(args map importAnnotArg)
case from.ScalaSigBytes(bytes) =>
@@ -208,9 +273,9 @@ trait Importers { self: SymbolTable =>
result
}
- // !!! todo: override to vcater for PackageScopes
+ // !!! todo: override to cater for PackageScopes
def importScope(decls: from.Scope): Scope =
- new Scope(decls.toList map importSymbol)
+ newScopeWith(decls.toList map importSymbol: _*)
def importName(name: from.Name): Name =
if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
@@ -221,7 +286,7 @@ trait Importers { self: SymbolTable =>
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
def importImportSelector(sel: from.ImportSelector): ImportSelector =
- new ImportSelector(importName(sel.name), sel.namePos, importName(sel.rename), sel.renamePos)
+ new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
def importTree(tree: from.Tree): Tree = {
val mytree = tree match {
@@ -263,6 +328,8 @@ trait Importers { self: SymbolTable =>
new Function(vparams map importValDef, importTree(body))
case from.Assign(lhs, rhs) =>
new Assign(importTree(lhs), importTree(rhs))
+ case from.AssignOrNamedArg(lhs, rhs) =>
+ new AssignOrNamedArg(importTree(lhs), importTree(rhs))
case from.If(cond, thenp, elsep) =>
new If(importTree(cond), importTree(thenp), importTree(elsep))
case from.Match(selector, cases) =>
@@ -301,8 +368,8 @@ trait Importers { self: SymbolTable =>
case _ =>
new Ident(importName(name))
}
- case from.Literal(from.Constant(value)) =>
- new Literal(Constant(value))
+ case from.Literal(constant @ from.Constant(_)) =>
+ new Literal(importConstant(constant))
case from.TypeTree() =>
new TypeTree()
case from.Annotated(annot, arg) =>
@@ -324,10 +391,24 @@ trait Importers { self: SymbolTable =>
case null =>
null
}
- if (mytree != null) {
- if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol)
- mytree.tpe = importType(tree.tpe)
- }
+ addFixup({
+ if (mytree != null) {
+ val mysym = if (tree hasSymbol) importSymbol(tree.symbol) else NoSymbol
+ val mytpe = importType(tree.tpe)
+
+ mytree match {
+ case mytt: TypeTree =>
+ val tt = tree.asInstanceOf[from.TypeTree]
+ if (mytree hasSymbol) mytt.symbol = mysym
+ if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
+ if (tt.original != null) mytt.setOriginal(importTree(tt.original))
+ case _ =>
+ if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol)
+ mytree.tpe = importType(tree.tpe)
+ }
+ }
+ })
+ tryFixup()
mytree
}
@@ -337,5 +418,10 @@ trait Importers { self: SymbolTable =>
def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree]
def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident]
def importCaseDef(tree: from.CaseDef): CaseDef = importTree(tree).asInstanceOf[CaseDef]
+ def importConstant(constant: from.Constant): Constant = new Constant(constant.tag match {
+ case ClassTag => importType(constant.value.asInstanceOf[from.Type])
+ case EnumTag => importSymbol(constant.value.asInstanceOf[from.Symbol])
+ case _ => constant.value
+ })
}
-}
+}
\ No newline at end of file
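
The rewritten importSymbol/importType above share one pattern: memoize each import in a map, count how many imports are in flight (pendingSyms/pendingTpes), and defer tree completion through addFixup/tryFixup until nothing is pending. Below is a minimal self-contained sketch of that pattern; the names (cache, pending, doImport, importItem) are illustrative stand-ins, not the compiler's Importer API.

import scala.collection.mutable

object ImportWithFixups {
  private val cache   = mutable.Map[String, String]()
  private val fixups  = mutable.Buffer[() => Unit]()
  private var pending = 0

  private def addFixup(action: => Unit): Unit = fixups += (() => action)

  // Run the deferred actions only once no import is in flight any more.
  private def tryFixup(): Unit =
    if (pending == 0) {
      val todo = fixups.toList
      fixups.clear()
      todo foreach (_.apply())
    }

  private def doImport(key: String): String = key.toUpperCase  // stand-in for the real conversion

  def importItem(key: String): String =
    if (cache contains key) cache(key)
    else {
      pending += 1
      try cache.getOrElseUpdate(key, doImport(key))
      finally {
        pending -= 1
        tryFixup()
      }
    }

  def main(args: Array[String]): Unit = {
    addFixup(println("all imports finished"))
    println(importItem("symbol"))   // prints "all imports finished", then "SYMBOL"
  }
}
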
diff --git a/src/compiler/scala/reflect/internal/InfoTransformers.scala b/src/compiler/scala/reflect/internal/InfoTransformers.scala
index 9c54b1b4cd..96d9d8f076 100644
--- a/src/compiler/scala/reflect/internal/InfoTransformers.scala
+++ b/src/compiler/scala/reflect/internal/InfoTransformers.scala
@@ -9,6 +9,8 @@ package internal
trait InfoTransformers {
self: SymbolTable =>
+ /* Syncnote: This should not need to be protected, as reflection does not run in multiple phases.
+ */
abstract class InfoTransformer {
var prev: InfoTransformer = this
var next: InfoTransformer = this
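
The prev/next fields above start out pointing at the transformer itself, so a freshly created InfoTransformer forms a one-element doubly linked chain. A toy illustration of that self-linking plus a splice operation follows; insertAfter and the id field are hypothetical, not the compiler's InfoTransformer members.

// `id` and `insertAfter` are toy stand-ins; only the self-referential
// initialization of prev/next mirrors the class above.
class ChainNode(val id: Int) {
  var prev: ChainNode = this
  var next: ChainNode = this

  def insertAfter(that: ChainNode): Unit = {
    that.next = next
    that.prev = this
    next.prev = that
    next = that
  }
}

object ChainNodeDemo {
  def main(args: Array[String]): Unit = {
    val head = new ChainNode(0)
    head.insertAfter(new ChainNode(1))
    println(head.next.id)       // 1
    println(head.next.next.id)  // 0 -- the links stay circular
  }
}
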
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
new file mode 100644
index 0000000000..e675be43dc
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -0,0 +1,221 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
+
+trait Kinds {
+ self: SymbolTable =>
+
+ import definitions._
+
+ private type SymPair = ((Symbol, Symbol)) // ((Argument, Parameter))
+
+ case class KindErrors(
+ arity: List[SymPair] = Nil,
+ variance: List[SymPair] = Nil,
+ strictness: List[SymPair] = Nil
+ ) {
+ def isEmpty = arity.isEmpty && variance.isEmpty && strictness.isEmpty
+
+ def arityError(syms: SymPair) = copy(arity = arity :+ syms)
+ def varianceError(syms: SymPair) = copy(variance = variance :+ syms)
+ def strictnessError(syms: SymPair) = copy(strictness = strictness :+ syms)
+
+ def ++(errs: KindErrors) = KindErrors(
+ arity ++ errs.arity,
+ variance ++ errs.variance,
+ strictness ++ errs.strictness
+ )
+ // @M TODO this method is duplicated all over the place (varianceString)
+ private def varStr(s: Symbol): String =
+ if (s.isCovariant) "covariant"
+ else if (s.isContravariant) "contravariant"
+ else "invariant";
+
+ private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
+ if((a0 eq b0) || (a0.owner eq b0.owner)) ""
+ else {
+ var a = a0; var b = b0
+ while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
+ if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
+ }
+ }
+ private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
+ f(a+qualify(a,p), p+qualify(p,a))
+
+ private def strictnessMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format(
+ _, a.info, _, p.info))
+
+ private def varianceMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))
+
+ private def arityMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s has %s, but %s has %s".format(
+ _, countElementsAsString(a.typeParams.length, "type parameter"),
+ _, countAsString(p.typeParams.length))
+ )
+
+ def errorMessage(targ: Type, tparam: Symbol): String = (
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ")
+ + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ")
+ + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ")
+ + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ")
+ )
+ }
+ val NoKindErrors = KindErrors(Nil, Nil, Nil)
+
+ // TODO: this desperately needs to be cleaned up
+ // plan: split into kind inference and subkinding
+ // every Type has a (cached) Kind
+ def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
+ checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+
+ /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
+ *
+ * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
+ */
+ private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
+ sym2.variance==0
+ || sym1.variance==sym2.variance
+ )
+
+ /** Check well-kindedness of type application (assumes arities are already checked) -- @M
+ *
+ * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
+ * (checked one type member at a time -- in that case, prefix is the name of the type alias)
+ *
+ * Type application is just like value application: it's "contravariant" in the sense that
+ * the type parameters of the supplied type arguments must conform to the type parameters of
+ * the required type parameters:
+ * - their bounds must be less strict
+ * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
+ * - @M TODO: are these conditions correct, sufficient & necessary?
+ *
+ * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
+ * List's type parameter is also covariant and its bounds are weaker than <: Int
+ */
+ def checkKindBounds0(
+ tparams: List[Symbol],
+ targs: List[Type],
+ pre: Type,
+ owner: Symbol,
+ explainErrors: Boolean
+ ): List[(Type, Symbol, KindErrors)] = {
+
+ // instantiate type params that come from outside the abstract type we're currently checking
+ def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz)
+
+ // check that the type parameters hkargs to a higher-kinded type conform to the
+ // expected params hkparams
+ def checkKindBoundsHK(
+ hkargs: List[Symbol],
+ arg: Symbol,
+ param: Symbol,
+ paramowner: Symbol,
+ underHKParams: List[Symbol],
+ withHKArgs: List[Symbol]
+ ): KindErrors = {
+
+ var kindErrors: KindErrors = NoKindErrors
+ def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
+ // @M sometimes hkargs != arg.typeParams, the symbol and the type may
+ // have very different type parameters
+ val hkparams = param.typeParams
+
+ def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
+ if (!cond)
+ kindErrors = f(kindErrors)
+ }
+
+ if (settings.debug.value) {
+ log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
+ log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
+ log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
+ }
+
+ if (!sameLength(hkargs, hkparams)) {
+ // Any and Nothing are kind-overloaded
+ if (arg == AnyClass || arg == NothingClass) NoKindErrors
+ // shortcut: always set error, whether explainTypesOrNot
+ else return kindErrors.arityError(arg -> param)
+ }
+ else foreach2(hkargs, hkparams) { (hkarg, hkparam) =>
+ if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
+ kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam))
+ // instantiateTypeParams(tparams, targs)
+ // higher-order bounds, may contain references to type arguments
+ // substSym(hkparams, hkargs)
+ // these types are going to be compared as types of kind *
+ //
+ // Their arguments use different symbols, but are
+ // conceptually the same. Could also replace the types by
+ // polytypes, but can't just strip the symbols, as ordering
+ // is lost then.
+ val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner)
+ val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner)
+ val argumentBounds = transform(hkarg.info.bounds, owner)
+
+ kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam))
+
+ debuglog(
+ "checkKindBoundsHK base case: " + hkparam +
+ " declared bounds: " + declaredBounds +
+ " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
+ "checkKindBoundsHK base case: "+ hkarg +
+ " has bounds: " + argumentBounds
+ )
+ }
+ else {
+ debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ kindErrors ++= checkKindBoundsHK(
+ hkarg.typeParams,
+ hkarg,
+ hkparam,
+ paramowner,
+ underHKParams ++ hkparam.typeParams,
+ withHKArgs ++ hkarg.typeParams
+ )
+ }
+ if (!explainErrors && !kindErrors.isEmpty)
+ return kindErrors
+ }
+ if (explainErrors) kindErrors
+ else NoKindErrors
+ }
+
+ if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log(
+ "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
+ + owner + ", " + explainErrors + ")"
+ )
+
+ flatMap2(tparams, targs) { (tparam, targ) =>
+ // Prevent WildcardType from causing kind errors, as typevars may be higher-order
+ if (targ == WildcardType) Nil else {
+ // force symbol load for #4205
+ targ.typeSymbolDirect.info
+ // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
+ val tparamsHO = targ.typeParams
+ if (targ.isHigherKinded || tparam.typeParams.nonEmpty) {
+ // NOTE: *not* targ.typeSymbol, which normalizes
+ val kindErrors = checkKindBoundsHK(
+ tparamsHO, targ.typeSymbolDirect, tparam,
+ tparam.owner, tparam.typeParams, tparamsHO
+ )
+ if (kindErrors.isEmpty) Nil else {
+ if (explainErrors) List((targ, tparam, kindErrors))
+ // Return as soon as an error is seen if there's nothing to explain.
+ else return List((NoType, NoSymbol, NoKindErrors))
+ }
+ }
+ else Nil
+ }
+ }
+ }
+}
\ No newline at end of file
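
checkKindBoundsHK above verifies, for each higher-kinded argument, that the arity of its type parameters matches the expected parameter's, that variances conform (an invariant parameter accepts anything), and that bounds are no stricter. A small sketch of the arity and variance part over a toy kind representation; Kind, Variance and conforms are made-up names, not compiler types.

object KindCheckSketch {
  sealed trait Variance
  case object Covariant     extends Variance
  case object Contravariant extends Variance
  case object Invariant     extends Variance

  // A kind is summarized by the variances of its type parameters; kind * has none.
  final case class Kind(params: List[Variance])

  // As in variancesMatch above: an invariant parameter accepts any variance,
  // otherwise argument and parameter must agree.
  def variancesMatch(arg: Variance, param: Variance): Boolean =
    param == Invariant || arg == param

  // Arity must agree and every argument variance must conform to its parameter.
  def conforms(arg: Kind, param: Kind): Boolean =
    arg.params.length == param.params.length &&
      arg.params.zip(param.params).forall { case (a, p) => variancesMatch(a, p) }

  def main(args: Array[String]): Unit = {
    val covariantArg = Kind(List(Covariant))   // e.g. List[+A]
    val covariantPar = Kind(List(Covariant))   // expects m[+x]
    val invariantArg = Kind(List(Invariant))
    println(conforms(covariantArg, covariantPar))  // true
    println(conforms(invariantArg, covariantPar))  // false: covariance is required
    println(conforms(covariantArg, Kind(Nil)))     // false: arity mismatch
  }
}
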
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index d4aa6339ff..97a74c2383 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -23,8 +23,8 @@ trait NameManglers {
val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
- val MODULE_SUFFIX_NAME: TermName = MODULE_SUFFIX_STRING
- val NAME_JOIN_NAME: TermName = NAME_JOIN_STRING
+ val MODULE_SUFFIX_NAME: TermName = newTermName(MODULE_SUFFIX_STRING)
+ val NAME_JOIN_NAME: TermName = newTermName(NAME_JOIN_STRING)
def flattenedName(segments: Name*): NameType = compactedString(segments mkString NAME_JOIN_STRING)
@@ -75,17 +75,17 @@ trait NameManglers {
val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SETTER_SUFFIX = encode("_=")
val SINGLETON_SUFFIX = ".type"
val SPECIALIZED_SUFFIX_STRING = "$sp"
val SUPER_PREFIX_STRING = "super$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+ val SETTER_SUFFIX: TermName = encode("_=")
val SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX_STRING
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
- def isImplClassName(name: Name) = stripAnonNumberSuffix(name) endsWith IMPL_CLASS_SUFFIX
+ def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
@@ -124,6 +124,11 @@ trait NameManglers {
name.subName(0, name.lastIndexOf('m') - 1)
else name
)
+
+ def macroMethodName(name: Name) = {
+ val base = if (name.isTypeName) nme.TYPEkw else nme.DEFkw
+ base append nme.MACRO append name
+ }
/** Return the original name and the types on which this name
* is specialized. For example,
@@ -136,7 +141,7 @@ trait NameManglers {
*/
def splitSpecializedName(name: Name): (Name, String, String) =
if (name endsWith SPECIALIZED_SUFFIX_NAME) {
- val name1 = name stripEnd SPECIALIZED_SUFFIX_NAME
+ val name1 = name dropRight SPECIALIZED_SUFFIX_NAME.length
val idxC = name1 lastIndexOf 'c'
val idxM = name1 lastIndexOf 'm'
@@ -147,18 +152,18 @@ trait NameManglers {
(name, "", "")
def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
- def getterToLocal(name: TermName): TermName = name.toTermName append LOCAL_SUFFIX_STRING
- def getterToSetter(name: TermName): TermName = name.toTermName append SETTER_SUFFIX
- def localToGetter(name: TermName): TermName = name stripEnd LOCAL_SUFFIX_STRING toTermName
+ def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
+ def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
+ def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
def setterToGetter(name: TermName): TermName = {
val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
if (p < name.length)
- setterToGetter(name.subName(p + TRAIT_SETTER_SEPARATOR_STRING.length, name.length))
+ setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
else
- name stripEnd SETTER_SUFFIX toTermName
+ name.subName(0, name.length - SETTER_SUFFIX.length)
}
def defaultGetterName(name: Name, pos: Int): TermName = {
@@ -167,43 +172,32 @@ trait NameManglers {
}
def defaultGetterToMethod(name: Name): TermName = {
val p = name.pos(DEFAULT_GETTER_STRING)
- if (p < name.length) name.subName(0, p)
- else name
+ if (p < name.length) name.toTermName.subName(0, p)
+ else name.toTermName
}
- /** !!! I'm putting this logic in place because I can witness
- * trait impls get lifted and acquiring names like 'Foo$class$1'
- * while clearly still being what they were. It's only being used on
- * isImplClassName. However, it's anyone's guess how much more
- * widely this logic actually ought to be applied. Anything which
- * tests for how a name ends is a candidate for breaking down once
- * something is lifted from a method.
- *
- * TODO: resolve this significant problem.
- */
- def stripAnonNumberSuffix(name: Name): Name = {
- val str = "" + name
- if (str == "" || !str.endChar.isDigit) name
- else {
- val idx = name.lastPos('$')
- if (idx < 0 || str.substring(idx + 1).exists(c => !c.isDigit)) name
- else name.subName(0, idx)
- }
- }
+ // This isn't needed at the moment since I fixed $class$1 but
+ // I expect it will be at some point.
+ //
+ // def anonNumberSuffix(name: Name): Name = {
+ // ("" + name) lastIndexOf '$' match {
+ // case -1 => nme.EMPTY
+ // case idx =>
+ // val s = name drop idx
+ // if (s.toString forall (_.isDigit)) s
+ // else nme.EMPTY
+ // }
+ // }
def stripModuleSuffix(name: Name): Name = (
- if (isModuleName(name)) name stripEnd MODULE_SUFFIX_STRING else name
+ if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
)
- /** Note that for performance reasons, stripEnd does not verify that the
- * suffix is actually the suffix specified.
- */
- def dropSingletonName(name: Name): TypeName = name stripEnd SINGLETON_SUFFIX toTypeName
+ def dropSingletonName(name: Name): TypeName = name dropRight SINGLETON_SUFFIX.length toTypeName
def singletonName(name: Name): TypeName = name append SINGLETON_SUFFIX toTypeName
def implClassName(name: Name): TypeName = name append IMPL_CLASS_SUFFIX toTypeName
- def interfaceName(implname: Name): TypeName = implname stripEnd IMPL_CLASS_SUFFIX toTypeName
+ def interfaceName(implname: Name): TypeName = implname dropRight IMPL_CLASS_SUFFIX.length toTypeName
def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
- def productAccessorName(i: Int): TermName = newTermName("_" + i)
def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
/** The name of an accessor for protected symbols. */
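
The manglers above now build and strip suffixes with append/dropRight: a getter becomes a local by appending the single-space LOCAL_SUFFIX_STRING, and a setter by appending the encoded "_=". A plain-String sketch of those round-trips (toy helpers, not the TermName-based originals):

object NameMangleSketch {
  val LocalSuffix  = " "      // LOCAL_SUFFIX_STRING really is a single space
  val SetterSuffix = "_$eq"   // NameTransformer encoding of "_="

  def getterToLocal(name: String)  = name + LocalSuffix
  def getterToSetter(name: String) = name + SetterSuffix
  def localToGetter(name: String)  = name.dropRight(LocalSuffix.length)
  def setterToGetter(name: String) = name.dropRight(SetterSuffix.length)

  def main(args: Array[String]): Unit = {
    val getter = "x"
    val local  = getterToLocal(getter)
    val setter = getterToSetter(getter)
    println("'" + local + "'")   // 'x ' -- note the trailing space
    println(setter)              // x_$eq
    println(localToGetter(local) == getter && setterToGetter(setter) == getter) // true
  }
}
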
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index 11be9c80d1..e6ca4c49ba 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -71,39 +71,58 @@ trait Names extends api.Names {
}
/** Create a term name from the characters in cs[offset..offset+len-1]. */
- def newTermName(cs: Array[Char], offset: Int, len: Int): TermName = {
+ def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
+ newTermName(cs, offset, len, cachedString = null)
+
+ def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
+ def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
+
+ /** Create a term name from the characters in cs[offset..offset+len-1].
+ * TODO - have a mode where name validation is performed at creation time
+ * (e.g. if a name has the string "$class" in it, then fail if that
+ * string is not at the very end.)
+ */
+ protected def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
n = n.next
- if (n eq null) {
+
+ if (n ne null) n
+ else {
// The logic order here is future-proofing against the possibility
// that name.toString will become an eager val, in which case the call
// to enterChars cannot follow the construction of the TermName.
val ncStart = nc
enterChars(cs, offset, len)
- n = new TermName(ncStart, len, h)
+ if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
+ else new TermName_R(ncStart, len, h)
}
- n
}
+ protected def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
+ newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
- def newTermName(s: String): TermName =
- newTermName(s.toCharArray(), 0, s.length())
+ def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
+
+ /** Create a type name from string. */
+ def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
- val chars = Codec fromUTF8 bs.slice(offset, offset + len)
+ val chars = Codec.fromUTF8(bs, offset, len)
newTermName(chars, 0, chars.length)
}
+ def newTermNameCached(s: String): TermName =
+ newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
+
+ def newTypeNameCached(s: String): TypeName =
+ newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
+
/** Create a type name from the characters in cs[offset..offset+len-1]. */
def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
- newTermName(cs, offset, len).toTypeName
-
- /** Create a type name from string. */
- def newTypeName(s: String): TypeName =
- newTermName(s).toTypeName
+ newTermName(cs, offset, len, cachedString = null).toTypeName
/** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
@@ -114,19 +133,27 @@ trait Names extends api.Names {
// Classes ----------------------------------------------------------------------
- /** The name class. */
+ /** The name class.
+ * TODO - resolve schizophrenia regarding whether to treat Names as Strings
+ * or Strings as Names. Give names the key functions the absence of which
+ * make people want Strings all the time.
+ */
sealed abstract class Name(protected val index: Int, protected val len: Int) extends AbsName with Function1[Int, Char] {
+ type ThisNameType <: Name
+ protected[this] def thisName: ThisNameType
+
/** Index into name table */
def start: Int = index
/** The next name in the same hash bucket. */
- def next: Name
+ def next: ThisNameType
/** The length of this name. */
final def length: Int = len
final def isEmpty = length == 0
final def nonEmpty = !isEmpty
+ def nameKind: String
def isTermName: Boolean
def isTypeName: Boolean
def toTermName: TermName
@@ -134,6 +161,15 @@ trait Names extends api.Names {
def companionName: Name
def bothNames: List[Name] = List(toTermName, toTypeName)
+ /** Return the subname with characters from from to to-1. */
+ def subName(from: Int, to: Int): ThisNameType
+
+ /** Return a new name of the same variety. */
+ def newName(str: String): ThisNameType
+
+ /** Return a new name based on string transformation. */
+ def mapName(f: String => String): ThisNameType = newName(f(toString))
+
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
compat.Platform.arraycopy(chrs, index, cs, offset, len)
@@ -145,21 +181,13 @@ trait Names extends api.Names {
cs
}
- /** @return the string representation of this name */
- final override def toString(): String = new String(chrs, index, len)
- // Should we opt to make toString into a val to avoid the creation
- // of 750,000 copies of x$1, here's the line.
- // final override val toString = new String(chrs, index, len)
-
- def debugString() = NameTransformer.decode(toString) + (if (isTypeName) "!" else "")
-
/** Write to UTF8 representation of this name to given character array.
* Start copying to index `to`. Return index of next free byte in array.
* Array must have enough remaining space for all bytes
* (i.e. maximally 3*length bytes).
*/
final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
- val bytes = Codec toUTF8 chrs.slice(index, index + len)
+ val bytes = Codec.toUTF8(chrs, index, len)
compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
offset + bytes.length
}
@@ -326,17 +354,20 @@ trait Names extends api.Names {
final def startsWith(name: String): Boolean = startsWith(newTermName(name))
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
final def endsWith(name: String): Boolean = endsWith(newTermName(name))
- final def stripStart(prefix: Name): Name = subName(prefix.length, len)
- final def stripStart(prefix: String): Name = subName(prefix.length, len)
- final def stripEnd(suffix: Name): Name = subName(0, len - suffix.length)
- final def stripEnd(suffix: String): Name = subName(0, len - suffix.length)
-
- def dropRight(n: Int) = subName(0, len - n)
-
- def lastIndexOf(ch: Char) = toChars lastIndexOf ch
- /** Return the subname with characters from from to to-1. */
- def subName(from: Int, to: Int): Name
+ def dropRight(n: Int) = subName(0, len - n)
+ def drop(n: Int) = subName(n, len)
+
+ def indexOf(ch: Char) = {
+ val idx = pos(ch)
+ if (idx == length) -1 else idx
+ }
+ def indexOf(ch: Char, fromIndex: Int) = {
+ val idx = pos(ch, fromIndex)
+ if (idx == length) -1 else idx
+ }
+ def lastIndexOf(ch: Char) = lastPos(ch)
+ def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
/** Replace all occurrences of `from` by `to` in
* name; result is always a term name.
@@ -351,31 +382,86 @@ trait Names extends api.Names {
}
newTermName(cs, 0, len)
}
+
+ /** TODO - reconcile/fix that encode returns a Name but
+ * decode returns a String.
+ */
+ /** !!! Duplicative but consistently named.
+ */
+ def decoded: String = decode
+ def encoded: String = "" + encode
+ // def decodedName: ThisNameType = newName(decoded)
+ def encodedName: ThisNameType = encode
+
/** Replace operator symbols by corresponding $op_name. */
- def encode: Name = {
+ def encode: ThisNameType = {
val str = toString
val res = NameTransformer.encode(str)
- if (res == str) this
- else if (isTypeName) newTypeName(res)
- else newTermName(res)
+ if (res == str) thisName else newName(res)
}
- def append(ch: Char): Name
- def append(suffix: String): Name
- def append(suffix: Name): Name
-
/** Replace $op_name by corresponding operator symbol. */
- def decode: String = (
- NameTransformer.decode(toString) +
- (if (nameDebug && isTypeName) "!" else ""))//debug
+ def decode: String = {
+ if (this containsChar '$') {
+ val str = toString
+ val res = NameTransformer.decode(str)
+ if (res == str) str
+ else res
+ }
+ else toString
+ }
+ /** TODO - find some efficiency. */
+ def append(ch: Char) = newName("" + this + ch)
+ def append(suffix: String) = newName("" + this + suffix)
+ def append(suffix: Name) = newName("" + this + suffix)
+ def prepend(ch: Char) = newName("" + ch + this)
+ def prepend(prefix: String) = newName("" + prefix + this)
+ def prepend(prefix: Name) = newName("" + prefix + this)
+
+ def decodedName: ThisNameType = newName(decode)
def isOperatorName: Boolean = decode != toString
- def nameKind: String = if (isTypeName) "type" else "term"
- def longString: String = nameKind + " " + NameTransformer.decode(toString)
+ def longString: String = nameKind + " " + decode
+ def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
+ }
+
+ /** A name that contains no operator chars nor dollar signs.
+ * TODO - see if it's any faster to do something along these lines.
+ */
+ trait AlphaNumName extends Name {
+ final override def encode = thisName
+ final override def decodedName = thisName
+ final override def decode = toString
+ final override def isOperatorName = false
+ }
+
+ /** TermName_S and TypeName_S have fields containing the string version of the name.
+ * TermName_R and TypeName_R recreate it each time toString is called.
+ */
+ private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
+ override def newName(str: String): TermName = newTermNameCached(str)
+ }
+ private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
+ override def newName(str: String): TypeName = newTypeNameCached(str)
+ }
+
+ private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
}
- final class TermName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
+ private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
+ }
+
+ sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TermName
+ protected[this] def thisName: TermName = this
+
var next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
@@ -385,20 +471,24 @@ trait Names extends api.Names {
val h = hashValue(chrs, index, len) & HASH_MASK
var n = typeHashtable(h)
while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TypeName(index, len, h);
- n
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
}
- def append(ch: Char): TermName = append("" + ch)
- def append(suffix: String): TermName = newTermName(this + suffix)
- def append(suffix: Name): TermName = append(suffix.toString)
+ def newName(str: String): TermName = newTermName(str)
def companionName: TypeName = toTypeName
def subName(from: Int, to: Int): TermName =
newTermName(chrs, start + from, to - from)
+
+ def nameKind = "term"
+ protected def createCompanionName(h: Int): TypeName
}
- final class TypeName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
+ sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TypeName
+ protected[this] def thisName: TypeName = this
+
var next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
@@ -407,18 +497,19 @@ trait Names extends api.Names {
val h = hashValue(chrs, index, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TermName(index, len, h);
- n
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
}
def toTypeName: TypeName = this
-
- def append(ch: Char): TypeName = append("" + ch)
- def append(suffix: String): TypeName = newTypeName(this + suffix)
- def append(suffix: Name): TypeName = append(suffix.toString)
+ def newName(str: String): TypeName = newTypeName(str)
def companionName: TermName = toTermName
def subName(from: Int, to: Int): TypeName =
newTypeName(chrs, start + from, to - from)
+
+ def nameKind = "type"
+ override def decode = if (nameDebug) super.decode + "!" else super.decode
+ protected def createCompanionName(h: Int): TermName
}
}
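
newTermName above hash-conses names: it hashes the characters, walks the bucket's chain looking for an entry with the same contents, and only allocates a new one (with or without a cached string) when none exists, so equal names are reference-equal. A toy interning table showing the same lookup-then-link scheme; Interned and intern are illustrative, not the compiler's chrs-array representation.

object InternTableSketch {
  final class Interned(val value: String, var next: Interned)

  private val HashMask = 0x7FFF
  private val table    = new Array[Interned](HashMask + 1)

  def intern(s: String): Interned = {
    val h = s.hashCode & HashMask
    var n = table(h)
    while ((n ne null) && n.value != s)
      n = n.next

    if (n ne null) n
    else {
      val created = new Interned(s, table(h))   // link in front of the bucket's chain
      table(h) = created
      created
    }
  }

  def main(args: Array[String]): Unit = {
    val a = intern(new String("apply"))
    val b = intern(new String("apply"))
    println(a eq b)   // true: equal names share one entry, so identity comparison suffices
  }
}
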
diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala
index fb3012adff..37464ebf29 100644
--- a/src/compiler/scala/reflect/internal/Scopes.scala
+++ b/src/compiler/scala/reflect/internal/Scopes.scala
@@ -37,9 +37,18 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
def unapplySeq(decls: Scope): Some[Seq[Symbol]] = Some(decls.toList)
}
- class Scope(initElems: ScopeEntry) extends Iterable[Symbol] {
+ /** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
+ * This is necessary because when run from reflection every scope needs to have a
+ * SynchronizedScope as mixin.
+ */
+ class Scope protected[Scopes] (initElems: ScopeEntry = null) extends Iterable[Symbol] {
+
+ protected[Scopes] def this(base: Scope) = {
+ this(base.elems)
+ nestinglevel = base.nestinglevel + 1
+ }
- var elems: ScopeEntry = initElems
+ private[scala] var elems: ScopeEntry = initElems
/** The number of times this scope is nested in another
*/
@@ -65,20 +74,8 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (size >= MIN_HASH) createHash()
- def this() = this(null: ScopeEntry)
-
- def this(base: Scope) = {
- this(base.elems)
- nestinglevel = base.nestinglevel + 1
- }
-
- def this(decls: List[Symbol]) = {
- this()
- decls foreach enter
- }
-
/** Returns a new scope with the same content as this one. */
- def cloneScope: Scope = new Scope(this.toList)
+ def cloneScope: Scope = newScopeWith(this.toList: _*)
/** is the scope empty? */
override def isEmpty: Boolean = elems eq null
@@ -123,7 +120,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
* @param sym ...
*/
def enterUnique(sym: Symbol) {
- assert(lookup(sym.name) == NoSymbol)
+ assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
enter(sym)
}
@@ -311,7 +308,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
override def foreach[U](p: Symbol => U): Unit = toList foreach p
override def filter(p: Symbol => Boolean): Scope =
- if (!(toList forall p)) new Scope(toList filter p) else this
+ if (!(toList forall p)) newScopeWith(toList filter p: _*) else this
override def mkString(start: String, sep: String, end: String) =
toList.map(_.defString).mkString(start, sep, end)
@@ -321,21 +318,26 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** Create a new scope */
- def newScope: Scope = new Scope
+ def newScope: Scope = new Scope()
+
+ /** Create a new scope nested in another one with which it shares its elements */
+ def newNestedScope(outer: Scope): Scope = new Scope(outer)
+
+ /** Create a new scope with given initial elements */
+ def newScopeWith(elems: Symbol*): Scope = {
+ val scope = newScope
+ elems foreach scope.enter
+ scope
+ }
/** Create new scope for the members of package `pkg` */
- def newPackageScope(pkgClass: Symbol): Scope = new Scope
+ def newPackageScope(pkgClass: Symbol): Scope = newScope
/** Transform scope of members of `owner` using operation `op`
* This is overridden by the reflective compiler to avoid creating new scopes for packages
*/
def scopeTransform(owner: Symbol)(op: => Scope): Scope = op
- def newScopeWith(elems: Symbol*): Scope = {
- val scope = newScope
- elems foreach scope.enter
- scope
- }
/** The empty scope (immutable).
*/
@@ -347,7 +349,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** The error scope.
*/
- class ErrorScope(owner: Symbol) extends Scope(null: ScopeEntry)
+ class ErrorScope(owner: Symbol) extends Scope
private final val maxRecursions = 1000
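
Scope construction above is funneled through factory methods so that the reflective runtime can substitute a synchronized variant; the constructor itself stays protected to the enclosing trait. A simplified sketch of that factory pattern with a toy Scope over Strings (nothing here is the compiler's Scope API):

trait ScopeFactorySketch {
  // Constructor kept protected, as above, so callers must go through the factories.
  class Scope protected[ScopeFactorySketch] (initElems: List[String]) {
    private var elems: List[String] = initElems
    var nestingLevel = 0

    protected[ScopeFactorySketch] def this(base: Scope) = {
      this(base.toList)
      nestingLevel = base.nestingLevel + 1
    }

    def enter(sym: String): Unit = elems ::= sym
    def toList: List[String] = elems
  }

  def newScope: Scope = new Scope(Nil)
  def newNestedScope(outer: Scope): Scope = new Scope(outer)
  def newScopeWith(elems: String*): Scope = {
    val scope = newScope
    elems foreach scope.enter
    scope
  }
}

object ScopeDemo extends ScopeFactorySketch {
  def main(args: Array[String]): Unit = {
    val outer  = newScopeWith("x", "y")
    val nested = newNestedScope(outer)
    nested.enter("z")
    println(nested.toList)        // List(z, y, x)
    println(nested.nestingLevel)  // 1
  }
}
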
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 8afe276514..1f67bbc0ac 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -8,18 +8,21 @@ package internal
import scala.collection.immutable
import NameTransformer.MODULE_SUFFIX_STRING
+import annotation.switch
-trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: SymbolTable =>
+trait StdNames extends NameManglers { self: SymbolTable =>
- def encode(str: String): TermName = newTermName(NameTransformer.encode(str))
+ def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
- implicit def stringToTermName(s: String): TermName = newTermName(s)
+ implicit def lowerTermNames(n: TermName): String = "" + n
+
+ // implicit def stringToTermName(s: String): TermName = newTermName(s)
/** This should be the first trait in the linearization. */
trait Keywords {
private var kws: Set[TermName] = Set()
private def kw(s: String): TermName = {
- val result = newTermName(s)
+ val result = newTermNameCached(s)
kws = kws + result
result
}
@@ -87,7 +90,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
trait CommonNames /*extends LibraryCommonNames*/ {
type NameType <: Name
- implicit def createNameType(name: String): NameType
+ protected implicit def createNameType(name: String): NameType
val EMPTY: NameType = ""
val ANON_FUN_NAME: NameType = "$anonfun"
@@ -146,6 +149,13 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val String: NameType = "String"
final val Throwable: NameType = "Throwable"
+ final val Annotation: NameType = "Annotation"
+ final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
+ final val Enum: NameType = "Enum"
+
+ final val Tree: NameType = "Tree"
+ final val TypeTree: NameType = "TypeTree"
+
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
@@ -172,123 +182,223 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val SyntheticATTR: NameType = "Synthetic"
}
-
trait TermNames extends Keywords with CommonNames {
// Compiler internal names
+ val EXPAND_SEPARATOR_STRING = "$$"
+
val ANYNAME: NameType = "<anyname>"
val CONSTRUCTOR: NameType = "<init>"
val FAKE_LOCAL_THIS: NameType = "this$"
val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
+ val LAZY_LOCAL: NameType = "$lzy"
+ val LOCAL_SUFFIX_STRING = " "
+ val MACRO: NameType = "macro$"
+ val MIRROR_PREFIX: NameType = "$mr."
+ val MIRROR_SHORT: NameType = "$mr"
val MIXIN_CONSTRUCTOR: NameType = "$init$"
val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: NameType = "$outer"
- val OUTER_LOCAL: NameType = "$outer " // note the space
+ val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
+ val SELECTOR_DUMMY: NameType = "<unapply-selector>"
val SELF: NameType = "$this"
val SPECIALIZED_INSTANCE: NameType = "specInstance$"
val STAR: NameType = "*"
val THIS: NameType = "_$this"
- val SELECTOR_DUMMY: NameType = "<unapply-selector>"
final val Nil: NameType = "Nil"
final val Predef: NameType = "Predef"
final val ScalaRunTime: NameType = "ScalaRunTime"
final val Some: NameType = "Some"
+ val _1 : NameType = "_1"
+ val _2 : NameType = "_2"
+ val _3 : NameType = "_3"
+ val _4 : NameType = "_4"
+ val _5 : NameType = "_5"
+ val _6 : NameType = "_6"
+ val _7 : NameType = "_7"
+ val _8 : NameType = "_8"
+ val _9 : NameType = "_9"
+ val _10 : NameType = "_10"
+ val _11 : NameType = "_11"
+ val _12 : NameType = "_12"
+ val _13 : NameType = "_13"
+ val _14 : NameType = "_14"
+ val _15 : NameType = "_15"
+ val _16 : NameType = "_16"
+ val _17 : NameType = "_17"
+ val _18 : NameType = "_18"
+ val _19 : NameType = "_19"
+ val _20 : NameType = "_20"
+ val _21 : NameType = "_21"
+ val _22 : NameType = "_22"
+
+ val x_0 : NameType = "x$0"
+ val x_1 : NameType = "x$1"
+ val x_2 : NameType = "x$2"
+ val x_3 : NameType = "x$3"
+ val x_4 : NameType = "x$4"
+ val x_5 : NameType = "x$5"
+ val x_6 : NameType = "x$6"
+ val x_7 : NameType = "x$7"
+ val x_8 : NameType = "x$8"
+ val x_9 : NameType = "x$9"
+
+ @switch def syntheticParamName(i: Int): TermName = i match {
+ case 0 => nme.x_0
+ case 1 => nme.x_1
+ case 2 => nme.x_2
+ case 3 => nme.x_3
+ case 4 => nme.x_4
+ case 5 => nme.x_5
+ case 6 => nme.x_6
+ case 7 => nme.x_7
+ case 8 => nme.x_8
+ case 9 => nme.x_9
+ case _ => newTermName("x$" + i)
+ }
+
+ val wrapRefArray: NameType = "wrapRefArray"
+ val wrapByteArray: NameType = "wrapByteArray"
+ val wrapShortArray: NameType = "wrapShortArray"
+ val wrapCharArray: NameType = "wrapCharArray"
+ val wrapIntArray: NameType = "wrapIntArray"
+ val wrapLongArray: NameType = "wrapLongArray"
+ val wrapFloatArray: NameType = "wrapFloatArray"
+ val wrapDoubleArray: NameType = "wrapDoubleArray"
+ val wrapBooleanArray: NameType = "wrapBooleanArray"
+ val wrapUnitArray: NameType = "wrapUnitArray"
+ val genericWrapArray: NameType = "genericWrapArray"
+
// Compiler utilized names
// val productElementName: NameType = "productElementName"
- val TYPE_ : NameType = "TYPE"
- val add_ : NameType = "add"
- val anyValClass: NameType = "anyValClass"
- val append: NameType = "append"
- val apply: NameType = "apply"
- val arrayValue: NameType = "arrayValue"
- val arraycopy: NameType = "arraycopy"
- val asInstanceOf_ : NameType = "asInstanceOf"
- val assert_ : NameType = "assert"
- val assume_ : NameType = "assume"
- val box: NameType = "box"
- val bytes: NameType = "bytes"
- val canEqual_ : NameType = "canEqual"
- val checkInitialized: NameType = "checkInitialized"
- val classOf: NameType = "classOf"
- val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
- val conforms: NameType = "conforms"
- val copy: NameType = "copy"
- val delayedInit: NameType = "delayedInit"
- val delayedInitArg: NameType = "delayedInit$body"
- val drop: NameType = "drop"
- val elem: NameType = "elem"
- val eq: NameType = "eq"
- val equals_ : NameType = if (forMSIL) "Equals" else "equals"
- val error: NameType = "error"
- val ex: NameType = "ex"
- val false_ : NameType = "false"
- val filter: NameType = "filter"
- val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
- val find_ : NameType = "find"
- val flatMap: NameType = "flatMap"
- val foreach: NameType = "foreach"
- val formatted: NameType = "formatted"
- val genericArrayOps: NameType = "genericArrayOps"
- val get: NameType = "get"
- val hasNext: NameType = "hasNext"
- val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
- val hash_ : NameType = "hash"
- val head: NameType = "head"
- val identity: NameType = "identity"
- val inlinedEquals: NameType = "inlinedEquals"
- val applyDynamic: NameType = "applyDynamic"
- val isArray: NameType = "isArray"
- val isDefinedAt: NameType = "isDefinedAt"
- val _isDefinedAt: NameType = "_isDefinedAt"
- val isEmpty: NameType = "isEmpty"
- val isInstanceOf_ : NameType = "isInstanceOf"
- val java: NameType = "java"
- val lang: NameType = "lang"
- val length: NameType = "length"
- val lengthCompare: NameType = "lengthCompare"
- val lift_ : NameType = "lift"
- val macro_ : NameType = "macro"
- val main: NameType = "main"
- val map: NameType = "map"
- val missingCase: NameType = "missingCase"
- val ne: NameType = "ne"
- val newArray: NameType = "newArray"
- val next: NameType = "next"
- val notifyAll_ : NameType = "notifyAll"
- val notify_ : NameType = "notify"
- val null_ : NameType = "null"
- val ofDim: NameType = "ofDim"
- val productArity: NameType = "productArity"
- val productElement: NameType = "productElement"
- val productIterator: NameType = "productIterator"
- val productPrefix: NameType = "productPrefix"
- val readResolve: NameType = "readResolve"
- val runOrElse: NameType = "runOrElse"
- val sameElements: NameType = "sameElements"
- val scala_ : NameType = "scala"
- val self: NameType = "self"
- val setAccessible: NameType = "setAccessible"
- val synchronized_ : NameType = "synchronized"
- val tail: NameType = "tail"
- val this_ : NameType = "this"
- val throw_ : NameType = "throw"
- val toArray: NameType = "toArray"
- val toList: NameType = "toList"
- val toSeq: NameType = "toSeq"
- val toString_ : NameType = if (forMSIL) "ToString" else "toString"
- val true_ : NameType = "true"
- val unapply: NameType = "unapply"
- val unapplySeq: NameType = "unapplySeq"
- val unbox: NameType = "unbox"
- val update: NameType = "update"
- val value: NameType = "value"
- val view_ : NameType = "view"
- val wait_ : NameType = "wait"
- val withFilter: NameType = "withFilter"
- val wrapRefArray: NameType = "wrapRefArray"
- val zip: NameType = "zip"
+ val Ident: NameType = "Ident"
+ val StringContext: NameType = "StringContext"
+ val This: NameType = "This"
+ val Tree : NameType = "Tree"
+ val TYPE_ : NameType = "TYPE"
+ val TypeTree: NameType = "TypeTree"
+ val UNIT : NameType = "UNIT"
+ val _isDefinedAt: NameType = "_isDefinedAt"
+ val add_ : NameType = "add"
+ val annotation: NameType = "annotation"
+ val anyValClass: NameType = "anyValClass"
+ val append: NameType = "append"
+ val apply: NameType = "apply"
+ val applyDynamic: NameType = "applyDynamic"
+ val args : NameType = "args"
+ val argv : NameType = "argv"
+ val arrayValue: NameType = "arrayValue"
+ val array_apply : NameType = "array_apply"
+ val array_clone : NameType = "array_clone"
+ val array_length : NameType = "array_length"
+ val array_update : NameType = "array_update"
+ val arraycopy: NameType = "arraycopy"
+ val asInstanceOf_ : NameType = "asInstanceOf"
+ val asInstanceOf_Ob : NameType = "$asInstanceOf"
+ val asTypeConstructor: NameType = "asTypeConstructor"
+ val assert_ : NameType = "assert"
+ val assume_ : NameType = "assume"
+ val box: NameType = "box"
+ val bytes: NameType = "bytes"
+ val canEqual_ : NameType = "canEqual"
+ val checkInitialized: NameType = "checkInitialized"
+ val classOf: NameType = "classOf"
+ val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val conforms: NameType = "conforms"
+ val copy: NameType = "copy"
+ val delayedInit: NameType = "delayedInit"
+ val delayedInitArg: NameType = "delayedInit$body"
+ val drop: NameType = "drop"
+ val elem: NameType = "elem"
+ val emptyValDef: NameType = "emptyValDef"
+ val ensureAccessible : NameType = "ensureAccessible"
+ val eq: NameType = "eq"
+ val equalsNumChar : NameType = "equalsNumChar"
+ val equalsNumNum : NameType = "equalsNumNum"
+ val equalsNumObject : NameType = "equalsNumObject"
+ val equals_ : NameType = if (forMSIL) "Equals" else "equals"
+ val error: NameType = "error"
+ val ex: NameType = "ex"
+ val false_ : NameType = "false"
+ val filter: NameType = "filter"
+ val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
+ val find_ : NameType = "find"
+ val flatMap: NameType = "flatMap"
+ val foreach: NameType = "foreach"
+ val freeValue : NameType = "freeValue"
+ val genericArrayOps: NameType = "genericArrayOps"
+ val get: NameType = "get"
+ val hasNext: NameType = "hasNext"
+ val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
+ val hash_ : NameType = "hash"
+ val head: NameType = "head"
+ val identity: NameType = "identity"
+ val inlinedEquals: NameType = "inlinedEquals"
+ val isArray: NameType = "isArray"
+ val isDefinedAt: NameType = "isDefinedAt"
+ val isEmpty: NameType = "isEmpty"
+ val isInstanceOf_ : NameType = "isInstanceOf"
+ val isInstanceOf_Ob : NameType = "$isInstanceOf"
+ val java: NameType = "java"
+ val lang: NameType = "lang"
+ val length: NameType = "length"
+ val lengthCompare: NameType = "lengthCompare"
+ val lift_ : NameType = "lift"
+ val macro_ : NameType = "macro"
+ val macroThis : NameType = "_this"
+ val macroContext : NameType = "_context"
+ val main: NameType = "main"
+ val map: NameType = "map"
+ val mirror : NameType = "mirror"
+ val missingCase: NameType = "missingCase"
+ val ne: NameType = "ne"
+ val newArray: NameType = "newArray"
+ val newScopeWith: NameType = "newScopeWith"
+ val next: NameType = "next"
+ val notifyAll_ : NameType = "notifyAll"
+ val notify_ : NameType = "notify"
+ val null_ : NameType = "null"
+ val ofDim: NameType = "ofDim"
+ val productArity: NameType = "productArity"
+ val productElement: NameType = "productElement"
+ val productIterator: NameType = "productIterator"
+ val productPrefix: NameType = "productPrefix"
+ val readResolve: NameType = "readResolve"
+ val runOrElse: NameType = "runOrElse"
+ val runtime: NameType = "runtime"
+ val sameElements: NameType = "sameElements"
+ val scala_ : NameType = "scala"
+ val self: NameType = "self"
+ val setAccessible: NameType = "setAccessible"
+ val setAnnotations: NameType = "setAnnotations"
+ val setSymbol: NameType = "setSymbol"
+ val setType: NameType = "setType"
+ val setTypeSignature: NameType = "setTypeSignature"
+ val synchronized_ : NameType = "synchronized"
+ val tail: NameType = "tail"
+ val thisModuleType: NameType = "thisModuleType"
+ val this_ : NameType = "this"
+ val throw_ : NameType = "throw"
+ val toArray: NameType = "toArray"
+ val toList: NameType = "toList"
+ val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
+ val toString_ : NameType = if (forMSIL) "ToString" else "toString"
+ val true_ : NameType = "true"
+ val typedProductIterator: NameType = "typedProductIterator"
+ val unapply: NameType = "unapply"
+ val unapplySeq: NameType = "unapplySeq"
+ val unbox: NameType = "unbox"
+ val update: NameType = "update"
+ val value: NameType = "value"
+ val valueOf : NameType = "valueOf"
+ val values : NameType = "values"
+ val view_ : NameType = "view"
+ val wait_ : NameType = "wait"
+ val withFilter: NameType = "withFilter"
+ val zip: NameType = "zip"
// unencoded operators
object raw {
@@ -316,21 +426,53 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val toLong: NameType = "toLong"
val toFloat: NameType = "toFloat"
val toDouble: NameType = "toDouble"
+
+ // primitive operation methods for structural types mostly
+ // overlap with the above, but not for these two.
+ val toCharacter: NameType = "toCharacter"
+ val toInteger: NameType = "toInteger"
}
- object tpnme extends TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
+ object tpnme extends AbsTypeNames with TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
val REFINE_CLASS_NAME: NameType = "<refinement>"
val ANON_CLASS_NAME: NameType = "$anon"
}
+ /** For fully qualified type names.
+ */
+ object fulltpnme extends TypeNames {
+ type NameType = TypeName
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
+
+ val RuntimeNothing: NameType = "scala.runtime.Nothing$"
+ val RuntimeNull: NameType = "scala.runtime.Null$"
+ val JavaLangEnum: NameType = "java.lang.Enum"
+ }
+
+ /** Java binary names, like scala/runtime/Nothing$.
+ */
+ object binarynme {
+ def toBinary(name: Name) = name mapName (_.replace('.', '/'))
+
+ val RuntimeNothing = toBinary(fulltpnme.RuntimeNothing).toTypeName
+ val RuntimeNull = toBinary(fulltpnme.RuntimeNull).toTypeName
+ }
+
+ object fullnme extends TermNames {
+ type NameType = TermName
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
+
+ val MirrorPackage: NameType = "scala.reflect.mirror"
+ }
+
val javanme = nme.javaKeywords
- object nme extends TermNames /*with LibraryTermNames*/ with TermNameMangling {
+ object nme extends AbsTermNames with TermNames /*with LibraryTermNames*/ with TermNameMangling {
type NameType = TermName
- def createNameType(name: String): TermName = newTermName(name)
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Translate a String into a list of simple TypeNames and TermNames.
* In all segments before the last, type/term is determined by whether
@@ -363,7 +505,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
case -1 => if (name == "") scala.Nil else scala.List(mkName(name, assumeTerm))
// otherwise, we can tell based on whether '#' or '.' is the following char.
case idx =>
- val (simple, div, rest) = (name take idx, name charAt idx, name drop (idx + 1))
+ val (simple, div, rest) = (name take idx, name charAt idx, newTermName(name) drop (idx + 1))
mkName(simple, div == '.') :: segments(rest, assumeTerm)
}
}
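
The segments logic above splits a qualified string at '.' or '#', treating a segment followed by '.' as a term (package or object) and one followed by '#' as a type, with the final segment decided by assumeTerm. A standalone sketch of that split over plain strings, using made-up Term/Type case classes rather than Names:

object SegmentsSketch {
  sealed trait Segment
  final case class Term(name: String) extends Segment
  final case class Type(name: String) extends Segment

  def segments(name: String, assumeTerm: Boolean): List[Segment] =
    name.indexWhere(ch => ch == '.' || ch == '#') match {
      case -1 =>
        if (name == "") Nil
        else List(if (assumeTerm) Term(name) else Type(name))
      case idx =>
        val (simple, div, rest) = (name take idx, name charAt idx, name drop (idx + 1))
        val head = if (div == '.') Term(simple) else Type(simple)
        head :: segments(rest, assumeTerm)
    }

  def main(args: Array[String]): Unit = {
    println(segments("scala.collection.Seq#Coll", assumeTerm = false))
    // List(Term(scala), Term(collection), Type(Seq), Type(Coll))
  }
}
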
@@ -380,26 +522,27 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
/** The expanded name of `name` relative to this class `base` with given `separator`
*/
def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
- newTermName(base.fullName('$') + separator + name)
+ newTermNameCached(base.fullName('$') + separator + name)
- def moduleVarName(name: TermName): TermName = newTermName("" + name + MODULE_VAR_SUFFIX)
+ def moduleVarName(name: TermName): TermName =
+ newTermNameCached("" + name + MODULE_VAR_SUFFIX)
- val EXPAND_SEPARATOR_STRING = "$$"
- val LOCAL_SUFFIX_STRING = " "
val ROOTPKG: TermName = "_root_"
/** Base strings from which synthetic names are derived. */
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DO_WHILE_PREFIX = "doWhile$"
- val EQEQ_LOCAL_VAR = "eqEqTemp$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
- val WHILE_PREFIX = "while$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EQEQ_LOCAL_VAR_STRING = "eqEqTemp$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val INTERPRETER_LINE_PREFIX = "line"
+ val INTERPRETER_VAR_PREFIX = "res"
+ val INTERPRETER_WRAPPER_SUFFIX = "$object"
+ val WHILE_PREFIX = "while$"
+
+ val EQEQ_LOCAL_VAR: TermName = newTermName(EQEQ_LOCAL_VAR_STRING)
def getCause = sn.GetCause
def getClass_ = sn.GetClass
@@ -424,8 +567,8 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val MUL = encode("*")
val NE = encode("!=")
val OR = encode("|")
- val PLUS = encode("+")
- val SUB = encode("-")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val SUB = MINUS // ... as does SUB with PLUS
val XOR = encode("^")
val ZAND = encode("&&")
val ZOR = encode("||")
@@ -435,10 +578,138 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val UNARY_+ = encode("unary_+")
val UNARY_- = encode("unary_-")
val UNARY_! = encode("unary_!")
+
+ // Grouped here so Cleanup knows what tests to perform.
+ val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
+ val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
+ val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
+ val NumberOpNames = (
+ Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
+ ++ Set(UNARY_+, UNARY_-, UNARY_!)
+ ++ ConversionNames
+ ++ CommonOpNames
+ )
+
+ val add: NameType = "add"
+ val complement: NameType = "complement"
+ val divide: NameType = "divide"
+ val multiply: NameType = "multiply"
+ val negate: NameType = "negate"
+ val positive: NameType = "positive"
+ val shiftLogicalRight: NameType = "shiftLogicalRight"
+ val shiftSignedLeft: NameType = "shiftSignedLeft"
+ val shiftSignedRight: NameType = "shiftSignedRight"
+ val subtract: NameType = "subtract"
+ val takeAnd: NameType = "takeAnd"
+ val takeConditionalAnd: NameType = "takeConditionalAnd"
+ val takeConditionalOr: NameType = "takeConditionalOr"
+ val takeModulo: NameType = "takeModulo"
+ val takeNot: NameType = "takeNot"
+ val takeOr: NameType = "takeOr"
+ val takeXor: NameType = "takeXor"
+ val testEqual: NameType = "testEqual"
+ val testGreaterOrEqualThan: NameType = "testGreaterOrEqualThan"
+ val testGreaterThan: NameType = "testGreaterThan"
+ val testLessOrEqualThan: NameType = "testLessOrEqualThan"
+ val testLessThan: NameType = "testLessThan"
+ val testNotEqual: NameType = "testNotEqual"
+
+ val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean"
+ val isBoxedNumber: NameType = "isBoxedNumber"
+
+ def toUnaryName(name: TermName): TermName = name match {
+ case raw.MINUS => UNARY_-
+ case raw.PLUS => UNARY_+
+ case raw.TILDE => UNARY_~
+ case raw.BANG => UNARY_!
+ case _ => name
+ }
+ /** The name of a method which stands in for a primitive operation
+ * during structural type dispatch.
+ */
+ def primitiveInfixMethodName(name: Name): TermName = name match {
+ case OR => takeOr
+ case XOR => takeXor
+ case AND => takeAnd
+ case EQ => testEqual
+ case NE => testNotEqual
+ case ADD => add
+ case SUB => subtract
+ case MUL => multiply
+ case DIV => divide
+ case MOD => takeModulo
+ case LSL => shiftSignedLeft
+ case LSR => shiftLogicalRight
+ case ASR => shiftSignedRight
+ case LT => testLessThan
+ case LE => testLessOrEqualThan
+ case GE => testGreaterOrEqualThan
+ case GT => testGreaterThan
+ case ZOR => takeConditionalOr
+ case ZAND => takeConditionalAnd
+ case _ => NO_NAME
+ }
+ /** Postfix/prefix, really.
+ */
+ def primitivePostfixMethodName(name: Name): TermName = name match {
+ case UNARY_! => takeNot
+ case UNARY_+ => positive
+ case UNARY_- => negate
+ case UNARY_~ => complement
+ case `toByte` => toByte
+ case `toShort` => toShort
+ case `toChar` => toCharacter
+ case `toInt` => toInteger
+ case `toLong` => toLong
+ case `toFloat` => toFloat
+ case `toDouble` => toDouble
+ case _ => NO_NAME
+ }
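
To make the structural-dispatch renaming above concrete, here is a small standalone sketch mapping raw operator strings to the method names defined above (only a handful of cases, with Option standing in for NO_NAME); it is an illustration of the table, not the compiler's own code.

    object StructuralDispatchNamesSketch {
      // String-level analogue of primitiveInfixMethodName: operators are rewritten to
      // named methods so they can be dispatched reflectively on structural types.
      def infixName(op: String): Option[String] = op match {
        case "+"  => Some("add")
        case "-"  => Some("subtract")
        case "*"  => Some("multiply")
        case "/"  => Some("divide")
        case "%"  => Some("takeModulo")
        case "<"  => Some("testLessThan")
        case "==" => Some("testEqual")
        case "&&" => Some("takeConditionalAnd")
        case _    => None // corresponds to NO_NAME above
      }

      // String-level analogue of primitivePostfixMethodName for the two conversions
      // that do not simply map to themselves.
      def postfixName(op: String): Option[String] = op match {
        case "toChar" => Some("toCharacter")
        case "toInt"  => Some("toInteger")
        case _        => None
      }

      def main(args: Array[String]): Unit = {
        assert(infixName("+") == Some("add"))
        assert(infixName("&&") == Some("takeConditionalAnd"))
        assert(postfixName("toInt") == Some("toInteger"))
      }
    }
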
+
+ val reflPolyCacheName: NameType = "reflPoly$Cache"
+ val reflClassCacheName: NameType = "reflClass$Cache"
+ val reflParamsCacheName: NameType = "reflParams$Cache"
+ val reflMethodCacheName: NameType = "reflMethod$Cache"
+ val reflMethodName: NameType = "reflMethod$Method"
+
+ private val reflectionCacheNames = Set[NameType](
+ reflPolyCacheName,
+ reflClassCacheName,
+ reflParamsCacheName,
+ reflMethodCacheName,
+ reflMethodName
+ )
+ def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
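
A quick standalone illustration of the prefix test above, using plain Strings instead of Names; a startsWith check is used rather than equality, presumably because generated cache members append suffixes to these base names.

    object ReflectionCacheNameSketch {
      private val reflectionCachePrefixes = Set(
        "reflPoly$Cache", "reflClass$Cache", "reflParams$Cache",
        "reflMethod$Cache", "reflMethod$Method"
      )
      // String-level analogue of isReflectionCacheName: a prefix match, not equality.
      def isReflectionCacheName(name: String): Boolean =
        reflectionCachePrefixes exists (name startsWith _)

      def main(args: Array[String]): Unit = {
        assert(isReflectionCacheName("reflMethod$Cache1")) // hypothetical generated name
        assert(!isReflectionCacheName("ordinaryField"))
      }
    }
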
+
+ @switch def productAccessorName(j: Int): TermName = j match {
+ case 1 => nme._1
+ case 2 => nme._2
+ case 3 => nme._3
+ case 4 => nme._4
+ case 5 => nme._5
+ case 6 => nme._6
+ case 7 => nme._7
+ case 8 => nme._8
+ case 9 => nme._9
+ case 10 => nme._10
+ case 11 => nme._11
+ case 12 => nme._12
+ case 13 => nme._13
+ case 14 => nme._14
+ case 15 => nme._15
+ case 16 => nme._16
+ case 17 => nme._17
+ case 18 => nme._18
+ case 19 => nme._19
+ case 20 => nme._20
+ case 21 => nme._21
+ case 22 => nme._22
+ case _ => newTermName("_" + j)
+ }
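
The @switch table above exists so that the common arities 1 to 22 return the pre-interned nme._N constants without allocating; only arities past 22 build a fresh TermName. A trivial standalone sketch of the resulting names:

    object ProductAccessorNameSketch {
      // String-level analogue: the accessor for position j is always "_" + j; the real
      // method differs only in returning cached TermNames for j = 1..22.
      def productAccessorName(j: Int): String = "_" + j

      def main(args: Array[String]): Unit = {
        assert(productAccessorName(3)  == "_3")
        assert(productAccessorName(23) == "_23") // beyond the cached range, freshly built
      }
    }
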
}
abstract class SymbolNames {
- protected implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+ protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
val BeanProperty : TypeName
val BooleanBeanProperty : TypeName
@@ -446,7 +717,6 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val BoxedCharacter : TypeName
val BoxedNumber : TypeName
val Class : TypeName
- val Code : TypeName
val Delegate : TypeName
val IOOBException : TypeName // IndexOutOfBoundsException
val InvTargetException : TypeName // InvocationTargetException
@@ -471,7 +741,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
class JavaKeywords {
private var kws: Set[TermName] = Set()
private def kw(s: String): TermName = {
- val result = newTermName(s)
+ val result = newTermNameCached(s)
kws = kws + result
result
}
@@ -555,12 +825,12 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val Throwable: TypeName = "java.lang.Throwable"
final val ValueType: TypeName = tpnme.NO_NAME
- final val ForName: TermName = "forName"
- final val GetCause: TermName = "getCause"
- final val GetClass: TermName = "getClass"
- final val GetMethod: TermName = "getMethod"
- final val Invoke: TermName = "invoke"
- final val JavaLang: TermName = "java.lang"
+ final val ForName: TermName = newTermName("forName")
+ final val GetCause: TermName = newTermName("getCause")
+ final val GetClass: TermName = newTermName("getClass")
+ final val GetMethod: TermName = newTermName("getMethod")
+ final val Invoke: TermName = newTermName("invoke")
+ final val JavaLang: TermName = newTermName("java.lang")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
@@ -581,7 +851,6 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val BoxedCharacter: TypeName = "System.IConvertible"
final val BoxedNumber: TypeName = "System.IConvertible"
final val Class: TypeName = "System.Type"
- final val Code: TypeName = tpnme.NO_NAME
final val Delegate: TypeName = "System.MulticastDelegate"
final val IOOBException: TypeName = "System.IndexOutOfRangeException"
final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
@@ -593,12 +862,12 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val Throwable: TypeName = "System.Exception"
final val ValueType: TypeName = "System.ValueType"
- final val ForName: TermName = "GetType"
- final val GetCause: TermName = "InnerException" /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = "GetType"
- final val GetMethod: TermName = "GetMethod"
- final val Invoke: TermName = "Invoke"
- final val JavaLang: TermName = "System"
+ final val ForName: TermName = newTermName("GetType")
+ final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
+ final val GetClass: TermName = newTermName("GetType")
+ final val GetMethod: TermName = newTermName("GetMethod")
+ final val Invoke: TermName = newTermName("Invoke")
+ final val JavaLang: TermName = newTermName("System")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> "System.Boolean",
@@ -615,7 +884,6 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
private class J2SENames extends JavaNames {
final val BeanProperty: TypeName = "scala.beans.BeanProperty"
final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
- final val Code: TypeName = "scala.reflect.Code"
final val JavaSerializable: TypeName = "java.io.Serializable"
}
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 29ac5fe539..1973a97279 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -10,9 +10,11 @@ import scala.collection.{ mutable, immutable }
import util._
abstract class SymbolTable extends api.Universe
+ with Collections
with Names
with Symbols
with Types
+ with Kinds
with Scopes
with Definitions
with Constants
@@ -31,12 +33,26 @@ abstract class SymbolTable extends api.Universe
{
def rootLoader: LazyType
def log(msg: => AnyRef): Unit
- def abort(msg: String): Nothing = throw new FatalError(msg)
+ def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+
+ @deprecated("2.10.0", "Give us a reason")
def abort(): Nothing = abort("unknown error")
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+
+ /** Overridden when we know more about what was happening during a failure. */
+ def supplementErrorMessage(msg: String): String = msg
+
+ private[scala] def printResult[T](msg: String)(result: T) = {
+ Console.err.println(msg + ": " + result)
+ result
+ }
+ private[scala] def logResult[T](msg: String)(result: T): T = {
+ log(msg + ": " + result)
+ result
+ }
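
printResult and logResult are tap-style helpers: they emit a labelled message and hand the value back unchanged, so they can be wrapped around an existing expression without restructuring it. A minimal runnable sketch of the same pattern outside the compiler:

    object LogResultSketch {
      def printResult[T](msg: String)(result: T): T = {
        Console.err.println(msg + ": " + result)
        result
      }

      def main(args: Array[String]): Unit = {
        // The value flows through untouched; only the logging side effect is added.
        val n = printResult("computed length")("hello".length)
        assert(n == 5)
      }
    }
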
/** Are we compiling for Java SE? */
// def forJVM: Boolean
@@ -104,9 +120,11 @@ abstract class SymbolTable extends api.Universe
try op
finally phase = current
}
-
- @inline final def afterPhase[T](ph: Phase)(op: => T): T =
- atPhase(ph.next)(op)
+ /** Since what it means to be "at" a phase is inherently ambiguous,
+ * here are a couple of unambiguously named methods.
+ */
+ @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
+ @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)
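
Both helpers delegate to the atPhase save-and-restore pattern visible just above. The following standalone sketch, with a toy Phase type standing in for the compiler's, shows the mechanics: the target phase is installed for the duration of op and the previous phase is restored afterwards.

    object PhaseTravelSketch {
      final case class Phase(id: Int, name: String) {
        def next: Phase = Phase(id + 1, name + ".next")
      }
      private var phase: Phase = Phase(1, "typer")

      def atPhase[T](ph: Phase)(op: => T): T = {
        val current = phase
        phase = ph
        try op
        finally phase = current
      }
      // beforePhase evaluates op with ph itself installed, afterPhase with the phase after it.
      def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
      def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)

      def main(args: Array[String]): Unit = {
        val erasure = Phase(10, "erasure")
        val seen = afterPhase(erasure)(phase.name)
        assert(seen == "erasure.next") // op saw the phase following erasure
        assert(phase.name == "typer")  // the original phase was restored
      }
    }
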
@inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
if (target != NoPhase && phase.id > target.id) atPhase(target)(op) else op
@@ -147,7 +165,7 @@ abstract class SymbolTable extends api.Universe
}
}
// enter decls of parent classes
- for (pt <- container.info.parents; p = pt.typeSymbol) {
+ for (p <- container.parentSymbols) {
if (p != definitions.ObjectClass && p != definitions.ScalaObjectClass) {
openPackageModule(p, dest)
}
@@ -260,4 +278,9 @@ abstract class SymbolTable extends api.Universe
/** The phase which has given index as identifier. */
val phaseWithId: Array[Phase]
+
+ /** Is this symbol table part of a reflexive mirror? If so,
+ * operations need to be made thread-safe.
+ */
+ def inReflexiveMirror = false
}
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 0c57f0c43a..77ed2f6a1b 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -16,10 +16,13 @@ import api.Modifier
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
- private var ids = 0
+ protected var ids = 0
+
+ val emptySymbolArray = new Array[Symbol](0)
+
def symbolCount = ids // statistics
- val emptySymbolArray = new Array[Symbol](0)
+ protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
//protected var activeLocks = 0
@@ -31,10 +34,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private var recursionTable = immutable.Map.empty[Symbol, Int]
private var nextexid = 0
- private def freshExistentialName(suffix: String) = {
+ protected def freshExistentialName(suffix: String) = {
nextexid += 1
newTypeName("_" + nextexid + suffix)
}
+
+ // Set the fields which point companions at one another. Returns the module.
+ def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
+ moduleClass.sourceModule = m
+ m setModuleClass moduleClass
+ m
+ }
+
+ /** Create a new free variable. Its owner is NoSymbol.
+ */
+ def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
+ new FreeVar(name, value) initFlags newFlags setInfo tpe
/** The original owner of a class. Used by the backend to generate
* EnclosingMethod attributes.
@@ -42,22 +57,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
abstract class AbsSymbolImpl extends AbsSymbol { this: Symbol =>
- def newNestedSymbol(pos: Position, name: Name) = name match {
- case n: TermName => newValue(pos, n)
- case n: TypeName => newAliasType(pos, n)
- }
- def typeSig: Type = info
- def typeSigIn(site: Type): Type = site.memberInfo(this)
+ def newNestedSymbol(name: Name, pos: Position, newFlags: Long) = name match {
+ case n: TermName => newTermSymbol(n, pos, newFlags)
+ case n: TypeName => newTypeSymbol(n, pos, newFlags)
+ }
+ def enclosingClass: Symbol = enclClass
+ def enclosingMethod: Symbol = enclMethod
+ def thisPrefix: Type = thisType
+ def selfType: Type = typeOfThis
+ def typeSignature: Type = info
+ def typeSignatureIn(site: Type): Type = site memberInfo this
+
def asType: Type = tpe
def asTypeIn(site: Type): Type = site.memberType(this)
def asTypeConstructor: Type = typeConstructor
def setInternalFlags(flag: Long): this.type = { setFlag(flag); this }
- def setTypeSig(tpe: Type): this.type = { setInfo(tpe); this }
+ def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this }
def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
}
/** The class for all symbols */
- abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name)
+ abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
extends AbsSymbolImpl
with HasFlags
with Annotatable[Symbol] {
@@ -66,87 +86,132 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
- var rawowner = initOwner
- var rawname = initName
- var rawflags = 0L
-
+ private[this] var _rawowner = initOwner // Syncnote: need not be protected, as the only assignment happens in owner_=, which is not exposed to the api
+ private[this] var _rawname = initName
+ private[this] var _rawflags = 0L
+
+ def rawowner = _rawowner
+ def rawname = _rawname
+ def rawflags = _rawflags
+
+ protected def rawflags_=(x: FlagsType) { _rawflags = x }
+
private var rawpos = initPos
- val id = { ids += 1; ids } // identity displayed when -uniqid
- //assert(id != 3204, initName)
+
+ val id = nextId() // identity displayed when -uniqid
- var validTo: Period = NoPeriod
+ private[this] var _validTo: Period = NoPeriod
+
+ def validTo = _validTo
+ def validTo_=(x: Period) { _validTo = x}
def pos = rawpos
def setPos(pos: Position): this.type = { this.rawpos = pos; this }
- override def hasModifier(mod: Modifier.Value) =
+ /** !!! The logic after "hasFlag" is far too opaque to be unexplained.
+ * I'm guessing it's attempting to compensate for flag overloading,
+ * and embedding such logic in an undocumented island like this is a
+ * notarized guarantee of future breakage.
+ */
+ override def hasModifier(mod: Modifier) =
hasFlag(flagOfModifier(mod)) &&
(!(mod == Modifier.bynameParameter) || isTerm) &&
(!(mod == Modifier.covariant) || isType)
- override def allModifiers: Set[Modifier.Value] =
+ override def modifiers: Set[Modifier] =
Modifier.values filter hasModifier
// ------ creators -------------------------------------------------------------------
- final def newValue(pos: Position, name: TermName) =
- new TermSymbol(this, pos, name)
- final def newValue(name: TermName, pos: Position = NoPosition) =
- new TermSymbol(this, pos, name)
- final def newVariable(pos: Position, name: TermName) =
- newValue(pos, name).setFlag(MUTABLE)
- final def newValueParameter(pos: Position, name: TermName) =
- newValue(pos, name).setFlag(PARAM)
+ final def newValue(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, newFlags)
+ final def newVariable(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, MUTABLE | newFlags)
+ final def newValueParameter(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, PARAM | newFlags)
+
/** Create local dummy for template (owner of local blocks) */
final def newLocalDummy(pos: Position) =
- newValue(pos, nme.localDummyName(this)).setInfo(NoType)
- final def newMethod(pos: Position, name: TermName) =
- new MethodSymbol(this, pos, name).setFlag(METHOD)
- final def newMethod(name: TermName, pos: Position = NoPosition) =
- new MethodSymbol(this, pos, name).setFlag(METHOD)
- final def newLabel(pos: Position, name: TermName) =
- newMethod(pos, name).setFlag(LABEL)
+ newTermSymbol(nme.localDummyName(this), pos) setInfo NoType
+ final def newMethod(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
+ newMethodSymbol(name, pos, METHOD | newFlags)
+ final def newLabel(name: TermName, pos: Position = NoPosition): MethodSymbol =
+ newMethod(name, pos, LABEL)
/** Propagates ConstrFlags (JAVA, specifically) from owner to constructor. */
- final def newConstructor(pos: Position) =
- newMethod(pos, nme.CONSTRUCTOR) setFlag getFlag(ConstrFlags)
+ final def newConstructor(pos: Position, newFlags: Long = 0L) =
+ newMethod(nme.CONSTRUCTOR, pos, getFlag(ConstrFlags) | newFlags)
+
/** Static constructor with info set. */
def newStaticConstructor(pos: Position) =
- newConstructor(pos) setFlag STATIC setInfo UnitClass.tpe
+ newConstructor(pos, STATIC) setInfo UnitClass.tpe
/** Instance constructor with info set. */
def newClassConstructor(pos: Position) =
newConstructor(pos) setInfo MethodType(Nil, this.tpe)
- private def finishModule(m: ModuleSymbol, clazz: ClassSymbol): ModuleSymbol = {
- // Top-level objects can be automatically marked final, but others
- // must be explicitly marked final if overridable objects are enabled.
- val flags = if (isPackage || !settings.overrideObjects.value) MODULE | FINAL else MODULE
- m setFlag flags
- m setModuleClass clazz
- m
+ // Top-level objects can be automatically marked final, but others
+ // must be explicitly marked final if overridable objects are enabled.
+ private def ModuleFlags = (
+ if (isPackage || !settings.overrideObjects.value) MODULE | FINAL
+ else MODULE
+ )
+ def newLinkedModule(clazz: Symbol, newFlags: Long = 0L): ModuleSymbol = {
+ val m = newModuleSymbol(clazz.name.toTermName, clazz.pos, ModuleFlags | newFlags)
+ connectModuleToClass(m, clazz.asInstanceOf[ClassSymbol])
+ }
+ final def newModule(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = {
+ val m = newModuleSymbol(name, pos, newFlags | ModuleFlags)
+ val clazz = newModuleClassSymbol(name.toTypeName, pos, (m getFlag ModuleToClassFlags) | MODULE)
+ connectModuleToClass(m, clazz)
}
- private def finishModule(m: ModuleSymbol): ModuleSymbol =
- finishModule(m, new ModuleClassSymbol(m))
-
- final def newModule(pos: Position, name: TermName, clazz: ClassSymbol): ModuleSymbol =
- finishModule(new ModuleSymbol(this, pos, name), clazz)
-
- final def newModule(name: TermName, clazz: Symbol, pos: Position = NoPosition): ModuleSymbol =
- newModule(pos, name, clazz.asInstanceOf[ClassSymbol])
-
- final def newModule(pos: Position, name: TermName): ModuleSymbol =
- finishModule(new ModuleSymbol(this, pos, name))
- final def newPackage(pos: Position, name: TermName): ModuleSymbol = {
- assert(name == nme.ROOT || isPackageClass)
- val m = newModule(pos, name).setFlag(JAVA | PACKAGE)
- m.moduleClass setFlag (JAVA | PACKAGE)
- m
+ final def newPackage(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = {
+ assert(name == nme.ROOT || isPackageClass, this)
+ newModule(name, pos, JAVA | PACKAGE | newFlags)
}
final def newThisSym(pos: Position) =
- newValue(pos, nme.this_).setFlag(SYNTHETIC)
+ newTermSymbol(nme.this_, pos, SYNTHETIC)
final def newImport(pos: Position) =
- newValue(pos, nme.IMPORT)
+ newTermSymbol(nme.IMPORT, pos)
+
+ /** Direct symbol factories.
+ * For internal use; these are unlikely to be what you want.
+ */
+ def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ new TermSymbol(this, pos, name) initFlags newFlags
+
+ def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
+ new AbstractTypeSymbol(this, pos, name) initFlags newFlags
+
+ def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
+ new AliasTypeSymbol(this, pos, name) initFlags newFlags
+
+ def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
+ new ModuleSymbol(this, pos, name) initFlags newFlags
+
+ def newMethodSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
+ new MethodSymbol(this, pos, name) initFlags newFlags
+
+ def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
+ new ClassSymbol(this, pos, name) initFlags newFlags
+
+ def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
+ new ModuleClassSymbol(this, pos, name) initFlags newFlags
+
+ /** Derive whether it is an abstract type from the flags; after creation
+ * the DEFERRED flag will be ignored.
+ */
+ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+ if ((newFlags & DEFERRED) == 0L)
+ newAliasTypeSymbol(name, pos, newFlags)
+ else
+ newAbstractTypeSymbol(name, pos, newFlags)
+
+ def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
+ if ((newFlags & DEFERRED) == 0L)
+ new TypeSkolem(this, pos, name, origin) initFlags newFlags
+ else
+ new TypeSkolem(this, pos, name, origin) with AbstractTypeMixin initFlags newFlags
/** @param pre type relative to which alternatives are seen.
* for instance:
@@ -166,58 +231,54 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* pre.memberType(m)
*/
- final def newOverloaded(pre: Type, alternatives: List[Symbol]): Symbol =
- newValue(alternatives.head.pos, alternatives.head.name.toTermName)
- .setFlag(OVERLOADED)
- .setInfo(OverloadedType(pre, alternatives))
-
- /** for explicit outer phase */
- final def newOuterAccessor(pos: Position) = {
- val sym = newMethod(pos, nme.OUTER)
- sym setFlag (STABLE | SYNTHETIC)
- if (isTrait) sym setFlag DEFERRED
- sym.expandName(this)
- sym.referenced = this
- sym
- }
+ final def newOverloaded(pre: Type, alternatives: List[Symbol]): Symbol = (
+ newTermSymbol(alternatives.head.name.toTermName, alternatives.head.pos, OVERLOADED)
+ setInfo OverloadedType(pre, alternatives)
+ )
final def newErrorValue(name: TermName) =
- newValue(pos, name).setFlag(SYNTHETIC | IS_ERROR).setInfo(ErrorType)
+ newTermSymbol(name, pos, SYNTHETIC | IS_ERROR) setInfo ErrorType
/** Symbol of a type definition type T = ...
*/
- final def newAliasType(pos: Position, name: TypeName) =
- new TypeSymbol(this, pos, name)
- final def newAliasType(name: TypeName, pos: Position = NoPosition) =
- new TypeSymbol(this, pos, name)
-
+ final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ newAliasTypeSymbol(name, pos, newFlags)
+
/** Symbol of an abstract type type T >: ... <: ...
*/
- final def newAbstractType(pos: Position, name: TypeName) =
- new TypeSymbol(this, pos, name).setFlag(DEFERRED)
- final def newAbstractType(name: TypeName, pos: Position = NoPosition) =
- new TypeSymbol(this, pos, name).setFlag(DEFERRED)
+ final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ newAbstractTypeSymbol(name, pos, DEFERRED | newFlags)
/** Symbol of a type parameter
*/
- final def newTypeParameter(pos: Position, name: TypeName) =
- newAbstractType(pos, name).setFlag(PARAM)
+ final def newTypeParameter(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
+ newAbstractType(name, pos, PARAM | newFlags)
/** Synthetic value parameters when parameter symbols are not available
*/
final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[Symbol]] = {
var cnt = 0
- def freshName() = { cnt += 1; newTermName("x$" + cnt) }
- def param(tp: Type) =
- newValueParameter(focusPos(owner.pos), freshName()).setFlag(SYNTHETIC).setInfo(tp)
- argtypess map (_.map(param))
+ def freshName() = { cnt += 1; nme.syntheticParamName(cnt) }
+ mmap(argtypess)(tp => newValueParameter(freshName(), focusPos(owner.pos), SYNTHETIC) setInfo tp)
}
+
+ def newSyntheticTypeParam(): Symbol = newSyntheticTypeParam("T0", 0L)
+ def newSyntheticTypeParam(name: String, newFlags: Long): Symbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
+ def newSyntheticTypeParams(num: Int): List[Symbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
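
A small standalone illustration of the naming convention used by newSyntheticTypeParams: parameters are called T0, T1, ... and, in the real method, each is given TypeBounds.empty (i.e. >: Nothing <: Any).

    object SyntheticTypeParamNamesSketch {
      // String-level analogue of newSyntheticTypeParams above.
      def syntheticTypeParamNames(num: Int): List[String] =
        (0 until num).toList.map("T" + _)

      def main(args: Array[String]): Unit = {
        assert(syntheticTypeParamNames(3) == List("T0", "T1", "T2"))
      }
    }
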
- final def newExistential(pos: Position, name: TypeName): Symbol =
- newAbstractType(pos, name).setFlag(EXISTENTIAL)
+ /** Create a new existential type skolem with this symbol as its owner,
+ * based on the given symbol and origin.
+ */
+ def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
+ val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
+ skolem setInfo (basis.info cloneInfo skolem)
+ }
+
+ final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
+ newAbstractType(name, pos, EXISTENTIAL | newFlags)
final def freshExistential(suffix: String): Symbol =
- newExistential(pos, freshExistentialName(suffix))
+ newExistential(freshExistentialName(suffix), pos)
/** Synthetic value parameters when parameter symbols are not available.
* Calling this method multiple times will re-use the same parameter names.
@@ -237,58 +298,73 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* body of the method, there's a local copy of `T` which is a TypeSkolem.
*/
final def newTypeSkolem: Symbol =
- new TypeSkolem(owner, pos, name.toTypeName, this)
- .setFlag(flags)
+ owner.newTypeSkolemSymbol(name.toTypeName, this, pos, flags)
- final def newClass(pos: Position, name: TypeName) =
- new ClassSymbol(this, pos, name)
- final def newClass(name: TypeName, pos: Position = NoPosition) =
- new ClassSymbol(this, pos, name)
+ final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L) =
+ newClassSymbol(name, pos, newFlags)
+
+ /** A new class with its info set to a ClassInfoType with given scope and parents. */
+ def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L) = {
+ val clazz = newClass(name, pos, newFlags)
+ clazz setInfo ClassInfoType(parents, scope, clazz)
+ }
+ final def newErrorClass(name: TypeName) =
+ newClassWithInfo(name, Nil, new ErrorScope(this), pos, SYNTHETIC | IS_ERROR)
- final def newModuleClass(pos: Position, name: TypeName) =
- new ModuleClassSymbol(this, pos, name)
final def newModuleClass(name: TypeName, pos: Position = NoPosition) =
- new ModuleClassSymbol(this, pos, name)
+ newModuleClassSymbol(name, pos)
final def newAnonymousClass(pos: Position) =
- newClass(pos, tpnme.ANON_CLASS_NAME)
- final def newAnonymousFunctionClass(pos: Position) =
- newClass(pos, tpnme.ANON_FUN_NAME)
+ newClassSymbol(tpnme.ANON_CLASS_NAME, pos)
+
+ final def newAnonymousFunctionClass(pos: Position, newFlags: Long = 0L) =
+ newClassSymbol(tpnme.ANON_FUN_NAME, pos, FINAL | SYNTHETIC | newFlags)
+
+ final def newAnonymousFunctionValue(pos: Position, newFlags: Long = 0L) =
+ newTermSymbol(nme.ANON_FUN_NAME, pos, SYNTHETIC | newFlags) setInfo NoType
/** Refinement types P { val x: String; type T <: Number }
* also have symbols, they are refinementClasses
*/
final def newRefinementClass(pos: Position) =
- newClass(pos, tpnme.REFINE_CLASS_NAME)
+ newClass(tpnme.REFINE_CLASS_NAME, pos)
/** Create a new getter for current symbol (which must be a field)
*/
final def newGetter: Symbol = (
- owner.newMethod(focusPos(pos), nme.getterName(name))
- setFlag getterFlags(flags)
+ owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
setPrivateWithin privateWithin
setInfo MethodType(Nil, tpe)
)
- final def newErrorClass(name: TypeName) = {
- val clazz = newClass(pos, name)
- ( clazz
- setFlag (SYNTHETIC | IS_ERROR)
- setInfo ClassInfoType(Nil, new ErrorScope(this), clazz)
- )
- }
-
final def newErrorSymbol(name: Name): Symbol = name match {
case x: TypeName => newErrorClass(x)
case x: TermName => newErrorValue(x)
}
+ @deprecated("Use the other signature", "2.10.0")
+ def newClass(pos: Position, name: TypeName): Symbol = newClass(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newModuleClass(pos: Position, name: TypeName): Symbol = newModuleClass(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newLabel(pos: Position, name: TermName): MethodSymbol = newLabel(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newValue(pos: Position, name: TermName): TermSymbol = newTermSymbol(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newAliasType(pos: Position, name: TypeName): Symbol = newAliasType(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newAbstractType(pos: Position, name: TypeName): Symbol = newAbstractType(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newExistential(pos: Position, name: TypeName): Symbol = newExistential(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newMethod(pos: Position, name: TermName): MethodSymbol = newMethod(name, pos)
+
// ----- locking and unlocking ------------------------------------------------------
// True if the symbol is unlocked.
// True if the symbol is locked but still below the allowed recursion depth.
// False otherwise
- def lockOK: Boolean = {
+ private[scala] def lockOK: Boolean = {
((rawflags & LOCKED) == 0L) ||
((settings.Yrecursion.value != 0) &&
(recursionTable get this match {
@@ -297,33 +373,37 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
// Lock a symbol, using the handler if the recursion depth becomes too great.
- def lock(handler: => Unit) = {
+ private[scala] def lock(handler: => Unit): Boolean = {
if ((rawflags & LOCKED) != 0L) {
if (settings.Yrecursion.value != 0) {
recursionTable get this match {
case Some(n) =>
if (n > settings.Yrecursion.value) {
handler
+ false
} else {
recursionTable += (this -> (n + 1))
+ true
}
case None =>
recursionTable += (this -> 1)
+ true
}
- } else { handler }
+ } else { handler; false }
} else {
rawflags |= LOCKED
+ true
// activeLocks += 1
// lockedSyms += this
}
}
// Unlock a symbol
- def unlock() = {
+ private[scala] def unlock() = {
if ((rawflags & LOCKED) != 0L) {
// activeLocks -= 1
// lockedSyms -= this
- rawflags = rawflags & ~LOCKED
+ _rawflags = rawflags & ~LOCKED
if (settings.Yrecursion.value != 0)
recursionTable -= this
}
@@ -409,7 +489,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isError = hasFlag(IS_ERROR)
final def isErroneous = isError || isInitialized && tpe.isErroneous
final def isTypeParameterOrSkolem = isType && hasFlag(PARAM)
- final def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
+ final def isHigherOrderTypeParameter = (this ne NoSymbol) && owner.isTypeParameterOrSkolem
final def isTypeSkolem = isSkolem && hasFlag(PARAM)
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
@@ -424,20 +504,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isAnonOrRefinementClass = isAnonymousClass || isRefinementClass
// A package object or its module class
- final def isPackageObjectOrClass = name == nme.PACKAGE || name == tpnme.PACKAGE
- final def isPackageObject = name == nme.PACKAGE && owner.isPackageClass
- final def isPackageObjectClass = name == tpnme.PACKAGE && owner.isPackageClass
+ final def isPackageObjectOrClass = (this ne NoSymbol) && owner.isPackageClass && (name == nme.PACKAGE || name == tpnme.PACKAGE)
+ final def isPackageObject = (this ne NoSymbol) && owner.isPackageClass && name == nme.PACKAGE
+ final def isPackageObjectClass = (this ne NoSymbol) && owner.isPackageClass && name == tpnme.PACKAGE
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def isJavaInterface = isJavaDefined && isTrait
- final def needsFlatClasses: Boolean = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
// In java.lang, Predef, or scala package/package object
def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner)
/** The owner, skipping package objects.
*/
- def effectiveOwner = owner.skipPackageObject
+ def effectiveOwner = if (owner.isPackageObjectClass) owner.skipPackageObject else owner
/** If this is a package object or its implementing class, its owner: otherwise this.
*/
@@ -688,7 +768,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
def owner: Symbol = rawowner
- final def owner_=(owner: Symbol) {
+ def owner_=(owner: Symbol) {
// don't keep the original owner in presentation compiler runs
// (the map will grow indefinitely, and the only use case is the
// backend).
@@ -696,18 +776,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (originalOwner contains this) ()
else originalOwner(this) = rawowner
}
-
- rawowner = owner
- }
- private[Symbols] def flattenName(): Name = {
- // This assertion caused me no end of trouble in the interpeter in situations
- // where everything proceeds smoothly if there's no assert. I don't think calling "name"
- // on a symbol is the right place to throw fatal exceptions if things don't look right.
- // It really hampers exploration. Finally I gave up and disabled it, and tickets like
- // SI-4874 instantly start working.
- // assert(rawowner.isClass, "fatal: %s has non-class owner %s after flatten.".format(rawname + idString, rawowner))
-
- nme.flattenedName(rawowner.name, rawname)
+ assert(!inReflexiveMirror, "owner_= is not thread-safe; cannot be run in reflexive code")
+ _rawowner = owner
}
def ownerChain: List[Symbol] = this :: owner.ownerChain
@@ -740,7 +810,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def name: Name = rawname
- final def name_=(name: Name) {
+ def name_=(name: Name) {
if (name != rawname) {
if (owner.isClass) {
var ifs = owner.infos
@@ -749,7 +819,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
ifs = ifs.prev
}
}
- rawname = name
+ _rawname = name
}
}
@@ -793,7 +863,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* Never adds id.
* Drops package objects.
*/
- final def fullName(separator: Char): String = nme.dropLocalSuffix(fullNameInternal(separator)).toString
+ final def fullName(separator: Char): String = fullNameAsName(separator).toString
/** Doesn't drop package objects, for those situations (e.g. classloading)
* where the true path is needed.
@@ -801,8 +871,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def fullNameInternal(separator: Char): Name = (
if (isRoot || isRootPackage || this == NoSymbol) name
else if (owner.isEffectiveRoot) name
- else effectiveOwner.enclClass.fullName(separator) append separator append name
+ else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
)
+
+ def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by periods.
@@ -815,11 +887,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val fs = rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
- final def flags_=(fs: Long) = rawflags = fs
- final def setFlag(mask: Long): this.type = { rawflags = rawflags | mask; this }
- final def resetFlag(mask: Long): this.type = { rawflags = rawflags & ~mask; this }
+ def flags_=(fs: Long) = _rawflags = fs
+
+ /** Set the symbol's flags to the given value, asserting
+ * that the previous value was 0.
+ */
+ def initFlags(mask: Long): this.type = {
+ assert(rawflags == 0L, this)
+ _rawflags = mask
+ this
+ }
+ def setFlag(mask: Long): this.type = { _rawflags = rawflags | mask ; this }
+ def resetFlag(mask: Long): this.type = { _rawflags = rawflags & ~mask ; this }
final def getFlag(mask: Long): Long = flags & mask
- final def resetFlags() { rawflags = rawflags & TopLevelCreationFlags }
+ final def resetFlags() { _rawflags = rawflags & TopLevelCreationFlags }
/** Does symbol have ANY flag in `mask` set? */
final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
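
setFlag, resetFlag, getFlag and hasFlag are all plain bit-mask operations on the Long flags word. A standalone sketch of the arithmetic, using hypothetical flag values rather than the compiler's real Flags constants:

    object FlagMaskSketch {
      // Hypothetical single-bit flag values; the real constants live in Flags.scala.
      final val MUTABLE = 1L << 12
      final val PARAM   = 1L << 13

      def main(args: Array[String]): Unit = {
        var rawflags = 0L
        rawflags |= MUTABLE | PARAM        // setFlag(MUTABLE | PARAM)
        assert((rawflags & MUTABLE) != 0L) // hasFlag(MUTABLE)
        rawflags &= ~PARAM                 // resetFlag(PARAM)
        assert((rawflags & PARAM) == 0L)
        assert((rawflags & (MUTABLE | PARAM)) == MUTABLE) // getFlag(mask) = flags & mask
      }
    }
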
@@ -905,7 +986,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
throw CyclicReference(this, tp)
}
} else {
- rawflags |= LOCKED
+ _rawflags |= LOCKED
// activeLocks += 1
// lockedSyms += this
}
@@ -914,7 +995,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
phase = phaseOf(infos.validFrom)
tp.complete(this)
} finally {
- // if (id == 431) println("completer ran "+tp.getClass+" for "+fullName)
unlock()
phase = current
}
@@ -935,7 +1015,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
assert(info ne null)
infos = TypeHistory(currentPeriod, info, null)
unlock()
- validTo = if (info.isComplete) currentPeriod else NoPeriod
+ _validTo = if (info.isComplete) currentPeriod else NoPeriod
}
/** Set initial info. */
@@ -945,13 +1025,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Substitute second list of symbols for first in current info. */
def substInfo(syms0: List[Symbol], syms1: List[Symbol]) = modifyInfo(_.substSym(syms0, syms1))
def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
+
+ /** Set the info and enter this symbol into the owner's scope. */
+ def setInfoAndEnter(info: Type): this.type = {
+ setInfo(info)
+ owner.info.decls enter this
+ this
+ }
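
setInfoAndEnter bundles two steps that are otherwise easy to do out of order: assign a type to a freshly created member and register it in its owner's declaration scope. A toy standalone model of the pattern (Sym and Scope here are stand-ins, not the compiler's classes):

    object SetInfoAndEnterSketch {
      final class Scope {
        private var syms = List.empty[Sym]
        def enter(s: Sym): Unit = syms ::= s
        def contains(s: Sym): Boolean = syms contains s
      }
      final class Sym(val name: String, ownerDecls: Scope) {
        var info: String = ""
        def setInfoAndEnter(tp: String): this.type = { info = tp; ownerDecls enter this; this }
      }

      def main(args: Array[String]): Unit = {
        val decls  = new Scope
        val getter = new Sym("x", decls).setInfoAndEnter("() => Int")
        assert(decls contains getter)
        assert(getter.info == "() => Int")
      }
    }
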
/** Set new info valid from start of this phase. */
- final def updateInfo(info: Type): Symbol = {
+ def updateInfo(info: Type): Symbol = {
assert(phaseId(infos.validFrom) <= phase.id)
if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
infos = TypeHistory(currentPeriod, info, infos)
- validTo = if (info.isComplete) currentPeriod else NoPeriod
+ _validTo = if (info.isComplete) currentPeriod else NoPeriod
this
}
@@ -989,11 +1076,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
infos = TypeHistory(currentPeriod + 1, info1, infos)
this.infos = infos
}
- validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform
+ _validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform
itr = itr.next
}
- validTo = if (itr.pid == NoPhase.id) curPeriod
- else period(currentRunId, itr.pid)
+ _validTo = if (itr.pid == NoPhase.id) curPeriod
+ else period(currentRunId, itr.pid)
}
} finally {
phase = current
@@ -1004,7 +1091,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
// adapt to new run in fsc.
- private def adaptInfos(infos: TypeHistory): TypeHistory =
+ private def adaptInfos(infos: TypeHistory): TypeHistory = {
+ assert(!inReflexiveMirror)
if (infos == null || runId(infos.validFrom) == currentRunId) {
infos
} else {
@@ -1013,7 +1101,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else {
val pid = phaseId(infos.validFrom)
- validTo = period(currentRunId, pid)
+ _validTo = period(currentRunId, pid)
phase = phaseWithId(pid)
val info1 = (
@@ -1029,6 +1117,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
}
+ }
/** Initialize the symbol */
final def initialize: this.type = {
@@ -1038,6 +1127,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Was symbol's type updated during given phase? */
final def isUpdatedAt(pid: Phase#Id): Boolean = {
+ assert(!inReflexiveMirror)
var infos = this.infos
while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
infos ne null
@@ -1045,6 +1135,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Was symbol's type updated during given phase? */
final def hasTypeAt(pid: Phase#Id): Boolean = {
+ assert(!inReflexiveMirror)
var infos = this.infos
while ((infos ne null) && phaseId(infos.validFrom) > pid) infos = infos.prev
infos ne null
@@ -1132,7 +1223,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
oldsymbuf += sym
newsymbuf += (
if (sym.isClass)
- tp.typeSymbol.newAbstractType(sym.pos, sym.name.toTypeName).setInfo(sym.existentialBound)
+ tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound)
else
sym.cloneSymbol(tp.typeSymbol))
}
@@ -1149,22 +1240,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the bound of the type variable that stands for it
* pre: symbol is a term, a class, or an abstract type (no alias type allowed)
*/
- def existentialBound: Type =
- if (this.isClass)
- polyType(this.typeParams, TypeBounds.upper(this.classBound))
- else if (this.isAbstractType)
- this.info
- else if (this.isTerm)
- singletonBounds(this.tpe)
- else
- abort("unexpected alias type: "+this)
+ def existentialBound: Type
/** Reset symbol to initial state
*/
def reset(completer: Type) {
resetFlags()
infos = null
- validTo = NoPeriod
+ _validTo = NoPeriod
//limit = NoPhase.id
setInfo(completer)
}
@@ -1191,14 +1274,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ----- annotations ------------------------------------------------------------
// null is a marker that they still need to be obtained.
- private var _annotations: List[AnnotationInfo] = Nil
+ private[this] var _annotations: List[AnnotationInfo] = Nil
def annotationsString = if (annotations.isEmpty) "" else annotations.mkString("(", ", ", ")")
/** After the typer phase (before, look at the definition's Modifiers), contains
* the annotations attached to member a definition (class, method, type, field).
*/
- def annotations: List[AnnotationInfo] = _annotations
+ def annotations: List[AnnotationInfo] = {
+ // Necessary for reflection, see SI-5423
+ if (inReflexiveMirror)
+ initialize
+
+ _annotations
+ }
+
def setAnnotations(annots: List[AnnotationInfo]): this.type = {
_annotations = annots
this
@@ -1244,12 +1334,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def isNestedIn(that: Symbol): Boolean =
owner == that || owner != NoSymbol && (owner isNestedIn that)
-
- /** Is this class symbol a subclass of that symbol? */
- final def isNonBottomSubClass(that: Symbol): Boolean = (
- (this eq that) || this.isError || that.isError ||
- info.baseTypeIndex(that) >= 0
- )
+
+ /** Is this class symbol a subclass of that symbol,
+ * and is this class symbol also different from Null or Nothing? */
+ def isNonBottomSubClass(that: Symbol): Boolean = false
/** Overridden in NullClass and NothingClass for custom behavior.
*/
@@ -1292,18 +1380,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
cloneSymbol(owner)
/** A clone of this symbol, but with given owner. */
- final def cloneSymbol(owner: Symbol): Symbol = {
- val newSym = cloneSymbolImpl(owner)
+ final def cloneSymbol(owner: Symbol): Symbol = cloneSymbol(owner, this.rawflags)
+ final def cloneSymbol(owner: Symbol, newFlags: Long): Symbol = {
+ val newSym = cloneSymbolImpl(owner, newFlags)
( newSym
setPrivateWithin privateWithin
setInfo (info cloneInfo newSym)
- setFlag this.rawflags
setAnnotations this.annotations
)
}
-
- /** Internal method to clone a symbol's implementation without flags or type. */
- def cloneSymbolImpl(owner: Symbol): Symbol
+ /** Internal method to clone a symbol's implementation with the given flags and no info. */
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol
+ def cloneSymbolImpl(owner: Symbol): Symbol = cloneSymbolImpl(owner, 0L)
// ------ access to related symbols --------------------------------------------------
@@ -1371,7 +1459,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The symbol accessed by this accessor function, but with given owner type. */
final def accessed(ownerTp: Type): Symbol = {
assert(hasAccessorFlag, this)
- ownerTp decl nme.getterToLocal(getterName)
+ ownerTp decl nme.getterToLocal(getterName.toTermName)
}
/** The module corresponding to this module class (note that this
@@ -1379,7 +1467,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def sourceModule: Symbol = NoSymbol
- /** The implementation class of a trait. */
+ /** The implementation class of a trait. If available it will be the
+ * symbol with the same owner, and the name of this symbol with $class
+ * appended to it.
+ */
final def implClass: Symbol = owner.info.decl(nme.implClassName(name))
/** The class that is logically an outer class of given `clazz`.
@@ -1412,6 +1503,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The superclass of this class. */
def superClass: Symbol = if (info.parents.isEmpty) NoSymbol else info.parents.head.typeSymbol
+ def parentSymbols: List[Symbol] = info.parents map (_.typeSymbol)
/** The directly or indirectly inherited mixins of this class
* except for mixin classes inherited by the superclass. Mixin classes appear
@@ -1486,11 +1578,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isMethod || isClass) this
else owner.logicallyEnclosingMember
+ /** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
+ @deprecated("Use enclosingTopLevelClass")
+ def toplevelClass: Symbol = enclosingTopLevelClass
+
/** The top-level class containing this symbol. */
- def toplevelClass: Symbol =
+ def enclosingTopLevelClass: Symbol =
if (owner.isPackageClass) {
if (isClass) this else moduleClass
- } else owner.toplevelClass
+ } else owner.enclosingTopLevelClass
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
def isCoDefinedWith(that: Symbol) = (
@@ -1608,6 +1704,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* (which is always the interface, by convention)
* - before erasure, it looks up the interface name in the scope of the owner of the class.
* This only works for implementation classes owned by other classes or traits.
+ * !!! Why?
*/
final def toInterface: Symbol =
if (isImplClass) {
@@ -1704,17 +1801,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def getter(base: Symbol): Symbol = base.info.decl(getterName) filter (_.hasAccessorFlag)
- def getterName: Name = (
- if (isSetter) nme.setterToGetter(name)
- else if (nme.isLocalName(name)) nme.localToGetter(name)
- else name
+ def getterName: TermName = (
+ if (isSetter) nme.setterToGetter(name.toTermName)
+ else if (nme.isLocalName(name)) nme.localToGetter(name.toTermName)
+ else name.toTermName
)
/** The setter of this value or getter definition, or NoSymbol if none exists */
final def setter(base: Symbol): Symbol = setter(base, false)
final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
- var sname = nme.getterToSetter(nme.getterName(name))
+ var sname = nme.getterToSetter(nme.getterName(name.toTermName))
if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
base.info.decl(sname) filter (_.hasAccessorFlag)
}
@@ -1757,6 +1854,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
+ /** Remove any access boundary and clear flags PROTECTED | PRIVATE.
+ */
+ def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags
+
+ /** The first parameter of this method's first parameter list,
+ * or NoSymbol if inapplicable.
+ */
+ def firstParam = info.params match {
+ case p :: _ => p
+ case _ => NoSymbol
+ }
+
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters
*/
@@ -1769,7 +1878,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
getter(owner).expandName(base)
setter(owner).expandName(base)
}
- name = nme.expandedName(name, base)
+ name = nme.expandedName(name.toTermName, base)
if (isType) name = name
}
}
@@ -1782,7 +1891,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def sourceFile: AbstractFileType =
if (isModule) moduleClass.sourceFile
- else toplevelClass.sourceFile
+ else enclosingTopLevelClass.sourceFile
def sourceFile_=(f: AbstractFileType) {
abort("sourceFile_= inapplicable for " + this)
@@ -1827,36 +1936,43 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isTerm && (!isParameter || isParamAccessor)) "val"
else ""
+ private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
+ private def symbolKind: SymbolKind = {
+ val kind =
+ if (isInstanceOf[FreeVar]) ("free variable", "free variable", "FV")
+ else if (isPackage) ("package", "package", "PK")
+ else if (isPackageClass) ("package class", "package", "PKC")
+ else if (isPackageObject) ("package object", "package", "PKO")
+ else if (isPackageObjectClass) ("package object class", "package", "PKOC")
+ else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
+ else if (isRefinementClass) ("refinement class", "", "RC")
+ else if (isModule) ("module", "object", "MOD")
+ else if (isModuleClass) ("module class", "object", "MODC")
+ else if (isGetter) ("getter", if (isSourceMethod) "method" else "value", "GET")
+ else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET")
+ else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ")
+ else if (isVariable) ("field", "variable", "VAR")
+ else if (isTrait) ("trait", "trait", "TRT")
+ else if (isClass) ("class", "class", "CLS")
+ else if (isType) ("type", "type", "TPE")
+ else if (isClassConstructor) ("constructor", "constructor", "CTOR")
+ else if (isSourceMethod) ("method", "method", "METH")
+ else if (isTerm) ("value", "value", "VAL")
+ else ("", "", "???")
+ SymbolKind(kind._1, kind._2, kind._3)
+ }
+
/** Accurate string representation of symbols' kind, suitable for developers. */
final def accurateKindString: String =
- if (isPackage) "package"
- else if (isPackageClass) "package class"
- else if (isPackageObject) "package object"
- else if (isPackageObjectClass) "package object class"
- else if (isRefinementClass) "refinement class"
- else if (isModule) "module"
- else if (isModuleClass) "module class"
- else if (isGetter) "getter"
- else if (isSetter) "setter"
- else if (isVariable) "field"
- else sanitizedKindString
+ symbolKind.accurate
/** String representation of symbol's kind, suitable for the masses. */
private def sanitizedKindString: String =
- if (isPackage || isPackageClass) "package"
- else if (isModule || isModuleClass) "object"
- else if (isAnonymousClass) "anonymous class"
- else if (isRefinementClass) ""
- else if (isTrait) "trait"
- else if (isClass) "class"
- else if (isType) "type"
- else if (isInstanceOf[FreeVar]) "free variable"
- else if (isTerm && isLazy) "lazy value"
- else if (isVariable) "variable"
- else if (isClassConstructor) "constructor"
- else if (isSourceMethod) "method"
- else if (isTerm) "value"
- else ""
+ symbolKind.sanitized
+
+ /** Abbreviated string representation of symbol's kind, e.g. for -Yshowsymkinds output. */
+ protected[scala] def abbreviatedKindString: String =
+ symbolKind.abbreviation
final def kindString: String =
if (settings.debug.value) accurateKindString
@@ -1879,12 +1995,25 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* If !settings.debug translates expansions of operators back to operator symbol.
* E.g. $eq => =.
* If settings.uniqid, adds id.
+ * If settings.Yshowsymkinds, adds abbreviated symbol kind.
*/
def nameString: String = (
- if (settings.uniqid.value) decodedName + "#" + id
- else "" + decodedName
+ if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName
+ else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id
+ else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString
+ else decodedName + "#" + id + "#" + abbreviatedKindString
)
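
The four branches of nameString correspond to the possible combinations of -uniqid and -Yshowsymkinds. A standalone sketch of the resulting output shapes (the id and kind used in main are hypothetical):

    object NameStringSketch {
      def nameString(decodedName: String, id: Int, kind: String,
                     uniqid: Boolean, showKinds: Boolean): String =
        (uniqid, showKinds) match {
          case (false, false) => decodedName
          case (true,  false) => decodedName + "#" + id
          case (false, true)  => decodedName + "#" + kind
          case (true,  true)  => decodedName + "#" + id + "#" + kind
        }

      def main(args: Array[String]): Unit = {
        assert(nameString("toList", 5123, "METH", uniqid = true, showKinds = true)
               == "toList#5123#METH")
      }
    }
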
+ def fullNameString: String = {
+ def recur(sym: Symbol): String = {
+ if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) sym.nameString
+ else if (sym.owner.isEffectiveRoot) sym.nameString
+ else recur(sym.effectiveOwner.enclClass) + "." + sym.nameString
+ }
+
+ recur(this)
+ }
+
/** If settings.uniqid is set, the symbol's id, else "" */
final def idString = if (settings.uniqid.value) "#"+id else ""
@@ -1952,6 +2081,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def infosString = infos.toString()
+ def debugLocationString = fullLocationString + " " + debugFlagString
+ def debugFlagString = hasFlagsToString(-1L)
def hasFlagsToString(mask: Long): String = flagsToString(
flags & mask,
if (hasAccessBoundary) privateWithin.toString else ""
@@ -1999,17 +2130,22 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** A class for term symbols */
- class TermSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
+ class TermSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
extends Symbol(initOwner, initPos, initName) {
final override def isTerm = true
- override def name: TermName = super.name
+ override def name: TermName = rawname.toTermName
privateWithin = NoSymbol
- var referenced: Symbol = NoSymbol
+ private[this] var _referenced: Symbol = NoSymbol
+
+ def referenced: Symbol = _referenced
+ def referenced_=(x: Symbol) { _referenced = x }
+
+ def existentialBound = singletonBounds(this.tpe)
- def cloneSymbolImpl(owner: Symbol): Symbol =
- new TermSymbol(owner, pos, name).copyAttrsFrom(this)
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
+ owner.newTermSymbol(name, pos, newFlags).copyAttrsFrom(this)
def copyAttrsFrom(original: TermSymbol): this.type = {
referenced = original.referenced
@@ -2039,13 +2175,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (hasFlag(MODULE)) referenced else NoSymbol
def setModuleClass(clazz: Symbol): TermSymbol = {
- assert(hasFlag(MODULE))
+ assert(hasFlag(MODULE), this)
referenced = clazz
this
}
def setLazyAccessor(sym: Symbol): TermSymbol = {
- assert(isLazy && (referenced == NoSymbol || referenced == sym), this)
+ assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, debugFlagString, referenced, sym))
referenced = sym
this
}
@@ -2087,38 +2223,38 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** A class for module symbols */
- class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
+ class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
extends TermSymbol(initOwner, initPos, initName) {
private var flatname: TermName = null
- // This method could use a better name from someone clearer on what the condition expresses.
- private def isFlatAdjusted = !isMethod && needsFlatClasses
- override def owner: Symbol =
- if (isFlatAdjusted) rawowner.owner
+ override def owner = (
+ if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
-
- override def name: TermName =
- if (isFlatAdjusted) {
- if (flatname == null)
- flatname = flattenName().toTermName
-
+ )
+ override def name: TermName = (
+ if (!isMethod && needsFlatClasses) {
+ if (flatname eq null)
+ flatname = nme.flattenedName(rawowner.name, rawname)
+
flatname
- } else rawname
+ }
+ else rawname.toTermName
+ )
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new ModuleSymbol(owner, pos, name).copyAttrsFrom(this)
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
+ owner.newModuleSymbol(name, pos, newFlags).copyAttrsFrom(this)
}
/** A class for method symbols */
- class MethodSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
+ class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
extends TermSymbol(initOwner, initPos, initName) {
private var mtpePeriod = NoPeriod
private var mtpePre: Type = _
private var mtpeResult: Type = _
private var mtpeInfo: Type = _
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new MethodSymbol(owner, pos, name).copyAttrsFrom(this)
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
+ owner.newMethodSymbol(name, pos, newFlags).copyAttrsFrom(this)
def typeAsMemberOf(pre: Type): Type = {
if (mtpePeriod == currentPeriod) {
@@ -2135,24 +2271,71 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
res
}
}
+
+ class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends TypeSymbol(initOwner, initPos, initName) {
+ // Temporary programmatic help tracking down who might do such a thing
+ override def setFlag(mask: Long): this.type = {
+ if (isSetting(DEFERRED, mask)) {
+ println("Setting DEFERRED on alias at")
+ (new Throwable).printStackTrace
+ }
+ super.setFlag(mask)
+ }
+ final override def isAliasType = true
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AliasTypeSymbol =
+ owner.newAliasTypeSymbol(name, pos, newFlags)
+ }
+
+ class AbstractTypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends TypeSymbol(initOwner, initPos, initName) with AbstractTypeMixin {
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): AbstractTypeSymbol =
+ owner.newAbstractTypeSymbol(name, pos, newFlags)
+ }
+
+ /** Might be mixed into TypeSymbol or TypeSkolem.
+ */
+ trait AbstractTypeMixin extends TypeSymbol {
+ override def resetFlag(mask: Long): this.type = {
+ // Temporary programmatic help tracking down who might do such a thing
+ if (settings.debug.value) {
+ if (isClearing(DEFERRED, mask)) {
+ println("Clearing DEFERRED on abstract type at")
+ (new Throwable).printStackTrace
+ }
+ }
+ super.resetFlag(mask)
+ }
+ final override def isAbstractType = true
+ override def existentialBound = this.info
+ }
/** A class of type symbols. Alias and abstract types are direct instances
* of this class. Classes are instances of a subclass.
*/
- class TypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
- extends Symbol(initOwner, initPos, initName) {
+ abstract class TypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) {
privateWithin = NoSymbol
private var tyconCache: Type = null
private var tyconRunId = NoRunId
private var tpeCache: Type = _
private var tpePeriod = NoPeriod
+ /** Overridden in subclasses for which it makes sense.
+ */
+ def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+debugLocationString)
+
override def name: TypeName = super.name.asInstanceOf[TypeName]
final override def isType = true
override def isNonClassType = true
- override def isAbstractType = isDeferred
- override def isAliasType = !isDeferred
-
+ override def isAbstractType = {
+ if (settings.debug.value) {
+ if (isDeferred) {
+ println("TypeSymbol claims to be abstract type: " + this.getClass + " " + debugFlagString + " at ")
+ (new Throwable).printStackTrace
+ }
+ }
+ isDeferred
+ }
private def newTypeRef(targs: List[Type]) = {
val pre = if (hasFlag(PARAM | EXISTENTIAL)) NoPrefix else owner.thisType
typeRef(pre, this, targs)
@@ -2208,9 +2391,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
override def tpeHK = typeConstructor // @M! used in memberType
- // needed for experimental code for early types as type parameters
- // def refreshType() { tpePeriod = NoPeriod }
-
override def typeConstructor: Type = {
if ((tyconCache eq null) || tyconRunId != currentRunId) {
tyconCache = newTypeRef(Nil)
@@ -2226,6 +2406,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
super.info_=(tp)
}
+ final override def isNonBottomSubClass(that: Symbol): Boolean = (
+ (this eq that) || this.isError || that.isError ||
+ info.baseTypeIndex(that) >= 0
+ )
+
override def reset(completer: Type) {
super.reset(completer)
tpePeriod = NoPeriod
@@ -2247,8 +2432,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- def cloneSymbolImpl(owner: Symbol): Symbol =
- new TypeSymbol(owner, pos, name)
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
+ owner.newTypeSymbol(name, pos, newFlags)
incCounter(typeSymbolCount)
}
@@ -2266,7 +2451,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
*/
- class TypeSkolem(initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef)
+ class TypeSkolem protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef)
extends TypeSymbol(initOwner, initPos, initName) {
/** The skolemization level in place when the skolem was constructed */
@@ -2286,8 +2471,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
override def typeParams = info.typeParams
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new TypeSkolem(owner, pos, name, origin)
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol =
+ owner.newTypeSkolemSymbol(name, origin, pos, newFlags)
override def nameString: String =
if (settings.debug.value) (super.nameString + "&" + level)
@@ -2295,16 +2480,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** A class for class symbols */
- class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
+ class ClassSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
-
- private var source: AbstractFileType = null
- private var thissym: Symbol = this
+ private[this] var flatname: TypeName = null
+ private[this] var source: AbstractFileType = null
+ private[this] var thissym: Symbol = this
final override def isClass = true
final override def isNonClassType = false
final override def isAbstractType = false
final override def isAliasType = false
+
+ override def existentialBound = polyType(this.typeParams, TypeBounds.upper(this.classBound))
override def sourceFile =
if (owner.isPackageClass) source
@@ -2316,20 +2503,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thissym = this
}
- private var flatname: TypeName = null
-
- override def owner: Symbol =
- if (needsFlatClasses) rawowner.owner
- else rawowner
-
- override def name: TypeName =
- if (needsFlatClasses) {
- if (flatname == null)
- flatname = flattenName().toTypeName
- flatname
- }
- else rawname.asInstanceOf[TypeName]
-
private var thisTypeCache: Type = _
private var thisTypePeriod = NoPeriod
@@ -2345,7 +2518,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
thisTypeCache
}
-
+
+ override def owner: Symbol =
+ if (needsFlatClasses) rawowner.owner else rawowner
+ override def name: TypeName = (
+ if (needsFlatClasses) {
+ if (flatname eq null)
+ flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
+
+ flatname
+ }
+ else rawname.toTypeName
+ )
+
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2361,7 +2546,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
typeOfThisCache
}
- else thissym.tpe
+ else thisSym.tpe
}
/** Sets the self type of the class */
@@ -2369,8 +2554,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thissym = newThisSym(pos).setInfo(tp)
}
- override def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new ClassSymbol(owner, pos, name)
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = {
+ val clone = owner.newClassSymbol(name, pos, newFlags)
if (thisSym != this) {
clone.typeOfThis = typeOfThis
clone.thisSym.name = thisSym.name
@@ -2381,7 +2566,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def sourceModule =
if (isModuleClass) companionModule else NoSymbol
- private var childSet: Set[Symbol] = Set()
+ private[this] var childSet: Set[Symbol] = Set()
override def children = childSet
override def addChild(sym: Symbol) { childSet = childSet + sym }
@@ -2392,46 +2577,45 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* Note: Not all module classes are of this type; when unpickled, we get
* plain class symbols!
*/
- class ModuleClassSymbol(owner: Symbol, pos: Position, name: TypeName)
+ class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName)
extends ClassSymbol(owner, pos, name) {
private var module: Symbol = null
- def this(module: TermSymbol) = {
- this(module.owner, module.pos, module.name.toTypeName)
- setFlag(module.getFlag(ModuleToClassFlags) | MODULE)
- sourceModule = module
- }
- override def sourceModule = module
private var implicitMembersCacheValue: List[Symbol] = List()
private var implicitMembersCacheKey1: Type = NoType
private var implicitMembersCacheKey2: ScopeEntry = null
+
def implicitMembers: List[Symbol] = {
val tp = info
if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
- implicitMembersCacheKey1 = tp
- implicitMembersCacheKey2 = tp.decls.elems
- implicitMembersCacheValue = tp.implicitMembers
+ // Skip a package object class, because the members are also in
+ // the package and we wish to avoid spurious ambiguities as in pos/t3999.
+ if (!isPackageObjectClass) {
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
+ implicitMembersCacheValue = tp.implicitMembers
+ }
}
implicitMembersCacheValue
}
+ override def sourceModule = module
override def sourceModule_=(module: Symbol) { this.module = module }
}
- class FreeVar(name: TermName, tpe: Type, val value: Any) extends TermSymbol(definitions.RootClass, NoPosition, name) {
- setInfo(tpe)
-
- override def hashCode = value.hashCode
-
+ class FreeVar(name0: TermName, val value: Any) extends TermSymbol(NoSymbol, NoPosition, name0) {
+ override def hashCode = if (value == null) 0 else value.hashCode
override def equals(other: Any): Boolean = other match {
case that: FreeVar => this.value.asInstanceOf[AnyRef] eq that.value.asInstanceOf[AnyRef]
- case _ => false
+ case _ => false
}
}
/** An object representing a missing symbol */
- object NoSymbol extends Symbol(null, NoPosition, nme.NO_NAME) {
- setInfo(NoType)
- privateWithin = this
- override def info_=(info: Type) {
+ class NoSymbol protected[Symbols]() extends Symbol(null, NoPosition, nme.NO_NAME) {
+ synchronized {
+ setInfo(NoType)
+ privateWithin = this
+ }
+ override def info_=(info: Type) = {
infos = TypeHistory(1, NoType, null)
unlock()
validTo = currentPeriod
@@ -2441,22 +2625,31 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def defString: String = toString
override def locationString: String = ""
override def enclClass: Symbol = this
- override def toplevelClass: Symbol = this
+ override def enclosingTopLevelClass: Symbol = this
override def enclMethod: Symbol = this
- override def owner: Symbol = abort("no-symbol does not have owner")
override def sourceFile: AbstractFileType = null
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
override def reset(completer: Type) {}
override def info: Type = NoType
+ override def existentialBound: Type = NoType
override def rawInfo: Type = NoType
protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol): Symbol = abort()
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort()
override def originalEnclosingMethod = this
+
+ override def owner: Symbol =
+ abort("no-symbol does not have an owner (this is a bug: scala " + scala.util.Properties.versionString + ")")
+ override def typeConstructor: Type =
+ abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
}
+ protected def makeNoSymbol = new NoSymbol
+
+ lazy val NoSymbol = makeNoSymbol
+
/** Derives a new list of symbols from the given list by mapping the given
* list across the given function. Then fixes the info of all the new symbols
* by substituting the new symbols for the original symbols.
@@ -2520,17 +2713,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val syms1 = cloneSymbolsAtOwner(syms, owner)
creator(syms1, tpe.substSym(syms, syms1))
}
-
- /** Create a new existential type skolem with the given owner and origin.
+
+ /** A deep map on a symbol's paramss.
*/
- def newExistentialSkolem(sym: Symbol, owner: Symbol, origin: AnyRef): TypeSkolem = {
- val skolem = new TypeSkolem(owner, sym.pos, sym.name.toTypeName, origin)
- ( skolem
- setInfo (sym.info cloneInfo skolem)
- setFlag (sym.flags | EXISTENTIAL)
- resetFlag PARAM
- )
- }
+ def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
/** An exception for cyclic references of symbol definitions */
case class CyclicReference(sym: Symbol, info: Type)
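
Editor's illustrative sketch (not part of the patch): the four-way suffix logic that the reworked nameString above applies, modeled with plain strings standing in for decodedName, id and abbreviatedKindString. The "CLS" kind tag and the NameStringSketch object are invented placeholders, not the compiler's actual values or API.

object NameStringSketch {
  // Mirrors the if/else chain in the new nameString: optional "#id" and "#kind" suffixes.
  def nameString(decodedName: String, id: Int, kind: String,
                 uniqid: Boolean, showKinds: Boolean): String =
    if (!uniqid && !showKinds) decodedName
    else if (uniqid && !showKinds) decodedName + "#" + id
    else if (!uniqid && showKinds) decodedName + "#" + kind
    else decodedName + "#" + id + "#" + kind

  def main(args: Array[String]): Unit = {
    // with both the uniqid and Yshowsymkinds settings enabled:
    println(nameString("List", 1234, "CLS", uniqid = true, showKinds = true)) // List#1234#CLS
  }
}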
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 1dc93a7add..e3ee39d2a0 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -107,7 +107,15 @@ abstract class TreeInfo {
@deprecated("Use isExprSafeToInline instead", "2.10.0")
def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
- def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = {
+ def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
+ mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+
+ def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
+ val b = List.newBuilder[R]
+ foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
+ b.result
+ }
+ def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
val plen = params.length
val alen = args.length
def fail() = {
@@ -116,27 +124,29 @@ abstract class TreeInfo {
" params = " + params + "\n" +
" args = " + args + "\n"
)
- params zip args
+ false
}
- if (plen == alen) params zip args
- else if (params.isEmpty) fail
+ if (plen == alen) foreach2(params, args)(f)
+ else if (params.isEmpty) return fail
else if (isVarArgsList(params)) {
val plenInit = plen - 1
if (alen == plenInit) {
if (alen == 0) Nil // avoid calling mismatched zip
- else params.init zip args
+ else foreach2(params.init, args)(f)
}
- else if (alen < plenInit) fail
+ else if (alen < plenInit) return fail
else {
- val front = params.init zip (args take plenInit)
- val back = args drop plenInit map (a => (params.last, a))
- front ++ back
+ foreach2(params.init, args take plenInit)(f)
+ val remainingArgs = args drop plenInit
+ foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
}
}
- else fail
- }
+ else return fail
+ true
+ }
+
/**
* Selects the correct parameter list when there are nested applications.
* Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
@@ -144,22 +154,28 @@ abstract class TreeInfo {
* applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
* correspond to the third parameter list.
*
+ * The argument fn is the function part of the apply node being considered.
+ *
* Also accounts for varargs.
*/
+ private def applyMethodParameters(fn: Tree): List[Symbol] = {
+ val depth = applyDepth(fn)
+ // There could be applies which go beyond the parameter list(s),
+ // being applied to the result of the method call.
+ // !!! Note that this still doesn't seem correct, although it should
+ // be closer than what it replaced.
+ if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
+ else if (fn.symbol.paramss.isEmpty) Nil
+ else fn.symbol.paramss.last
+ }
+
def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
- case Apply(fn, args) =>
- val depth = applyDepth(fn)
- // There could be applies which go beyond the parameter list(s),
- // being applied to the result of the method call.
- // !!! Note that this still doesn't seem correct, although it should
- // be closer than what it replaced.
- val params = (
- if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
- else if (fn.symbol.paramss.isEmpty) Nil
- else fn.symbol.paramss.last
- )
- zipMethodParamsAndArgs(params, args)
- case _ => Nil
+ case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
+ case _ => Nil
+ }
+ def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
+ case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
+ case _ =>
}
/** Is symbol potentially a getter of a variable?
@@ -176,7 +192,7 @@ abstract class TreeInfo {
def isVariableOrGetter(tree: Tree) = {
def sym = tree.symbol
def isVar = sym.isVariable
- def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name)) != NoSymbol
+ def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name.toTermName)) != NoSymbol
tree match {
case Ident(_) => isVar
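
For orientation only (not from the patch): a hedged sketch of the parameter/argument pairing that foreachMethodParamAndArg above performs, including the vararg case where the last parameter is repeated for each trailing argument. The toy pairParamsAndArgs below works on plain lists and is an assumption of this sketch, not compiler API.

object ParamArgSketch {
  // Returns None where the real code calls fail(); repeats the last param for varargs.
  def pairParamsAndArgs[P, A](params: List[P], args: List[A], isVarArgs: Boolean): Option[List[(P, A)]] =
    if (params.length == args.length) Some(params zip args)
    else if (params.isEmpty) None
    else if (isVarArgs) {
      val init = params.init
      if (args.length < init.length) None
      else {
        val (front, rest) = args.splitAt(init.length)
        Some((init zip front) ::: rest.map(a => (params.last, a)))
      }
    }
    else None

  def main(args: Array[String]): Unit = {
    // three args against two params, the second of which is a vararg
    println(pairParamsAndArgs(List("x", "xs"), List(1, 2, 3), isVarArgs = true))
    // Some(List((x,1), (xs,2), (xs,3)))
  }
}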
diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala
index dcc395ddd2..2b1d833c73 100644
--- a/src/compiler/scala/reflect/internal/TreePrinters.scala
+++ b/src/compiler/scala/reflect/internal/TreePrinters.scala
@@ -24,13 +24,32 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
def quotedName(name: Name): String = quotedName(name, false)
+ private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
+ val sym = tree.symbol
+ if (sym != null && sym != NoSymbol) {
+ val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
+ var suffix = ""
+ if (settings.uniqid.value) suffix += ("#" + sym.id)
+ if (settings.Yshowsymkinds.value) suffix += ("#" + sym.abbreviatedKindString)
+ prefix + tree.symbol.decodedName + suffix
+ } else {
+ quotedName(name, decoded)
+ }
+ }
+
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+
/** Turns a path into a String, introducing backquotes
* as necessary.
*/
- def backquotedPath(t: Tree): String = t match {
- case Select(qual, name) => "%s.%s".format(backquotedPath(qual), quotedName(name))
- case Ident(name) => quotedName(name)
- case _ => t.toString
+ def backquotedPath(t: Tree): String = {
+ t match {
+ case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), symName(t, name))
+ case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), symName(t, name))
+ case Ident(name) => symName(t, name)
+ case _ => t.toString
+ }
}
class TreePrinter(out: PrintWriter) extends super.TreePrinter {
@@ -118,18 +137,6 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
}
private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false)
- private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
- def nameFn(sym: Symbol) = {
- val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
- val suffix = if (uniqueIds) "#"+sym.id else ""
- prefix + tree.symbol.decodedName + suffix
- }
- symFn(tree, nameFn, quotedName(name, decoded))
- }
-
- def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
- def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
-
def printOpt(prefix: String, tree: Tree) {
if (!tree.isEmpty) { print(prefix, tree) }
}
@@ -292,6 +299,9 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case Assign(lhs, rhs) =>
print(lhs, " = ", rhs)
+ case AssignOrNamedArg(lhs, rhs) =>
+ print(lhs, " = ", rhs)
+
case If(cond, thenp, elsep) =>
print("if (", cond, ")"); indent; println()
print(thenp); undent
@@ -397,7 +407,6 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
// case SelectFromArray(qualifier, name, _) =>
// print(qualifier); print(".<arr>"); print(symName(tree, name))
-
case tree =>
xprintTree(this, tree)
}
@@ -413,7 +422,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
case name: Name =>
print(quotedName(name))
case arg =>
- out.print(arg.toString)
+ out.print(if (arg == null) "null" else arg.toString)
}
}
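
A small sketch, not from the patch, of the separator choice the reworked backquotedPath makes: "." before term selections and "#" before type selections. The toy Node/Sel types are invented for illustration; the real method operates on compiler Trees and also routes names through symName.

object PathSketch {
  sealed trait Node
  final case class Id(name: String) extends Node
  final case class Sel(qual: Node, name: String, isType: Boolean) extends Node

  def path(n: Node): String = n match {
    case Id(name)               => name
    case Sel(qual, name, false) => path(qual) + "." + name // term member
    case Sel(qual, name, true)  => path(qual) + "#" + name // type member
  }

  def main(args: Array[String]): Unit =
    // a type member selected from a term path prints with '#' before the type name
    println(path(Sel(Sel(Id("scala"), "collection", isType = false), "Map", isType = true)))
}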
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 566ee0e9cf..958d04732b 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -72,9 +72,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
def withPosition(flag: Long, position: Position) =
copy() setPositions positions + (flag -> position)
- override def hasModifier(mod: Modifier.Value) =
+ override def hasModifier(mod: Modifier) =
hasFlag(flagOfModifier(mod))
- override def allModifiers: Set[Modifier.Value] =
+ override def modifiers: Set[Modifier] =
Modifier.values filter hasModifier
override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
Modifiers(flags, privateWithin, f(annotations)) setPositions positions
@@ -85,7 +85,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
def Modifiers(flags: Long): Modifiers = Modifiers(flags, tpnme.EMPTY)
- def Modifiers(mods: Set[Modifier.Value],
+ def Modifiers(mods: Set[Modifier],
privateWithin: Name,
annotations: List[Tree]): Modifiers = {
val flagSet = mods map flagOfModifier
@@ -121,9 +121,17 @@ trait Trees extends api.Trees { self: SymbolTable =>
new ChangeOwnerTraverser(oldOwner, newOwner) apply t
}
}
-
+
+ def substTreeSyms(pairs: (Symbol, Symbol)*): Tree = {
+ val list = pairs.toList
+ val subst = new TreeSymSubstituter(list map (_._1), list map (_._2))
+ subst(tree)
+ }
def shallowDuplicate: Tree = new ShallowDuplicator(tree) transform tree
def shortClass: String = tree.getClass.getName split "[.$]" last
+
+ def isErrorTyped = (tree.tpe ne null) && tree.tpe.isError
+
/** When you want to know a little more than the class, but a lot
* less than the whole tree.
*/
@@ -160,12 +168,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
*/
def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
atPos(sym.pos) {
- ModuleDef(Modifiers(sym.flags), sym.name, impl) setSymbol sym
+ ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
}
def ValDef(sym: Symbol, rhs: Tree): ValDef =
atPos(sym.pos) {
- ValDef(Modifiers(sym.flags), sym.name,
+ ValDef(Modifiers(sym.flags), sym.name.toTermName,
TypeTree(sym.tpe) setPos focusPos(sym.pos),
rhs) setSymbol sym
}
@@ -181,8 +189,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
- DefDef(Modifiers(sym.flags),
- sym.name,
+ DefDef(mods,
+ sym.name.toTermName,
sym.typeParams map TypeDef,
vparamss,
TypeTree(sym.tpe.finalResultType) setPos focusPos(sym.pos),
@@ -193,14 +201,13 @@ trait Trees extends api.Trees { self: SymbolTable =>
DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
- DefDef(sym, mods, sym.paramss map (_.map(ValDef)), rhs)
+ DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
def DefDef(sym: Symbol, rhs: Tree): DefDef =
DefDef(sym, Modifiers(sym.flags), rhs)
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = {
+ def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef =
DefDef(sym, rhs(sym.info.paramss))
- }
/** A TypeDef node which defines given `sym` with given right hand side `rhs`. */
def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
@@ -214,7 +221,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
atPos(sym.pos) {
- LabelDef(sym.name, params map Ident, rhs) setSymbol sym
+ LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
}
@@ -224,15 +231,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
def Bind(sym: Symbol, body: Tree): Bind =
Bind(sym.name, body) setSymbol sym
-
- /** Factory method for object creation `new tpt(args_1)...(args_n)`
- * A `New(t, as)` is expanded to: `(new t).<init>(as)`
- */
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
- assert(!argss.isEmpty)
- val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
- (superRef /: argss) (Apply)
- }
/** 0-1 argument list new, based on a symbol.
*/
def New(sym: Symbol, args: Tree*): Tree =
@@ -244,14 +242,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
def Super(sym: Symbol, mix: TypeName): Tree = Super(This(sym), mix)
- def This(sym: Symbol): Tree = This(sym.name.toTypeName) setSymbol sym
-
/** Block factory that flattens directly nested blocks.
*/
- def Block(stats: Tree*): Block = stats match {
- case Seq(b @ Block(_, _)) => b
- case Seq(stat) => Block(stats.toList, Literal(Constant(())))
- case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
+ def Block(stats: Tree*): Block = {
+ if (stats.isEmpty) Block(Nil, Literal(Constant(())))
+ else stats match {
+ case Seq(b @ Block(_, _)) => b
+ case Seq(stat) => Block(stats.toList, Literal(Constant(())))
+ case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
+ }
}
// --- specific traversers and transformers
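
As a sketch only (not part of the patch): the empty-varargs guard the new Block factory above adds, modeled on a toy block type where the string "()" plays the role of Literal(Constant(())). Blk and mkBlock are hypothetical names for illustration.

object BlockSketch {
  final case class Blk(stats: List[String], expr: String)

  def mkBlock(stats: String*): Blk =
    if (stats.isEmpty) Blk(Nil, "()")           // previously an unhandled case
    else if (stats.size == 1) Blk(stats.toList, "()")
    else Blk(stats.init.toList, stats.last)

  def main(args: Array[String]): Unit = {
    println(mkBlock())              // Blk(List(),())
    println(mkBlock("a", "b", "c")) // Blk(List(a, b),c)
  }
}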
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 47184eee51..adf9df185a 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -87,11 +87,20 @@ trait Types extends api.Types { self: SymbolTable =>
private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
private final val printLubs = sys.props contains "scalac.debug.lub"
+ private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
/** In case anyone wants to turn off lub verification without reverting anything. */
private final val verifyLubs = true
+ /** In case anyone wants to turn off type parameter bounds being used
+ * to seed type constraints.
+ */
+ private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ /** Empty immutable maps to avoid allocations. */
+ private val emptySymMap = immutable.Map[Symbol, Symbol]()
+ private val emptySymCount = immutable.Map[Symbol, Int]()
+
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
@@ -100,15 +109,19 @@ trait Types extends api.Types { self: SymbolTable =>
/** A log of type variable with their original constraints. Used in order
* to undo constraints in the case of isSubType/isSameType failure.
*/
- object undoLog {
- private type UndoLog = List[(TypeVar, TypeConstraint)]
- private[scala] var log: UndoLog = List()
-
+ lazy val undoLog = newUndoLog
+
+ protected def newUndoLog = new UndoLog
+
+ class UndoLog {
+ private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ private var log: UndoPairs = List()
+
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
/** Undo all changes to constraints to type variables upto `limit`. */
- private def undoTo(limit: UndoLog) {
+ private def undoTo(limit: UndoPairs) {
while ((log ne limit) && log.nonEmpty) {
val (tv, constr) = log.head
tv.constr = constr
@@ -116,9 +129,14 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- private[Types] def record(tv: TypeVar) = {
+ /** No sync necessary, because record should only
+ * be called from within an undo or undoUnless block,
+ * which is already synchronized.
+ */
+ private[reflect] def record(tv: TypeVar) = {
log ::= ((tv, tv.constr.cloneInternal))
}
+
private[scala] def clear() {
if (settings.debug.value)
self.log("Clearing " + log.size + " entries from the undoLog.")
@@ -240,10 +258,10 @@ trait Types extends api.Types { self: SymbolTable =>
abstract class AbsTypeImpl extends AbsType { this: Type =>
def declaration(name: Name): Symbol = decl(name)
def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
- def allDeclarations = decls
- def allMembers = members
+ def declarations = decls
def typeArguments = typeArgs
def erasedType = transformedType(this)
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
}
/** The base class for all types */
@@ -313,13 +331,20 @@ trait Types extends api.Types { self: SymbolTable =>
/** The type symbol associated with the type
* Note that the symbol of the normalized type is returned (@see normalize)
+ * A type's typeSymbol should, if possible, not be inspected directly, because
+ * what is true for tp.typeSymbol is frequently not true for tp.sym, owing to
+ * normalization.
*/
def typeSymbol: Symbol = NoSymbol
- /** The term symbol ''directly'' associated with the type. */
+ /** The term symbol ''directly'' associated with the type.
+ */
def termSymbolDirect: Symbol = termSymbol
- /** The type symbol ''directly'' associated with the type. */
+ /** The type symbol ''directly'' associated with the type.
+ * In other words, no normalization is performed: if this is an alias type,
+ * the symbol returned is that of the alias, not the underlying type.
+ */
def typeSymbolDirect: Symbol = typeSymbol
/** The base type underlying a type proxy, identity on all other types */
@@ -403,6 +428,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** For a typeref, its arguments. The empty list for all other types */
def typeArgs: List[Type] = List()
+
+ /** A list of placeholder types derived from the type parameters.
+ * Used by RefinedType and TypeRef.
+ */
+ protected def dummyArgs: List[Type] = typeParams map (_.typeConstructor)
/** For a (nullary) method or poly type, its direct result type,
* the type itself for all other types. */
@@ -484,7 +514,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
-
/** For a classtype or refined type, its defined or declared members;
* inherited by subtypes and typerefs.
* The empty scope for all other types.
@@ -496,6 +525,9 @@ trait Types extends api.Types { self: SymbolTable =>
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
+
+ /** A list of all non-private members defined or declared in this type. */
+ def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList
/** The non-private defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
@@ -507,47 +539,67 @@ trait Types extends api.Types { self: SymbolTable =>
* Members appear in linearization order of their owners.
* Members with the same owner appear in reverse order of their declarations.
*/
- def members: List[Symbol] = findMember(nme.ANYNAME, 0, 0, false).alternatives
+ def members: List[Symbol] = membersBasedOnFlags(0, 0)
/** A list of all non-private members of this type (defined or inherited) */
- def nonPrivateMembers: List[Symbol] =
- findMember(nme.ANYNAME, PRIVATE | BridgeFlags, 0, false).alternatives
+ def nonPrivateMembers: List[Symbol] = membersBasedOnFlags(BridgeAndPrivateFlags, 0)
/** A list of all non-private members of this type (defined or inherited),
* admitting members with given flags `admit`
*/
- def nonPrivateMembersAdmitting(admit: Long): List[Symbol] =
- findMember(nme.ANYNAME, (PRIVATE | BridgeFlags) & ~admit, 0, false).alternatives
+ def nonPrivateMembersAdmitting(admit: Long): List[Symbol] = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: List[Symbol] =
- findMember(nme.ANYNAME, BridgeFlags, IMPLICIT, false).alternatives
+ def implicitMembers: List[Symbol] = membersBasedOnFlags(BridgeFlags, IMPLICIT)
/** A list of all deferred symbols of this type (defined or inherited) */
- def deferredMembers: List[Symbol] =
- findMember(nme.ANYNAME, BridgeFlags, DEFERRED, false).alternatives
+ def deferredMembers: List[Symbol] = membersBasedOnFlags(BridgeFlags, DEFERRED)
/** The member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
- def member(name: Name): Symbol = findMember(name, BridgeFlags, 0, false)
+ def member(name: Name): Symbol =
+ memberBasedOnName(name, BridgeFlags)
/** The non-private member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Bridges are excluded from the result
*/
def nonPrivateMember(name: Name): Symbol =
- findMember(name, PRIVATE | BridgeFlags, 0, false)
+ memberBasedOnName(name, BridgeAndPrivateFlags)
+
+ /** All members with the given flags, excluding bridges.
+ */
+ def membersWithFlags(requiredFlags: Long): List[Symbol] =
+ membersBasedOnFlags(BridgeFlags, requiredFlags)
- /** The non-private member with given name, admitting members with given flags `admit`
- * an OverloadedSymbol if several exist, NoSymbol if none exist
+ /** All non-private members with the given flags, excluding bridges.
+ */
+ def nonPrivateMembersWithFlags(requiredFlags: Long): List[Symbol] =
+ membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
+
+ /** The non-private member with given name, admitting members with given flags `admit`.
+ * "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
+ * flag are usually excluded from findMember results, but supplying any of those flags
+ * to this method disables that exclusion.
+ *
+ * An OverloadedSymbol if several exist, NoSymbol if none exists.
*/
def nonPrivateMemberAdmitting(name: Name, admit: Long): Symbol =
- findMember(name, (PRIVATE | BridgeFlags) & ~admit, 0, false)
+ memberBasedOnName(name, BridgeAndPrivateFlags & ~admit)
/** The non-local member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def nonLocalMember(name: Name): Symbol =
- findMember(name, LOCAL | BridgeFlags, 0, false)
+ memberBasedOnName(name, BridgeFlags | LOCAL)
+
+ /** Members excluding and requiring the given flags.
+ * Note: unfortunately it doesn't work to exclude DEFERRED this way.
+ */
+ def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): List[Symbol] =
+ findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
+
+ def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
+ findMember(name, excludedFlags, 0, false)
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -568,7 +620,7 @@ trait Types extends api.Types { self: SymbolTable =>
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
*/
- def asSeenFrom(pre: Type, clazz: Symbol): Type =
+ def asSeenFrom(pre: Type, clazz: Symbol): Type = {
if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
@@ -584,6 +636,7 @@ trait Types extends api.Types { self: SymbolTable =>
stopTimer(asSeenFromNanos, start)
result
}
+ }
/** The info of `sym`, seen as a member of this type.
*
@@ -829,16 +882,7 @@ trait Types extends api.Types { self: SymbolTable =>
* after `maxTostringRecursions` recursion levels. Uses `safeToString`
* to produce a string on each level.
*/
- override def toString: String =
- if (tostringRecursions >= maxTostringRecursions)
- "..."
- else
- try {
- tostringRecursions += 1
- safeToString
- } finally {
- tostringRecursions -= 1
- }
+ override def toString: String = typeToString(this)
/** Method to be implemented in subclasses.
* Converts this type to a string in calling toString for its parts.
@@ -899,30 +943,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- var suspension: mutable.HashSet[TypeVar] = null
// if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
// replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
// without this, the matchesType call would lead to type variables on both sides
// of a subtyping/equality judgement, which can lead to recursive types being constructed.
// See (t0851) for a situation where this happens.
- if (!this.isGround) {
- // PP: The foreach below was formerly expressed as:
- // for(tv @ TypeVar(_, _) <- this) { suspension suspend tv }
- //
- // The tree checker failed this saying a TypeVar is required, but a (Type @unchecked) was found.
- // This is a consequence of using a pattern match and variable binding + ticket #1503, which
- // was addressed by weakening the type of bindings in pattern matches if they occur on the right.
- // So I'm not quite sure why this works at all, as the checker is right that it is mistyped.
- // For now I modified it as below, which achieves the same without error.
- //
- // make each type var in this type use its original type for comparisons instead of collecting constraints
- val susp = new mutable.HashSet[TypeVar] // use a local val so it remains unboxed
- this foreach {
- case tv: TypeVar => tv.suspended = true; susp += tv
- case _ =>
- }
- suspension = susp
- }
+ val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
incCounter(findMemberCount)
val start = startTimer(findMemberNanos)
@@ -966,7 +992,9 @@ trait Types extends api.Types { self: SymbolTable =>
if (membertpe eq null) membertpe = self.memberType(member)
(membertpe matches self.memberType(sym))
})) {
- members = new Scope(List(member, sym))
+ members = newScope
+ members enter member
+ members enter sym
}
} else {
var prevEntry = members.lookupEntry(sym.name)
@@ -1006,7 +1034,6 @@ trait Types extends api.Types { self: SymbolTable =>
baseClasses.head.newOverloaded(this, members.toList)
}
}
-
/** The existential skolems and existentially quantified variables which are free in this type */
def existentialSkolems: List[Symbol] = {
var boundSyms: List[Symbol] = List()
@@ -1080,7 +1107,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** A base class for types that represent a single value
* (single-types and this-types).
*/
- abstract class SingletonType extends SubType with SimpleTypeProxy with AbsSingletonType {
+ abstract class SingletonType extends SubType with SimpleTypeProxy {
def supertype = underlying
override def isTrivial = false
override def isStable = true
@@ -1179,7 +1206,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (settings.debug.value) sym.nameString + ".this."
else if (sym.isAnonOrRefinementClass) "this."
else if (sym.isOmittablePrefix) ""
- else if (sym.isModuleClass) sym.fullName + "."
+ else if (sym.isModuleClass) sym.fullNameString + "."
else sym.nameString + ".this."
override def safeToString: String =
if (sym.isRoot) "<root>"
@@ -1206,18 +1233,15 @@ trait Types extends api.Types { self: SymbolTable =>
override val isTrivial: Boolean = pre.isTrivial
// override def isNullable = underlying.isNullable
override def isNotNull = underlying.isNotNull
- private var underlyingCache: Type = NoType
- private var underlyingPeriod = NoPeriod
+ private[reflect] var underlyingCache: Type = NoType
+ private[reflect] var underlyingPeriod = NoPeriod
override def underlying: Type = {
- val period = underlyingPeriod
- if (period != currentPeriod) {
- underlyingPeriod = currentPeriod
- if (!isValid(period)) {
- underlyingCache = pre.memberType(sym).resultType;
- assert(underlyingCache ne this, this)
- }
+ val cache = underlyingCache
+ if (underlyingPeriod == currentPeriod && cache != null) cache
+ else {
+ defineUnderlyingOfSingleType(this)
+ underlyingCache
}
- underlyingCache
}
// more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
@@ -1256,6 +1280,17 @@ trait Types extends api.Types { self: SymbolTable =>
unique(new UniqueSingleType(pre, sym))
}
}
+
+ protected def defineUnderlyingOfSingleType(tpe: SingleType) = {
+ val period = tpe.underlyingPeriod
+ if (period != currentPeriod) {
+ tpe.underlyingPeriod = currentPeriod
+ if (!isValid(period)) {
+ tpe.underlyingCache = tpe.pre.memberType(tpe.sym).resultType;
+ assert(tpe.underlyingCache ne tpe, tpe)
+ }
+ }
+ }
abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
@@ -1287,6 +1322,7 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
+ def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass)
// override def isNullable: Boolean = NullClass.tpe <:< lo;
override def safeToString = ">: " + lo + " <: " + hi
override def kind = "TypeBoundsType"
@@ -1307,105 +1343,36 @@ trait Types extends api.Types { self: SymbolTable =>
*/
abstract class CompoundType extends Type {
- var baseTypeSeqCache: BaseTypeSeq = _
- private var baseTypeSeqPeriod = NoPeriod
- private var baseClassesCache: List[Symbol] = _
- private var baseClassesPeriod = NoPeriod
+ private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
+ private[reflect] var baseTypeSeqPeriod = NoPeriod
+ private[reflect] var baseClassesCache: List[Symbol] = _
+ private[reflect] var baseClassesPeriod = NoPeriod
override def baseTypeSeq: BaseTypeSeq = {
- val period = baseTypeSeqPeriod;
- if (period != currentPeriod) { // no caching in IDE
- baseTypeSeqPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- if (parents.exists(_.exists(_.isInstanceOf[TypeVar]))) {
- // rename type vars to fresh type params, take base type sequence of
- // resulting type, and rename back all the entries in that sequence
- var tvs = Set[TypeVar]()
- for (p <- parents)
- for (t <- p) t match {
- case tv: TypeVar => tvs += tv
- case _ =>
- }
- val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
- val paramToVarMap = varToParamMap map (_.swap)
- val varToParam = new TypeMap {
- def apply(tp: Type) = varToParamMap get tp match {
- case Some(sym) => sym.tpe
- case _ => mapOver(tp)
- }
- }
- val paramToVar = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym)
- case _ => mapOver(tp)
- }
- }
- val bts = copyRefinedType(this.asInstanceOf[RefinedType], parents map varToParam, varToParam mapOver decls).baseTypeSeq
- baseTypeSeqCache = bts lateMap paramToVar
- } else {
- incCounter(compoundBaseTypeSeqCount)
- baseTypeSeqCache = undetBaseTypeSeq
- baseTypeSeqCache = if (typeSymbol.isRefinementClass)
- memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe)
- else
- compoundBaseTypeSeq(this)
- // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
- // when compiling with
- // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
- // I have not yet figured out precisely why this is the case.
- // My current assumption is that taking memos forces baseTypeSeqs to be computed
- // at stale types (i.e. the underlying typeSymbol has already another type).
- // I do not yet see precisely why this would cause a problem, but it looks
- // fishy in any case.
- }
- }
- //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
+ val cached = baseTypeSeqCache
+ if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
+ cached
+ else {
+ defineBaseTypeSeqOfCompoundType(this)
+ if (baseTypeSeqCache eq undetBaseTypeSeq)
+ throw new RecoverableCyclicReference(typeSymbol)
+
+ baseTypeSeqCache
}
- if (baseTypeSeqCache eq undetBaseTypeSeq)
- throw new TypeError("illegal cyclic inheritance involving " + typeSymbol)
- baseTypeSeqCache
}
override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
override def baseClasses: List[Symbol] = {
- def computeBaseClasses: List[Symbol] =
- if (parents.isEmpty) List(typeSymbol)
- else {
- //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
- // optimized, since this seems to be performance critical
- val superclazz = parents.head
- var mixins = parents.tail
- val sbcs = superclazz.baseClasses
- var bcs = sbcs
- def isNew(clazz: Symbol): Boolean = (
- superclazz.baseTypeIndex(clazz) < 0 &&
- { var p = bcs;
- while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
- p eq sbcs
- }
- );
- while (!mixins.isEmpty) {
- def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
- if (mbcs.isEmpty) bcs
- else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
- else addMixinBaseClasses(mbcs.tail);
- bcs = addMixinBaseClasses(mixins.head.baseClasses)
- mixins = mixins.tail
- }
- typeSymbol :: bcs
- }
- val period = baseClassesPeriod
- if (period != currentPeriod) {
- baseClassesPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- baseClassesCache = null
- baseClassesCache = memo(computeBaseClasses)(typeSymbol :: _.baseClasses.tail)
- }
+ val cached = baseClassesCache
+ if (baseClassesPeriod == currentPeriod && cached != null) cached
+ else {
+ defineBaseClassesOfCompoundType(this)
+ if (baseClassesCache eq null)
+ throw new RecoverableCyclicReference(typeSymbol)
+
+ baseClassesCache
}
- if (baseClassesCache eq null)
- throw new TypeError("illegal cyclic reference involving " + typeSymbol)
- baseClassesCache
}
/** The slightly less idiomatic use of Options is due to
@@ -1443,12 +1410,103 @@ trait Types extends api.Types { self: SymbolTable =>
// override def isNullable: Boolean =
// parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
-
+
override def safeToString: String =
parents.mkString(" with ") +
(if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
decls.mkString("{", "; ", "}") else "")
}
+
+ protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
+ val period = tpe.baseTypeSeqPeriod;
+ if (period != currentPeriod) {
+ tpe.baseTypeSeqPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ if (tpe.parents.exists(_.exists(_.isInstanceOf[TypeVar]))) {
+ // rename type vars to fresh type params, take base type sequence of
+ // resulting type, and rename back all the entries in that sequence
+ var tvs = Set[TypeVar]()
+ for (p <- tpe.parents)
+ for (t <- p) t match {
+ case tv: TypeVar => tvs += tv
+ case _ =>
+ }
+ val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
+ val paramToVarMap = varToParamMap map (_.swap)
+ val varToParam = new TypeMap {
+ def apply(tp: Type) = varToParamMap get tp match {
+ case Some(sym) => sym.tpe
+ case _ => mapOver(tp)
+ }
+ }
+ val paramToVar = new TypeMap {
+ def apply(tp: Type) = tp match {
+ case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym)
+ case _ => mapOver(tp)
+ }
+ }
+ val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
+ tpe.baseTypeSeqCache = bts lateMap paramToVar
+ } else {
+ incCounter(compoundBaseTypeSeqCount)
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
+ // when compiling with
+ // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
+ // I have not yet figured out precisely why this is the case.
+ // My current assumption is that taking memos forces baseTypeSeqs to be computed
+ // at stale types (i.e. the underlying typeSymbol has already another type).
+ // I do not yet see precisely why this would cause a problem, but it looks
+ // fishy in any case.
+ }
+ }
+ }
+ //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
+ if (tpe.baseTypeSeqCache eq undetBaseTypeSeq)
+ throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
+ }
+
+ protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
+ def computeBaseClasses: List[Symbol] =
+ if (tpe.parents.isEmpty) List(tpe.typeSymbol)
+ else {
+ //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
+ // optimized, since this seems to be performance critical
+ val superclazz = tpe.parents.head
+ var mixins = tpe.parents.tail
+ val sbcs = superclazz.baseClasses
+ var bcs = sbcs
+ def isNew(clazz: Symbol): Boolean =
+ superclazz.baseTypeIndex(clazz) < 0 &&
+ { var p = bcs;
+ while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
+ p eq sbcs
+ }
+ while (!mixins.isEmpty) {
+ def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
+ if (mbcs.isEmpty) bcs
+ else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
+ else addMixinBaseClasses(mbcs.tail)
+ bcs = addMixinBaseClasses(mixins.head.baseClasses)
+ mixins = mixins.tail
+ }
+ tpe.typeSymbol :: bcs
+ }
+ val period = tpe.baseClassesPeriod
+ if (period != currentPeriod) {
+ tpe.baseClassesPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ tpe.baseClassesCache = null
+ tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
+ }
+ }
+ if (tpe.baseClassesCache eq null)
+ throw new TypeError("illegal cyclic reference involving " + tpe.typeSymbol)
+ }
/** A class representing intersection types with refinements of the form
* `<parents_0> with ... with <parents_n> { decls }`
@@ -1472,8 +1530,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeConstructor =
copyRefinedType(this, parents map (_.typeConstructor), decls)
- private def dummyArgs = typeParams map (_.typeConstructor)
-
/* MO to AM: This is probably not correct
* If they are several higher-kinded parents with different bounds we need
* to take the intersection of their bounds
@@ -1559,7 +1615,7 @@ trait Types extends api.Types { self: SymbolTable =>
* by a path which contains at least one expansive reference.
* @See Kennedy, Pierce: On Decidability of Nominal Subtyping with Variance
*/
- def expansiveRefs(tparam: Symbol) = {
+ private[scala] def expansiveRefs(tparam: Symbol) = {
if (state == UnInitialized) {
computeRefs()
while (state != Initialized) propagate()
@@ -1573,10 +1629,16 @@ trait Types extends api.Types { self: SymbolTable =>
/** The type parameters which are referenced type parameters of this class.
* Two entries: refs(0): Non-expansive references
* refs(1): Expansive references
+ * Syncnote: This var need not be protected with synchronized, because
+ * it is accessed only from expansiveRefs, which is called only from
+ * Typer.
*/
private var refs: Array[RefMap] = _
/** The initialization state of the class: UnInitialized --> Initializing --> Initialized
+ * Syncnote: This var need not be protected with synchronized, because
+ * it is accessed only from expansiveRefs, which is called only from
+ * Typer.
*/
private var state = UnInitialized
@@ -1613,29 +1675,41 @@ trait Types extends api.Types { self: SymbolTable =>
// (this can happen only for erroneous programs).
}
+ private object enterRefs extends TypeMap {
+ private var tparam: Symbol = _
+
+ def apply(tp: Type): Type = {
+ tp match {
+ case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
+ val tparams = tr.initializedTypeParams
+ if (settings.debug.value && !sameLength(tparams, args))
+ debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+
+ foreach2(tparams, args) { (tparam1, arg) =>
+ if (arg contains tparam) {
+ addRef(NonExpansive, tparam, tparam1)
+ if (arg.typeSymbol != tparam)
+ addRef(Expansive, tparam, tparam1)
+ }
+ }
+ case _ =>
+ }
+ mapOver(tp)
+ }
+ def enter(tparam0: Symbol, parent: Type) {
+ this.tparam = tparam0
+ this(parent)
+ }
+ }
+
/** Compute initial (one-step) references and set state to `Initializing`.
*/
private def computeRefs() {
refs = Array(Map(), Map())
- for (tparam <- typeSymbol.typeParams) {
- val enterRefs = new TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- if (settings.debug.value && !sameLength(sym.info.typeParams, args))
- debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
-
- for ((tparam1, arg) <- sym.info.typeParams zip args; if arg contains tparam) {
- addRef(NonExpansive, tparam, tparam1)
- if (arg.typeSymbol != tparam)
- addRef(Expansive, tparam, tparam1)
- }
- case _ =>
- }
- mapOver(tp)
- }
+ typeSymbol.typeParams foreach { tparam =>
+ parents foreach { p =>
+ enterRefs.enter(tparam, p)
}
- for (p <- parents) enterRefs(p)
}
state = Initializing
}
@@ -1677,6 +1751,19 @@ trait Types extends api.Types { self: SymbolTable =>
// override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
override def kind = "ClassInfoType"
+
+ override def safeToString =
+ if (settings.debug.value || decls.size > 1)
+ formattedToString
+ else
+ super.safeToString
+
+ /** A nicely formatted string with newlines and such.
+ */
+ def formattedToString: String =
+ parents.mkString("\n with ") +
+ (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ decls.mkString(" {\n ", "\n ", "\n}") else "")
}
object ClassInfoType extends ClassInfoTypeExtractor
@@ -1714,253 +1801,305 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
+ * with synchronized, because they are accessed only from isVolatile, which is called only from
+ * Typer.
+ */
private var volatileRecursions: Int = 0
private val pendingVolatiles = new mutable.HashSet[Symbol]
+
+ class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) with UniqueType {
+ require(args0.nonEmpty, this)
- /** A class for named types of the form
- * `<prefix>.<sym.name>[args]`
- * Cannot be created directly; one should always use `typeRef`
- * for creation. (@M: Otherwise hashing breaks)
- *
- * @M: a higher-kinded type is represented as a TypeRef with sym.info.typeParams.nonEmpty, but args.isEmpty
- * @param pre ...
- * @param sym ...
- * @param args ...
- */
- abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
-// assert(!sym.isAbstractType || pre.isStable || pre.isError)
-// assert(!pre.isInstanceOf[ClassInfoType], this)
-// assert(!(sym hasFlag (PARAM | EXISTENTIAL)) || pre == NoPrefix, this)
-// assert(args.isEmpty || !sym.info.typeParams.isEmpty, this)
-// assert(args.isEmpty || ((sym ne AnyClass) && (sym ne NothingClass))
-
- private var parentsCache: List[Type] = _
- private var parentsPeriod = NoPeriod
-
- private var baseTypeSeqCache: BaseTypeSeq = _
- private var baseTypeSeqPeriod = NoPeriod
-
- private var symInfoCache: Type = _
- private var memberInfoCache: Type = _
- private var thisInfoCache: Type = _
- private var relativeInfoCache: Type = _
-
- private var normalized: Type = null
-
- override def isStable: Boolean = {
- sym == NothingClass ||
- sym == SingletonClass ||
- sym.isAliasType && normalize.isStable ||
- sym.isAbstractType && (bounds.hi.typeSymbol isSubClass SingletonClass)
- }
-
- override def isVolatile: Boolean = {
- sym.isAliasType && normalize.isVolatile ||
- sym.isAbstractType && {
- // need to be careful not to fall into an infinite recursion here
- // because volatile checking is done before all cycles are detected.
- // the case to avoid is an abstract type directly or
- // indirectly upper-bounded by itself. See #2918
- try {
- volatileRecursions += 1
- if (volatileRecursions < LogVolatileThreshold)
- bounds.hi.isVolatile
- else if (pendingVolatiles(sym))
- true // we can return true here, because a cycle will be detected
- // here afterwards and an error will result anyway.
- else
- try {
- pendingVolatiles += sym
- bounds.hi.isVolatile
- } finally {
- pendingVolatiles -= sym
- }
- } finally {
- volatileRecursions -= 1
- }
- }
- }
-
- override lazy val isTrivial: Boolean =
- !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
+ /** No unapplied type params: it has (or should have) exactly as many args as type params. */
+ override def isHigherKinded = false
+ override def typeParams = Nil
- override def isNotNull =
- sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull
+ override def transform(tp: Type): Type = {
+ // This situation arises when a typevar is encountered for which
+ // too little information is known to determine its kind, and
+ // it later turns out not to have kind *. See SI-4070. Only
+ // logging it for now.
+ if (sym.typeParams.size != args.size)
+ log("!!! %s.transform(%s), but tparams.isEmpty and args=".format(this, tp, args))
- // @M: propagate actual type params (args) to `tp`, by replacing formal type parameters with actual ones
- // if tp is higher kinded, the "actual" type arguments are types that simply reference the corresponding type parameters (unbound type variables)
- def transform(tp: Type): Type = {
- val res = tp.asSeenFrom(pre, sym.owner)
- if (sym.typeParams.isEmpty || (args exists (_.isError)) || isRaw(sym, args)/*#2266/2305*/) res
- else res.instantiateTypeParams(sym.typeParams, typeArgsOrDummies)
+ asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
}
-
- //@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
- def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
-
- def thisInfo: Type =
- if (sym.isAliasType) normalize
- else if (!sym.isNonClassType) sym.info
- else {
- val symInfo = sym.info
- if (thisInfoCache == null || (symInfo ne symInfoCache)) {
- symInfoCache = symInfo
- thisInfoCache = transformInfo(symInfo) match {
- // If a subtyping cycle is not detected here, we'll likely enter an infinite
- // loop before a sensible error can be issued. SI-5093 is one example.
- case x: SubType if x.supertype eq this =>
- throw new TypeError("illegal cyclic reference involving " + sym)
- case tp => tp
- }
- }
- thisInfoCache
- }
-
- def relativeInfo: Type =
- if (!sym.isNonClassType) pre.memberInfo(sym)
- else {
- val memberInfo = pre.memberInfo(sym)
- if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
- memberInfoCache = memberInfo
- relativeInfoCache = transformInfo(memberInfo)
- }
- relativeInfoCache
+
+ // note: does not go through typeRef. There's no need to because
+ // neither `pre` nor `sym` changes. And there's a performance
+ // advantage to call TypeRef directly.
+ override def typeConstructor = TypeRef(pre, sym, Nil)
+ }
+ class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) with UniqueType {
+ // A reference (in a Scala program) to a type that has type parameters, but where the reference
+ // does not include type arguments. Note that it doesn't matter whether the symbol refers
+ // to a java or scala symbol, but it does matter whether it occurs in java or scala code.
+ // TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
+ // represented as existential types.
+ override def isHigherKinded = typeParams.nonEmpty
+ override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
+ private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
+
+ override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
+ if (isHigherKinded) {
+ if (sameLength(formals intersect typeParams, typeParams))
+ copyTypeRef(this, pre, sym, actuals)
+ // partial application (needed in infer when bunching type arguments from classes and methods together)
+ else
+ copyTypeRef(this, pre, sym, dummyArgs).instantiateTypeParams(formals, actuals)
}
+ else
+ super.instantiateTypeParams(formals, actuals)
- override def typeSymbol = if (sym.isAliasType && (this ne normalize)) normalize.typeSymbol else sym
- override def termSymbol = if (sym.isAliasType && (this ne normalize)) normalize.termSymbol else super.termSymbol
- override def typeSymbolDirect = sym
- override def termSymbolDirect = super.termSymbol
-
-/* @MAT
-whenever you see `tp.typeSymbol.isXXXX` and then act on tp based on that predicate, you're on thin ice,
-as `typeSymbol` (and `prefix`) automatically normalize, but the other inspectors don't.
-In other words, even if `tp.normalize.sym.isXXX` is true, `tp.sym.isXXX` may be false (if sym were a public method to access the non-normalized typeSymbol)...
-
-In retrospect, I think `tp.typeSymbol.isXXX` or (worse) `tp.typeSymbol==XXX` should be replaced by `val tp = tp0.asXXX`.
-A type's typeSymbol should never be inspected directly.
-*/
-
- override def bounds: TypeBounds =
- if (sym.isAbstractType) thisInfo.bounds // transform(thisInfo.bounds).asInstanceOf[TypeBounds] // ??? seems to be doing asSeenFrom twice
- else super.bounds
-
- override def parents: List[Type] = {
- val period = parentsPeriod
- if (period != currentPeriod) {
- parentsPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- parentsCache = thisInfo.parents map transform
- } else if (parentsCache == null) { // seems this can happen if things are currupted enough, see #2641
- parentsCache = List(AnyClass.tpe)
- }
- }
- parentsCache
+ override def transform(tp: Type): Type = {
+ val res = asSeenFromOwner(tp)
+ if (isHigherKinded && !isRaw)
+ res.instantiateTypeParams(typeParams, dummyArgs)
+ else
+ res
}
- override def typeOfThis = transform(sym.typeOfThis)
-/*
- override def narrow =
- if (sym.isModuleClass) transform(sym.thisType)
- else if (sym.isAliasType) normalize.narrow
- else super.narrow
-*/
+ override def transformInfo(tp: Type): Type =
+ appliedType(asSeenFromOwner(tp), dummyArgs)
+
override def narrow =
if (sym.isModuleClass) singleType(pre, sym.sourceModule)
- else if (sym.isAliasType) normalize.narrow
else super.narrow
- override def prefix: Type =
- if (sym.isAliasType) normalize.prefix
- else pre
+ override def typeConstructor = this
+ // eta-expand, subtyping relies on eta-expansion of higher-kinded types
- override def typeArgs: List[Type] = args
- private def typeArgsOrDummies = if (!isHigherKinded) args else dummyArgs
- // def hasFishyArgs = args == dummyArgs
- private def argsMatchTypeParams = sameLength(sym.info.typeParams, args)
+ override protected def normalizeImpl: Type =
+ if (isHigherKinded) etaExpand else super.normalizeImpl
+ }
+
+ trait ClassTypeRef extends TypeRef {
+ // !!! There are scaladoc-created symbols arriving which violate this require.
+ // require(sym.isClass, sym)
+
+ override protected def normalizeImpl: Type =
+ if (sym.isRefinementClass) sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
+ else super.normalizeImpl
- // @MAT was typeSymbol.unsafeTypeParams, but typeSymbol normalizes now
- private def typeParamsDirect =
- if (isDefinitionsInitialized) sym.typeParams
- else sym.unsafeTypeParams
+ override def baseType(clazz: Symbol): Type =
+ if (sym == clazz) this
+ else transform(sym.info.baseType(clazz))
+ }
+
+ trait NonClassTypeRef extends TypeRef {
+ require(sym.isNonClassType, sym)
- // placeholders derived from type params
- private def dummyArgs = {
- // @PP to @AM: this appears to me a place where
- // higher-order tparams are going off the beam.
- // if (sym.isAbstractType) { something goes wrong }
+ /* Syncnote: These are pure caches for performance; no problem to evaluate these
+ * several times. Hence, there is no need to protect them with synchronized in a multi-threaded
+ * usage scenario.
+ */
+ private var relativeInfoCache: Type = _
+ private var memberInfoCache: Type = _
- //@M must be .typeConstructor
- typeParamsDirect map (_.typeConstructor)
+ private[Types] def relativeInfo = {
+ val memberInfo = pre.memberInfo(sym)
+ if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
+ memberInfoCache = memberInfo
+ relativeInfoCache = transformInfo(memberInfo)
+ }
+ relativeInfoCache
}
+
+ override def baseType(clazz: Symbol): Type =
+ if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz)
+ }
+
+ protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try {
+ basetypeRecursions += 1
+ if (basetypeRecursions < LogPendingBaseTypesThreshold)
+ tpe.relativeInfo.baseType(clazz)
+ else if (pendingBaseTypes contains tpe)
+ if (clazz == AnyClass) clazz.tpe else NoType
+ else
+ try {
+ pendingBaseTypes += tpe
+ tpe.relativeInfo.baseType(clazz)
+ } finally {
+ pendingBaseTypes -= tpe
+ }
+ } finally {
+ basetypeRecursions -= 1
+ }
+
+ trait AliasTypeRef extends NonClassTypeRef {
+ require(sym.isAliasType, sym)
+
+ override def dealias = if (typeParamsMatchArgs) betaReduce.dealias else super.dealias
+ override def isStable = normalize.isStable
+ override def isVolatile = normalize.isVolatile
+ override def narrow = normalize.narrow
+ override def thisInfo = normalize
+ override def prefix = if (this ne normalize) normalize.prefix else pre
+ override def termSymbol = if (this ne normalize) normalize.termSymbol else super.termSymbol
+ override def typeSymbol = if (this ne normalize) normalize.typeSymbol else sym
+
+ // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
+ override protected def normalizeImpl =
+ if (typeParamsMatchArgs) betaReduce.normalize
+ else if (isHigherKinded) super.normalizeImpl
+ else ErrorType
+
+ // isHKSubType0 introduces synthetic type params so that
+ // betaReduce can first apply sym.info to typeArgs before calling
+ // asSeenFrom. asSeenFrom then skips synthetic type params, which
+ // are used to reduce HO subtyping to first-order subtyping, but
+ // which can't be instantiated from the given prefix and class.
+ //
+ // this crashes pos/depmet_implicit_tpbetareduce.scala
+ // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
+ def betaReduce = transform(sym.info.resultType)
+
+ // #3731: return sym1 for which holds: pre bound sym.name to sym and
+ // pre1 now binds sym.name to sym1, conceptually exactly the same
+ // symbol as sym. The selection of sym on pre must be updated to the
+ // selection of sym1 on pre1, since sym's info was probably updated
+ // by the TypeMap to yield a new symbol, sym1 with transformed info.
+ // @return sym1
+ override def coevolveSym(pre1: Type): Symbol =
+ if (pre eq pre1) sym else (pre, pre1) match {
+ // don't look at parents -- it would be an error to override alias types anyway
+ case (RefinedType(_, _), RefinedType(_, decls1)) => decls1 lookup sym.name
+ // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
+ case _ => sym
+ }
+
+ }
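As a rough illustration of the beta-reduction performed by AliasTypeRef.betaReduce, here is a standalone sketch (the names Tpe, Ref, Alias, subst and betaReduce are invented for this toy model and are not compiler API): the alias's right-hand side is instantiated by substituting its formal type parameters with the supplied arguments, mirroring transform(sym.info.resultType).

    // Toy model of alias beta-reduction: type Pair[A] = (A, A); Pair[Int] ~> (Int, Int)
    object BetaReduceSketch {
      sealed trait Tpe
      case class Ref(name: String, args: List[Tpe] = Nil) extends Tpe
      case class Alias(name: String, tparams: List[String], rhs: Tpe) // e.g. type Pair[A] = Tuple2[A, A]

      // Replace every reference to a formal type parameter by the corresponding argument.
      def subst(tp: Tpe, env: Map[String, Tpe]): Tpe = tp match {
        case Ref(n, Nil)  => env.getOrElse(n, tp)
        case Ref(n, args) => Ref(n, args map (subst(_, env)))
      }

      def betaReduce(alias: Alias, args: List[Tpe]): Tpe = {
        require(alias.tparams.length == args.length, "typeParamsMatchArgs must hold")
        subst(alias.rhs, (alias.tparams zip args).toMap)
      }

      def main(args: Array[String]): Unit = {
        val pair = Alias("Pair", List("A"), Ref("Tuple2", List(Ref("A"), Ref("A"))))
        // Pair[Int] reduces to Tuple2[Int, Int]
        println(betaReduce(pair, List(Ref("Int"))))
      }
    }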
- // (!result.isEmpty) IFF isHigherKinded
- override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
-
- // note: does not go through typeRef. There's no need to because
- // neither `pre` nor `sym` changes. And there's a performance
- // advantage to call TypeRef directly.
- override def typeConstructor = TypeRef(pre, sym, Nil)
-
- // A reference (in a Scala program) to a type that has type
- // parameters, but where the reference does not include type
- // arguments. Note that it doesn't matter whether the symbol refers
- // to a java or scala symbol, but it does matter whether it occurs in
- // java or scala code. TypeRefs w/o type params that occur in java
- // signatures/code are considered raw types, and are represented as
- // existential types.
- override def isHigherKinded = args.isEmpty && typeParamsDirect.nonEmpty
+ trait AbstractTypeRef extends NonClassTypeRef {
+ require(sym.isAbstractType, sym)
+
+ /** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment
+ */
+ private var symInfoCache: Type = _
+ private var thisInfoCache: Type = _
- override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
- if (isHigherKinded) {
- if (sameLength(formals intersect typeParams, typeParams))
- copyTypeRef(this, pre, sym, actuals)
- // partial application (needed in infer when bunching type arguments from classes and methods together)
+ override def isVolatile = {
+ // need to be careful not to fall into an infinite recursion here
+ // because volatile checking is done before all cycles are detected.
+ // the case to avoid is an abstract type directly or
+ // indirectly upper-bounded by itself. See #2918
+ try {
+ volatileRecursions += 1
+ if (volatileRecursions < LogVolatileThreshold)
+ bounds.hi.isVolatile
+ else if (pendingVolatiles(sym))
+ true // we can return true here, because a cycle will be detected
+ // here afterwards and an error will result anyway.
else
- copyTypeRef(this, pre, sym, dummyArgs).instantiateTypeParams(formals, actuals)
+ try {
+ pendingVolatiles += sym
+ bounds.hi.isVolatile
+ } finally {
+ pendingVolatiles -= sym
+ }
+ } finally {
+ volatileRecursions -= 1
}
- else
- super.instantiateTypeParams(formals, actuals)
+ }
+
+ override def thisInfo = {
+ val symInfo = sym.info
+ if (thisInfoCache == null || (symInfo ne symInfoCache)) {
+ symInfoCache = symInfo
+ thisInfoCache = transformInfo(symInfo) match {
+ // If a subtyping cycle is not detected here, we'll likely enter an infinite
+ // loop before a sensible error can be issued. SI-5093 is one example.
+ case x: SubType if x.supertype eq this =>
+ throw new RecoverableCyclicReference(sym)
+ case tp => tp
+ }
+ }
+ thisInfoCache
+ }
+ override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass
+ override def bounds = thisInfo.bounds
+ // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
+ override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
+ }
- /** @pre: argsMatchTypeParams */
- @inline private def betaReduce: Type = {
- // isHKSubType0 introduces synthetic type params so that
- // betaReduce can first apply sym.info to typeArgs before calling
- // asSeenFrom. asSeenFrom then skips synthetic type params, which
- // are used to reduce HO subtyping to first-order subtyping, but
- // which can't be instantiated from the given prefix and class.
- transform(sym.info.resultType)
- // this crashes pos/depmet_implicit_tpbetareduce.scala
- // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
- }
- private def isBetaReducible = sym.isAliasType && argsMatchTypeParams
-
- // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
- @inline private def etaExpand: Type = {
- val tpars = sym.info.typeParams // must go through sym.info for typeParams to initialise symbol
+ /** A class for named types of the form
+ * `<prefix>.<sym.name>[args]`
+ * Cannot be created directly; one should always use `typeRef`
+ * for creation. (@M: Otherwise hashing breaks)
+ *
+ * @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty
+ */
+ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
+ private[reflect] var parentsCache: List[Type] = _
+ private[reflect] var parentsPeriod = NoPeriod
+ private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
+ private[reflect] var baseTypeSeqPeriod = NoPeriod
+ private var normalized: Type = _
+
+ // @M: propagate actual type params (args) to `tp`, by replacing
+ // formal type parameters with actual ones. If tp is higher kinded,
+ // the "actual" type arguments are types that simply reference the
+ // corresponding type parameters (unbound type variables)
+ def transform(tp: Type): Type
+
+ // eta-expand, subtyping relies on eta-expansion of higher-kinded types
+ protected def normalizeImpl: Type = if (isHigherKinded) etaExpand else super.normalize
+
+ // TODO: test case that is compiled in a specific order and in different runs
+ final override def normalize: Type = {
+ // arises when argument-dependent types are approximated (see def depoly in implicits)
+ if (pre eq WildcardType) WildcardType
+ else if (phase.erasedTypes) normalizeImpl
+ else {
+ if (normalized eq null)
+ normalized = normalizeImpl
+ normalized
+ }
+ }
+
+ def etaExpand: Type = {
+ // must initialise symbol, see test/files/pos/ticket0137.scala
+ val tpars = initializedTypeParams
if (tpars.isEmpty) this
else typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
}
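Eta-expansion turns a bare higher-kinded type constructor into an explicit type function so that subtyping can compare constructors argument-wise. A minimal standalone sketch with invented names (Tpe, Ctor, Applied, TypeFun, Param), not the compiler's types:

    // Toy model: a bare constructor C of arity n becomes [X1, ..., Xn] => C[X1, ..., Xn].
    object EtaExpandSketch {
      sealed trait Tpe
      case class Ctor(name: String, arity: Int) extends Tpe           // e.g. List with arity 1
      case class Applied(ctor: Ctor, args: List[Tpe]) extends Tpe
      case class TypeFun(params: List[String], body: Tpe) extends Tpe // [A] => List[A]
      case class Param(name: String) extends Tpe

      def etaExpand(c: Ctor): Tpe =
        if (c.arity == 0) c
        else {
          val ps = (1 to c.arity).toList map (i => "X" + i)
          TypeFun(ps, Applied(c, ps map (Param(_))))
        }

      def main(args: Array[String]): Unit =
        println(etaExpand(Ctor("List", 1)))   // prints the type function [X1] => List[X1] in toy form
    }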
- override def dealias = if (isBetaReducible) betaReduce.dealias else this
+ // only need to rebind type aliases, as typeRef already handles abstract types
+ // (they are allowed to be rebound more liberally)
+ def coevolveSym(pre1: Type): Symbol = sym
- private def normalize0: Type = (
- if (pre eq WildcardType) WildcardType // arises when argument-dependent types are approximated (see def depoly in implicits)
- else if (isHigherKinded) etaExpand // eta-expand, subtyping relies on eta-expansion of higher-kinded types
- else if (isBetaReducible) betaReduce.normalize // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
- else if (sym.isRefinementClass) sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
- else if (sym.isAliasType) ErrorType //println("!!error: "+(pre, sym, sym.info, sym.info.typeParams, args))
- else super.normalize
- )
+ //@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
+ def transformInfo(tp: Type): Type = appliedType(asSeenFromOwner(tp), args)
+
+ def thisInfo = sym.info
+ def initializedTypeParams = sym.info.typeParams
+ def typeParamsMatchArgs = sameLength(initializedTypeParams, args)
+ def asSeenFromOwner(tp: Type) = tp.asSeenFrom(pre, sym.owner)
+
+ override def baseClasses = thisInfo.baseClasses
+ override def baseTypeSeqDepth = baseTypeSeq.maxDepth
+ override def isStable = (sym eq NothingClass) || (sym eq SingletonClass)
+ override def prefix = pre
+ override def termSymbol = super.termSymbol
+ override def termSymbolDirect = super.termSymbol
+ override def typeArgs = args
+ override def typeOfThis = transform(sym.typeOfThis)
+ override def typeSymbol = sym
+ override def typeSymbolDirect = sym
- // TODO: test case that is compiled in a specific order and in different runs
- override def normalize: Type = {
- if (phase.erasedTypes) normalize0
- else {
- if (normalized == null)
- normalized = normalize0
+ override lazy val isTrivial: Boolean =
+ !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
- normalized
+ override def isNotNull =
+ sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull
+
+ override def parents: List[Type] = {
+ val cache = parentsCache
+ if (parentsPeriod == currentPeriod && cache != null) cache
+ else {
+ defineParentsOfTypeRef(this)
+ parentsCache
}
}
@@ -1972,65 +2111,36 @@ A type's typeSymbol should never be inspected directly.
}
thisInfo.decls
}
-
- override def baseType(clazz: Symbol): Type =
- if (sym == clazz) this
- else if (sym.isClass) transform(sym.info.baseType(clazz))
- else
- try {
- basetypeRecursions += 1
- if (basetypeRecursions < LogPendingBaseTypesThreshold)
- relativeInfo.baseType(clazz)
- else if (pendingBaseTypes contains this)
- if (clazz == AnyClass) clazz.tpe else NoType
- else
- try {
- pendingBaseTypes += this
- relativeInfo.baseType(clazz)
- } finally {
- pendingBaseTypes -= this
- }
- } finally {
- basetypeRecursions -= 1
- }
+
+ protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
override def baseTypeSeq: BaseTypeSeq = {
- val period = baseTypeSeqPeriod
- if (period != currentPeriod) {
- baseTypeSeqPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- incCounter(typerefBaseTypeSeqCount)
- baseTypeSeqCache = undetBaseTypeSeq
- baseTypeSeqCache =
- if (sym.isAbstractType) transform(bounds.hi).baseTypeSeq prepend this
- else sym.info.baseTypeSeq map transform
- }
+ val cache = baseTypeSeqCache
+ if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
+ cache
+ else {
+ defineBaseTypeSeqOfTypeRef(this)
+ if (baseTypeSeqCache == undetBaseTypeSeq)
+ throw new RecoverableCyclicReference(sym)
+
+ baseTypeSeqCache
}
- if (baseTypeSeqCache == undetBaseTypeSeq)
- throw new TypeError("illegal cyclic inheritance involving " + sym)
- baseTypeSeqCache
}
- override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
-
- override def baseClasses: List[Symbol] = thisInfo.baseClasses
-
- // override def isNullable: Boolean = sym.info.isNullable
private def preString = (
// ensure that symbol is not a local copy with a name coincidence
if (!settings.debug.value && shorthands(sym.fullName) && sym.ownerChain.forall(_.isClass)) ""
else pre.prefixString
)
private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
- private def refinementString = (
+ def refinementString = (
if (sym.isStructuralRefinement) (
decls filter (sym => sym.isPossibleInRefinement && sym.isPublic)
map (_.defString)
- mkString(" {", "; ", "}")
+ mkString("{", "; ", "}")
)
else ""
)
-
private def finishPrefix(rest: String) = (
if (sym.isPackageClass) packagePrefix + rest
else if (sym.isModuleClass) objectPrefix + rest
@@ -2050,8 +2160,11 @@ A type's typeSymbol should never be inspected directly.
// ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
// from (T1, T2) => R.
targs match {
- case in :: out :: Nil if !isTupleTypeOrSubtype(in) =>
- "" + in + " => " + out
+ case in :: out :: Nil if !isTupleTypeOrSubtype(in) =>
+ // A => B => C should be (A => B) => C or A => (B => C)
+ val in_s = if (isFunctionType(in)) "(" + in + ")" else "" + in
+ val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
+ in_s + " => " + out_s
case xs =>
xs.init.mkString("(", ", ", ")") + " => " + xs.last
}
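A quick standalone check of the parenthesization rule above (plain string manipulation, not the compiler's printer; render and its flags are invented for this sketch): the argument or result is wrapped in parentheses whenever it is itself a function type, so A => B => C is never printed ambiguously.

    object FunctionTypeStringSketch {
      def render(in: String, out: String, inIsFun: Boolean, outIsFun: Boolean): String = {
        val in_s  = if (inIsFun) "(" + in + ")" else in
        val out_s = if (outIsFun) "(" + out + ")" else out
        in_s + " => " + out_s
      }
      def main(args: Array[String]): Unit = {
        println(render("Int => Int", "Int", inIsFun = true,  outIsFun = false)) // (Int => Int) => Int
        println(render("Int", "Int => Int", inIsFun = false, outIsFun = true))  // Int => (Int => Int)
      }
    }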
@@ -2083,12 +2196,45 @@ A type's typeSymbol should never be inspected directly.
override def kind = "TypeRef"
}
- final class UniqueTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends TypeRef(pre, sym, args) with UniqueType { }
-
object TypeRef extends TypeRefExtractor {
- def apply(pre: Type, sym: Symbol, args: List[Type]): Type = {
- unique(new UniqueTypeRef(pre, sym, args))
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
+ if (args.nonEmpty) {
+ if (sym.isAliasType) new ArgsTypeRef(pre, sym, args) with AliasTypeRef
+ else if (sym.isAbstractType) new ArgsTypeRef(pre, sym, args) with AbstractTypeRef
+ else new ArgsTypeRef(pre, sym, args) with ClassTypeRef
+ }
+ else {
+ if (sym.isAliasType) new NoArgsTypeRef(pre, sym) with AliasTypeRef
+ else if (sym.isAbstractType) new NoArgsTypeRef(pre, sym) with AbstractTypeRef
+ else new NoArgsTypeRef(pre, sym) with ClassTypeRef
+ }
+ })
+ }
+
+ protected def defineParentsOfTypeRef(tpe: TypeRef) = {
+ val period = tpe.parentsPeriod
+ if (period != currentPeriod) {
+ tpe.parentsPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
+ } else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
+ tpe.parentsCache = List(AnyClass.tpe)
+ }
+ }
+ }
+
+ protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = {
+ val period = tpe.baseTypeSeqPeriod
+ if (period != currentPeriod) {
+ tpe.baseTypeSeqPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ incCounter(typerefBaseTypeSeqCount)
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ }
}
+ if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
+ throw new TypeError("illegal cyclic inheritance involving " + tpe.sym)
}
/** A class representing a method type with parameters.
@@ -2236,6 +2382,15 @@ A type's typeSymbol should never be inspected directly.
}
object PolyType extends PolyTypeExtractor
+
+ /** A creator for existential types which flattens nested existentials.
+ */
+ def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
+ if (quantified.isEmpty) underlying
+ else underlying match {
+ case ExistentialType(qs, restpe) => newExistentialType(quantified ::: qs, restpe)
+ case _ => ExistentialType(quantified, underlying)
+ }
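The flattening behaviour can be mirrored on a toy ADT (Tpe, Named and Existential are invented stand-ins, not the compiler's types): quantifiers of a nested existential are merged into the outer one instead of wrapping one ExistentialType in another.

    object FlattenExistentialSketch {
      sealed trait Tpe
      case class Named(name: String) extends Tpe
      case class Existential(quantified: List[String], underlying: Tpe) extends Tpe

      // Same shape as the diffed creator: merge nested quantifiers into one existential.
      def newExistential(quantified: List[String], underlying: Tpe): Tpe =
        if (quantified.isEmpty) underlying
        else underlying match {
          case Existential(qs, restpe) => newExistential(quantified ::: qs, restpe)
          case _                       => Existential(quantified, underlying)
        }

      def main(args: Array[String]): Unit = {
        val nested = Existential(List("B"), Named("T"))
        // quantifiers merge: one existential over A and B with underlying T
        println(newExistential(List("A"), nested))
      }
    }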
case class ExistentialType(quantified: List[Symbol],
override val underlying: Type) extends RewrappingTypeProxy
@@ -2268,7 +2423,7 @@ A type's typeSymbol should never be inspected directly.
override def isHigherKinded = false
override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
- deriveType(quantified, tparam => newExistentialSkolem(tparam, owner orElse tparam.owner, origin))(underlying)
+ deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
private def wildcardArgsString(available: Set[Symbol], args: List[Type]): List[String] = args match {
case TypeRef(_, sym, _) :: args1 if (available contains sym) =>
@@ -2278,29 +2433,41 @@ A type's typeSymbol should never be inspected directly.
case _ =>
List()
}
-
+ /** An existential can only be printed with wildcards if:
+ * - the underlying type is a typeref
+ * - where there is a 1-to-1 correspondence between underlying's typeargs and quantified
+ * - and none of the existential parameters is referenced from anywhere else in the type
+ * - and none of the existential parameters are singleton types
+ */
+ private def isRepresentableWithWildcards = !settings.debug.value && {
+ val qset = quantified.toSet
+ !qset.exists(_.isSingletonExistential) && (underlying match {
+ case TypeRef(_, sym, args) =>
+ sameLength(args, quantified) && {
+ args forall { arg =>
+ qset(arg.typeSymbol) && !qset.exists(arg.typeSymbol.info.bounds contains _)
+ }
+ }
+ case _ => false
+ })
+ }
override def safeToString: String = {
- if (!(quantified exists (_.isSingletonExistential)) && !settings.debug.value)
- // try to represent with wildcards first
- underlying match {
- case TypeRef(pre, sym, args) if args.nonEmpty =>
- val wargs = wildcardArgsString(quantified.toSet, args)
- if (sameLength(wargs, args))
- return TypeRef(pre, sym, List()) + wargs.mkString("[", ", ", "]")
- case _ =>
- }
- var ustr = underlying.toString
+ def clauses = {
+ val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
+ if (settings.explaintypes.value) "(" + str + ")" else str
+ }
underlying match {
- case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => ustr = "("+ustr+")"
+ case TypeRef(pre, sym, args) if isRepresentableWithWildcards =>
+ "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
+ case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
+ "(" + underlying + ")" + clauses
case _ =>
+ "" + underlying + clauses
}
- val str =
- ustr+(quantified map (_.existentialToString) mkString(" forSome { ", "; ", " }"))
- if (settings.explaintypes.value) "("+str+")" else str
}
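In surface syntax, the effect of isRepresentableWithWildcards is that the first two annotations below denote the same existential and would be printed in the wildcard form, while the third mentions its quantified parameter in two argument positions and therefore keeps its forSome clause (ordinary Scala source; the object name is arbitrary).

    object WildcardPrintingSketch {
      val a: Set[_ <: AnyRef]                       = Set.empty[String]            // wildcard form
      val b: Set[T] forSome { type T <: AnyRef }    = Set.empty[String]            // same type as `a`
      val c: Map[T, T] forSome { type T <: AnyRef } = Map.empty[String, String]    // T used twice: no wildcard form
    }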
override def cloneInfo(owner: Symbol) =
- createFromClonedSymbolsAtOwner(quantified, owner, underlying)(ExistentialType(_, _))
+ createFromClonedSymbolsAtOwner(quantified, owner, underlying)(newExistentialType)
override def atOwner(owner: Symbol) =
if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
@@ -2338,7 +2505,7 @@ A type's typeSymbol should never be inspected directly.
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
- pre.toString + targs.mkString("(with type arguments ", ",", ")");
+ pre.toString + targs.mkString("(with type arguments ", ", ", ")");
override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
// override def memberType(sym: Symbol) = pre.memberType(sym) match {
// case PolyType(tparams, restp) =>
@@ -2358,7 +2525,7 @@ A type's typeSymbol should never be inspected directly.
object HasTypeMember {
def apply(name: TypeName, tp: Type): Type = {
val bound = refinedType(List(WildcardType), NoSymbol)
- val bsym = bound.typeSymbol.newAliasType(NoPosition, name)
+ val bsym = bound.typeSymbol.newAliasType(name)
bsym setInfo tp
bound.decls enter bsym
bound
@@ -2385,31 +2552,54 @@ A type's typeSymbol should never be inspected directly.
// but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
- // encapsulate suspension so we can automatically link the suspension of cloned
- // typevars to their original if this turns out to be necessary
-/*
- def Suspension = new Suspension
- class Suspension {
- private val suspended = mutable.HashSet[TypeVar]()
- def suspend(tv: TypeVar): Unit = {
- tv.suspended = true
- suspended += tv
- }
- def resumeAll(): Unit = {
- for (tv <- suspended) {
- tv.suspended = false
+ @inline final def trace[T](action: String, msg: => String)(value: T): T = {
+ if (traceTypeVars) {
+ val s = msg match {
+ case "" => ""
+ case str => "( " + str + " )"
}
- suspended.clear()
+ Console.err.println("[%10s] %-25s%s".format(action, value, s))
}
+ value
+ }
+
+ /** Create a new TypeConstraint based on the given symbol.
+ */
+ private def deriveConstraint(tparam: Symbol): TypeConstraint = {
+ /** Must force the type parameter's info at this point
+ * or things don't end well for higher-order type params.
+ * See SI-5359.
+ */
+ val bounds = tparam.info.bounds
+ /** We can seed the type constraint with the type parameter
+ * bounds as long as the types are concrete. This should lower
+ * the complexity of the search even if it doesn't improve
+ * any results.
+ */
+ if (propagateParameterBoundsToTypeVars) {
+ val exclude = bounds.isEmptyBounds || bounds.exists(_.typeSymbolDirect.isNonClassType)
+
+ if (exclude) new TypeConstraint
+ else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
+ }
+ else new TypeConstraint
+ }
+ def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
+ def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
+ def apply(tparam: Symbol): TypeVar = apply(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams)
+
+ /** This is the only place TypeVars should be instantiated.
+ */
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar = {
+ val tv = (
+ if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr)
+ else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args)
+ else if (args.isEmpty) new HKTypeVar(origin, constr, params)
+ else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
+ )
+
+ trace("create", "In " + tv.originLocation)(tv)
}
-*/
- def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
- def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- // TODO why not initialise TypeConstraint with bounds of tparam?
- // @PP: I tried that, didn't work out so well for me.
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) =
- new TypeVar(origin, constr, args, params)
}
// TODO: I don't really know why this happens -- maybe because
@@ -2436,31 +2626,97 @@ A type's typeSymbol should never be inspected directly.
tp.typeSymbol
)
+ /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
+ */
+ class HKTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ override val params: List[Symbol]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(params.nonEmpty, this)
+ override def isHigherKinded = true
+ override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
+ }
+
+ /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
+ */
+ class AppliedTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ zippedArgs: List[(Symbol, Type)]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(zippedArgs.nonEmpty, this)
+
+ override def params: List[Symbol] = zippedArgs map (_._1)
+ override def typeArgs: List[Type] = zippedArgs map (_._2)
+
+ override protected def typeVarString = (
+ zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
+ )
+ }
+
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
* A TypeVar with nonEmpty typeArgs can only be instantiated by a higher-kinded
* type that can be applied to those args. A TypeVar is much like a TypeRef,
* except it has special logic for equality and subtyping.
+ *
+ * Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
class TypeVar(
val origin: Type,
- val constr0: TypeConstraint,
- override val typeArgs: List[Type],
- override val params: List[Symbol]
+ val constr0: TypeConstraint
) extends Type {
- private val numArgs = typeArgs.length
- // params are needed to keep track of variance (see mapOverArgs in SubstMap)
- assert(typeArgs.isEmpty || sameLength(typeArgs, params))
- // var tid = { tidCount += 1; tidCount } //DEBUG
+ override def params: List[Symbol] = Nil
+ override def typeArgs: List[Type] = Nil
+ override def isHigherKinded = false
- /** The constraint associated with the variable */
+ /** The constraint associated with the variable
+ * Syncnote: Type variables are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of `constr`
+ * or `encounteredHigherLevel` or `suspended` accesses should be necessary.
+ */
var constr = constr0
def instValid = constr.instValid
/** The variable's skolemization level */
val level = skolemizationLevel
-
+
+ /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
+ * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
+ *
+ * `constr` for `?CC` only tracks type constructors anyway,
+ * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
+ * `?CC's` hibounds contains List and Iterable.
+ */
+ def applyArgs(newArgs: List[Type]): TypeVar = (
+ if (newArgs.isEmpty && typeArgs.isEmpty)
+ this
+ else if (newArgs.size == params.size) {
+ val tv = TypeVar(origin, constr, newArgs, params)
+ TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
+ }
+ else
+ throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs)
+ )
+ // newArgs.length may differ from args.length (could've been empty before)
+ //
+ // !!! @PP - I need an example of this, since this exception never triggers
+ // even though I am requiring the size match.
+ //
+ // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
+ // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
+ // TypeVars get applied to different arguments over time (in asSeenFrom)
+ // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
+ // thus: make new TypeVar's for every application of a TV to args,
+ // inference may generate several TypeVar's for a single type parameter that must be inferred,
+ // only one of them is in the set of tvars that need to be solved, but
+ // they share the same TypeConstraint instance
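A simplified standalone model of the sharing described above (Constraint, TVar, applyArgs and addHiBound are invented stand-ins for the compiler's TypeVar/TypeConstraint): applying a type variable to different argument lists yields distinct values that nevertheless share one mutable constraint, so a bound recorded through ?CC[Int] is visible through ?CC[String].

    object SharedConstraintSketch {
      class Constraint { var hiBounds: List[String] = Nil }

      case class TVar(name: String, args: List[String], constr: Constraint) {
        def applyArgs(newArgs: List[String]): TVar = copy(args = newArgs) // keeps the same constr
        def addHiBound(tp: String): Unit = constr.hiBounds ::= tp
      }

      def main(args: Array[String]): Unit = {
        val cc       = TVar("?CC", Nil, new Constraint)
        val ccInt    = cc.applyArgs(List("Int"))
        val ccString = cc.applyArgs(List("String"))
        ccInt.addHiBound("List")
        ccString.addHiBound("Iterable")
        println(cc.constr.hiBounds) // List(Iterable, List) -- one constraint for all occurrences
      }
    }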
+
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
@@ -2471,26 +2727,6 @@ A type's typeSymbol should never be inspected directly.
private var encounteredHigherLevel = false
private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
- /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
- * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
- *
- * `constr` for `?CC` only tracks type constructors anyway,
- * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
- * `?CC's` hibounds contains List and Iterable.
- */
- def applyArgs(newArgs: List[Type]): TypeVar =
- if (newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...)
- else TypeVar(origin, constr, newArgs, params) // @M TODO: interaction with undoLog??
- // newArgs.length may differ from args.length (could've been empty before)
- // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
- // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
- // TypeVars get applied to different arguments over time (in asSeenFrom)
- // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
- // thus: make new TypeVar's for every application of a TV to args,
- // inference may generate several TypeVar's for a single type parameter that must be inferred,
- // only one of them is in the set of tvars that need to be solved, but
- // they share the same TypeConstraint instance
-
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
@@ -2499,7 +2735,8 @@ A type's typeSymbol should never be inspected directly.
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
- constr.inst = if (shouldRepackType) repackExistential(tp) else tp
+ val res = if (shouldRepackType) repackExistential(tp) else tp
+ constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
@@ -2576,11 +2813,10 @@ A type's typeSymbol should never be inspected directly.
* type parameter we're trying to infer (the result will be sanity-checked later).
*/
def unifyFull(tpe: Type) = {
- // Since the alias/widen variations are often no-ops, this
- // keenly collects them in a Set to avoid redundant tests.
+ // The alias/widen variations are often no-ops.
val tpes = (
- if (isLowerBound) Set(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias)
- else Set(tpe)
+ if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct
+ else List(tpe)
)
tpes exists { tp =>
val lhs = if (isLowerBound) tp.typeArgs else typeArgs
@@ -2680,32 +2916,56 @@ A type's typeSymbol should never be inspected directly.
|| !containsSkolemAboveLevel(tp) // side-effects tracking boolean
|| enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences
)
- override val isHigherKinded = typeArgs.isEmpty && params.nonEmpty
- override def normalize: Type =
+ override def normalize: Type = (
if (constr.instValid) constr.inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor)))
else super.normalize
-
+ )
override def typeSymbol = origin.typeSymbol
override def isStable = origin.isStable
override def isVolatile = origin.isVolatile
- private def levelString = if (settings.explaintypes.value) level else ""
- override def safeToString = constr.inst match {
- case null => "<null " + origin + ">"
- case NoType => "?" + levelString + origin + typeArgsString(this)
- case x => "" + x
+ private def tparamsOfSym(sym: Symbol) = sym.info match {
+ case PolyType(tparams, _) if tparams.nonEmpty =>
+ tparams map (_.defString) mkString("[", ",", "]")
+ case _ => ""
+ }
+ def originName = {
+ val name = origin.typeSymbolDirect.decodedName
+ if (name contains "_$") origin.typeSymbolDirect.decodedName else name
+ }
+ def originLocation = {
+ val sym = origin.typeSymbolDirect
+ val encl = sym.owner.logicallyEnclosingMember
+
+ // This should display somewhere between one and three
+ // things which enclose the origin: at most, a class, a
+ // method, and a term. At least, a class.
+ List(
+ Some(encl.enclClass),
+ if (encl.isMethod) Some(encl) else None,
+ if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
+ ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
}
+ private def levelString = if (settings.explaintypes.value) level else ""
+ protected def typeVarString = originName
+ override def safeToString = (
+ if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
+ else if (constr.inst ne NoType) "" + constr.inst
+ else "?" + levelString + originName
+ )
override def kind = "TypeVar"
def cloneInternal = {
// cloning a suspended type variable when it's suspended will cause the clone
// to never be resumed with the current implementation
- assert(!suspended)
- TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ assert(!suspended, this)
+ TypeVar.trace("clone", originLocation)(
+ TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ )
}
}
@@ -2890,7 +3150,7 @@ A type's typeSymbol should never be inspected directly.
* @return ...
*/
def refinedType(parents: List[Type], owner: Symbol): Type =
- refinedType(parents, owner, new Scope, owner.pos)
+ refinedType(parents, owner, newScope, owner.pos)
def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
if ((parents eq original.parents) && (decls eq original.decls)) original
@@ -2921,7 +3181,7 @@ A type's typeSymbol should never be inspected directly.
// don't expand cyclical type alias
// we require that object is initialized, thus info.typeParams instead of typeParams.
if (sym1.isAliasType && sameLength(sym1.info.typeParams, args) && !sym1.lockOK)
- throw new TypeError("illegal cyclic reference involving " + sym1)
+ throw new RecoverableCyclicReference(sym1)
val pre1 = pre match {
case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
@@ -2943,7 +3203,7 @@ A type's typeSymbol should never be inspected directly.
def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match {
case TypeRef(pre0, sym0, _) if pre == pre0 && sym0.name == sym.name =>
if (sym.isAliasType && sameLength(sym.info.typeParams, args) && !sym.lockOK)
- throw new TypeError("illegal cyclic reference involving " + sym)
+ throw new RecoverableCyclicReference(sym)
TypeRef(pre, sym, args)
case _ =>
@@ -2962,6 +3222,9 @@ A type's typeSymbol should never be inspected directly.
/** A creator for intersection type where intersections of a single type are
* replaced by the type itself, and repeated parent classes are merged.
+ *
+ * !!! Repeated parent classes are not merged - is this a bug in the
+ * comment or in the code?
*/
def intersectionType(tps: List[Type], owner: Symbol): Type = tps match {
case List(tp) =>
@@ -2999,7 +3262,7 @@ A type's typeSymbol should never be inspected directly.
case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
- case ExistentialType(tparams, restpe) => ExistentialType(tparams, appliedType(restpe, args))
+ case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
@@ -3009,6 +3272,25 @@ A type's typeSymbol should never be inspected directly.
case WildcardType => tycon // needed for neg/t0226
case _ => abort(debugString(tycon))
}
+
+ /** A creator for existential types where the type arguments,
+ * rather than being applied directly, are interpreted as the
+ * upper bounds of unknown types. For instance if the type argument
+ * list given is List(AnyRefClass), the resulting type would be
+ * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] .
+ */
+ def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
+ tycon match {
+ case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
+ val eparams = typeParamsToExistentials(sym)
+ val bounds = args map (TypeBounds upper _)
+ (eparams, bounds).zipped foreach (_ setInfo _)
+
+ newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ appliedType(tycon, args)
+ }
+ }
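A surface-level illustration of why the upper-bound interpretation is more permissive (plain Scala, nothing compiler-specific; the method names are invented): Set is invariant, so Set[String] conforms to Set[_ <: AnyRef] but not to Set[AnyRef].

    object UpperBoundApplicationSketch {
      def acceptsBounded(s: Set[_ <: AnyRef]): Int = s.size
      // def acceptsExact(s: Set[AnyRef]): Int = s.size   // a Set[String] would be rejected here

      def main(args: Array[String]): Unit =
        println(acceptsBounded(Set("a", "b")))   // Set[String] is fine against the bounded form
    }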
/** A creator for type parameterizations that strips empty type parameter lists.
* Use this factory method to indicate the type has kind * (it's a polymorphic value)
@@ -3038,71 +3320,23 @@ A type's typeSymbol should never be inspected directly.
*
* tpe1 where { tparams }
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`. Extrapolating means
- * that type variables in `tparams` occurring in covariant positions are replaced by upper bounds,
- * (minus any SingletonClass markers),
- * type variables in `tparams` occurring in contravariant positions are replaced by upper bounds,
- * provided the resulting type is legal wrt to stability, and does not contain any
- * type variable in `tparams`.
- * The abstraction drops all type parameters that are not directly or indirectly
- * referenced by type `tpe1`.
- * If there are no remaining type parameters, simply returns result type `tpe`.
+ * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * Extrapolating means that type variables in `tparams` occurring
+ * in covariant positions are replaced by upper bounds, (minus any
+ * SingletonClass markers), type variables in `tparams` occurring in
+ * contravariant positions are replaced by upper bounds, provided the
+ * resulting type is legal wrt to stability, and does not contain any type
+ * variable in `tparams`.
+ *
+ * The abstraction drops all type parameters that are not directly or
+ * indirectly referenced by type `tpe1`. If there are no remaining type
+ * parameters, simply returns result type `tpe`.
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
else {
- var occurCount = emptySymCount ++ (tparams map (_ -> 0))
- val tpe = deAlias(tpe0)
- def countOccs(tp: Type) =
- for (t <- tp) {
- t match {
- case TypeRef(_, sym, _) =>
- occurCount get sym match {
- case Some(count) => occurCount += (sym -> (count + 1))
- case none =>
- }
- case _ =>
- }
- }
- countOccs(tpe)
- for (tparam <- tparams) countOccs(tparam.info)
-
- val extrapolate = new TypeMap {
- variance = 1
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- tp1 match {
- case TypeRef(pre, sym, args) if (variance != 0) && (occurCount isDefinedAt sym) =>
- val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
- if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
- repl
- else tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- // Do not discard the types of existential ident's. The
- // symbol of the Ident itself cannot be listed in the
- // existential's parameters, so the resulting existential
- // type would be ill-formed.
- override def mapOver(tree: Tree) = tree match {
- case Ident(_) if tree.tpe.isStable => tree
- case _ => super.mapOver(tree)
- }
- }
- val tpe1 = extrapolate(tpe)
+ val tpe = deAlias(tpe0)
+ val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
var tparams0 = tparams
var tparams1 = tparams0 filter tpe1.contains
@@ -3112,11 +3346,7 @@ A type's typeSymbol should never be inspected directly.
tparams1 exists { p1 => p1 == p || (p1.info contains p) }
}
}
- if (tparams1.isEmpty) tpe1
- else tpe1 match {
- case ExistentialType(tparams2, tpe2) => ExistentialType(tparams1 ::: tparams2, tpe2)
- case _ => ExistentialType(tparams1, tpe1)
- }
+ newExistentialType(tparams1, tpe1)
}
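A standalone approximation of the final step above (a toy model; Tpe, Named, Existential and mentions are invented, and both extrapolation and indirectly referenced parameters are ignored here): only the parameters that the result still mentions are kept before wrapping it in an existential.

    object ExistentialAbstractionSketch {
      sealed trait Tpe { def mentions(p: String): Boolean }
      case class Named(name: String, args: List[Tpe] = Nil) extends Tpe {
        def mentions(p: String) = name == p || args.exists(_ mentions p)
      }
      case class Existential(quantified: List[String], underlying: Tpe) extends Tpe {
        def mentions(p: String) = underlying mentions p
      }

      def existentialAbstraction(tparams: List[String], tpe: Tpe): Tpe = {
        val kept = tparams filter (tpe mentions _)   // drop parameters the type no longer refers to
        if (kept.isEmpty) tpe else Existential(kept, tpe)
      }

      def main(args: Array[String]): Unit = {
        val tp = Named("Map", List(Named("K"), Named("Int")))
        // V is not referenced, so only K is quantified: Map[K, Int] forSome { type K }
        println(existentialAbstraction(List("K", "V"), tp))
      }
    }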
/** Remove any occurrences of type aliases from this type */
@@ -3157,6 +3387,20 @@ A type's typeSymbol should never be inspected directly.
mapOver(tp)
}
}
+
+ /** Type with all top-level occurrences of abstract types replaced by their bounds */
+ def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
+ case TypeRef(_, sym, _) if sym.isAbstractType =>
+ abstractTypesToBounds(tp.bounds.hi)
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ abstractTypesToBounds(tp.normalize)
+ case rtp @ RefinedType(parents, decls) =>
+ copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls)
+ case AnnotatedType(_, underlying, _) =>
+ abstractTypesToBounds(underlying)
+ case _ =>
+ tp
+ }
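In source terms, the effect of replacing a top-level abstract type by its upper bound can be seen in ordinary Scala (the class below is only an illustration, not compiler code): inside it, a value of type T is usable at its bound Seq[Int], which is the view abstractTypesToBounds exposes.

    abstract class AbstractTypesToBoundsSketch {
      type T <: Seq[Int]
      def t: T
      def firstElem: Int = t.head   // T is usable through its bound Seq[Int]
    }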
// Set to true for A* => Seq[A]
// (And it will only rewrite A* in method result types.)
@@ -3207,8 +3451,8 @@ A type's typeSymbol should never be inspected directly.
case DeBruijnBinder(pnames, ptypes, restpe) =>
val isType = pnames.head.isTypeName
val newParams = for (name <- pnames) yield
- if (isType) owner.newTypeParameter(NoPosition, name.toTypeName)
- else owner.newValueParameter(NoPosition, name)
+ if (isType) owner.newTypeParameter(name.toTypeName)
+ else owner.newValueParameter(name.toTermName)
paramStack = newParams :: paramStack
try {
(newParams, ptypes).zipped foreach ((p, t) => p setInfo this(t))
@@ -3229,7 +3473,7 @@ A type's typeSymbol should never be inspected directly.
private var uniques: util.HashSet[Type] = _
private var uniqueRunId = NoRunId
- private def unique[T <: Type](tp: T): T = {
+ protected def unique[T <: Type](tp: T): T = {
incCounter(rawTypeCount)
if (uniqueRunId != currentRunId) {
uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
@@ -3251,10 +3495,23 @@ A type's typeSymbol should never be inspected directly.
*/
class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
def this() = this(List(), List())
+
+ /* Syncnote: Type constraints are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of any
+ * variables should be necessary.
+ */
- private var lobounds = lo0
- private var hibounds = hi0
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound. [Actually
+ * guarding addLoBound/addHiBound somehow broke raw types so it
+ * only guards against being created with them.]
+ */
+ private var lobounds = lo0 filterNot (_.typeSymbolDirect eq NothingClass)
+ private var hibounds = hi0 filterNot (_.typeSymbolDirect eq AnyClass)
private var numlo = numlo0
private var numhi = numhi0
private var avoidWidening = avoidWidening0
@@ -3308,10 +3565,18 @@ A type's typeSymbol should never be inspected directly.
tc
}
- override def toString =
- (loBounds map (_.safeToString)).mkString("[ _>:(", ",", ") ") +
- (hiBounds map (_.safeToString)).mkString("| _<:(", ",", ") ] _= ") +
- inst.safeToString
+ override def toString = {
+ val boundsStr = {
+ val lo = loBounds filterNot (_.typeSymbolDirect eq NothingClass)
+ val hi = hiBounds filterNot (_.typeSymbolDirect eq AnyClass)
+ val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
+ val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
+
+ lostr ++ histr mkString ("[", " | ", "]")
+ }
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
}
trait AnnotationFilter extends TypeMap {
@@ -3327,46 +3592,57 @@ A type's typeSymbol should never be inspected directly.
def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
}
- /** A prototype for mapping a function over all possible types
- */
- abstract class TypeMap extends Function1[Type, Type] {
- def apply(tp: Type): Type
+ trait VariantTypeMap extends TypeMap {
+ private[this] var _variance = 1
- /** The variance relative to start. If you want variances to be significant, set
- * variance = 1
- * at the top of the typemap.
- */
- var variance = 0
+ override def variance = _variance
+ def variance_=(x: Int) = _variance = x
+
+ override protected def noChangeToSymbols(origSyms: List[Symbol]) = {
+ origSyms forall { sym =>
+ val v = variance
+ if (sym.isAliasType) variance = 0
+ val result = this(sym.info)
+ variance = v
+ result eq sym.info
+ }
+ }
- // #3731: return sym1 for which holds: pre bound sym.name to sym and
- // pre1 now binds sym.name to sym1, conceptually exactly the same
- // symbol as sym. The selection of sym on pre must be updated to the
- // selection of sym1 on pre1, since sym's info was probably updated
- // by the TypeMap to yield a new symbol, sym1 with transformed info.
- // @returns sym1
- protected def coevolveSym(pre: Type, pre1: Type, sym: Symbol): Symbol =
- if((pre ne pre1) && sym.isAliasType) // only need to rebind type aliases here, as typeRef already handles abstract types (they are allowed to be rebound more liberally)
- (pre, pre1) match {
- case (RefinedType(_, decls), RefinedType(_, decls1)) => // don't look at parents -- it would be an error to override alias types anyway
- //val sym1 =
- decls1.lookup(sym.name)
-// assert(decls.lookupAll(sym.name).toList.length == 1)
-// assert(decls1.lookupAll(sym.name).toList.length == 1)
-// assert(sym1.isAliasType)
-// println("coevolved "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
-// sym1
- case _ => // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
-// val sym1 = pre1.nonPrivateMember(sym.name).suchThat(sym => sym.isAliasType)
-// println("??coevolve "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
- sym
- }
- else sym
+ override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ map2Conserve(args, tparams) { (arg, tparam) =>
+ val v = variance
+ if (tparam.isContravariant) variance = -variance
+ else if (!tparam.isCovariant) variance = 0
+ val arg1 = this(arg)
+ variance = v
+ arg1
+ }
/** Map this function over given type */
- def mapOver(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
+ override def mapOver(tp: Type): Type = tp match {
+ case MethodType(params, result) =>
+ variance = -variance
+ val params1 = mapOver(params)
+ variance = -variance
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ variance = -variance
+ val tparams1 = mapOver(tparams)
+ variance = -variance
+ var result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case TypeBounds(lo, hi) =>
+ variance = -variance
+ val lo1 = this(lo)
+ variance = -variance
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case tr @ TypeRef(pre, sym, args) =>
val pre1 = this(pre)
- //val args1 = args mapConserve this(_)
val args1 =
if (args.isEmpty)
args
@@ -3378,7 +3654,28 @@ A type's typeSymbol should never be inspected directly.
else mapOverArgs(args, tparams)
}
if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, coevolveSym(pre, pre1, sym), args1)
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case _ =>
+ super.mapOver(tp)
+ }
+ }
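A standalone sketch of the variance bookkeeping in VariantTypeMap.mapOver (a toy walk, not the compiler's TypeMap; enterParams is an invented helper): entering a method parameter list or the low side of a TypeBounds flips the sign, and flipping twice restores the original variance.

    object VarianceFlipSketch {
      def main(args: Array[String]): Unit = {
        var variance = 1                       // start covariant, as VariantTypeMap callers do
        def enterParams[A](body: => A): A = {  // MethodType params / TypeBounds.lo flip the sign
          variance = -variance
          try body finally variance = -variance
        }
        enterParams {
          println(variance)                    // -1: parameter types are checked contravariantly
          enterParams(println(variance))       //  1: a parameter of a parameter is covariant again
        }
        println(variance)                      //  1: restored after leaving the parameter list
      }
    }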
+
+ /** A prototype for mapping a function over all possible types
+ */
+ abstract class TypeMap extends (Type => Type) {
+ def apply(tp: Type): Type
+
+ /** Mix in VariantTypeMap if you want variances to be significant.
+ */
+ def variance = 0
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = tp match {
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve this
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
case ThisType(_) => tp
case SingleType(pre, sym) =>
if (sym.isPackageClass) tp // short path
@@ -3388,16 +3685,12 @@ A type's typeSymbol should never be inspected directly.
else singleType(pre1, sym)
}
case MethodType(params, result) =>
- variance = -variance
val params1 = mapOver(params)
- variance = -variance
val result1 = this(result)
if ((params1 eq params) && (result1 eq result)) tp
else copyMethodType(tp, params1, result1.substSym(params, params1))
case PolyType(tparams, result) =>
- variance = -variance
val tparams1 = mapOver(tparams)
- variance = -variance
var result1 = this(result)
if ((tparams1 eq tparams) && (result1 eq result)) tp
else PolyType(tparams1, result1.substSym(tparams, tparams1))
@@ -3412,9 +3705,7 @@ A type's typeSymbol should never be inspected directly.
if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
else SuperType(thistp1, supertp1)
case TypeBounds(lo, hi) =>
- variance = -variance
val lo1 = this(lo)
- variance = -variance
val hi1 = this(hi)
if ((lo1 eq lo) && (hi1 eq hi)) tp
else TypeBounds(lo1, hi1)
@@ -3432,7 +3723,7 @@ A type's typeSymbol should never be inspected directly.
val tparams1 = mapOver(tparams)
var result1 = this(result)
if ((tparams1 eq tparams) && (result1 eq result)) tp
- else ExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
case OverloadedType(pre, alts) =>
val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
if (pre1 eq pre) tp
@@ -3470,37 +3761,29 @@ A type's typeSymbol should never be inspected directly.
// throw new Error("mapOver inapplicable for " + tp);
}
- protected final def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- map2Conserve(args, tparams) { (arg, tparam) =>
- val v = variance
- if (tparam.isContravariant) variance = -variance
- else if (!tparam.isCovariant) variance = 0
- val arg1 = this(arg)
- variance = v
- arg1
- }
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ args mapConserve this
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned. Overridden in VariantTypeMap.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]) =
+ origSyms forall (sym => sym.info eq this(sym.info))
/** Map this function over given scope */
def mapOver(scope: Scope): Scope = {
val elems = scope.toList
val elems1 = mapOver(elems)
if (elems1 eq elems) scope
- else new Scope(elems1)
+ else newScopeWith(elems1: _*)
}
-
+
/** Map this function over given list of symbols */
def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- val change = origSyms exists { sym =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- result ne sym.info
- }
- // map is not the identity --> do cloning properly
- if (change) cloneSymbolsAndModify(origSyms, TypeMap.this)
// fast path in case nothing changes due to map
- else origSyms
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
}
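The fast path above relies on reference equality to avoid rebuilding structures the map leaves untouched. A minimal sketch of that "conserve" pattern on plain lists; mapConserveSketch is an illustrative name, not a compiler API:

  object ConserveSketch {
    // Returns `xs` itself (the same reference) when f is the identity on every
    // element, so callers can use `eq` checks to skip copying enclosing structures.
    def mapConserveSketch[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
      val ys = xs map f
      val unchanged = (xs, ys).zipped forall (_ eq _)
      if (unchanged) xs else ys
    }

    def main(args: Array[String]): Unit = {
      val xs = List("a", "b", "c")
      println(mapConserveSketch(xs)(identity) eq xs)   // true: original list preserved
      println(mapConserveSketch(xs)(_ + "!") eq xs)    // false: a new list was built
    }
  }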
def mapOver(annot: AnnotationInfo): AnnotationInfo = {
@@ -3557,6 +3840,11 @@ A type's typeSymbol should never be inspected directly.
def traverse(tp: Type): Unit
def apply(tp: Type): Type = { traverse(tp); tp }
}
+
+ abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+ def result: T
+ def clear(): Unit
+ }
abstract class TypeCollector[T](initial: T) extends TypeTraverser {
var result: T = _
@@ -3567,15 +3855,29 @@ A type's typeSymbol should never be inspected directly.
}
}
- private val emptySymMap = immutable.Map[Symbol, Symbol]()
- private val emptySymCount = immutable.Map[Symbol, Int]()
+ /** A collector that tests for existential types appearing at given variance in a type
+ * @PP: Commenting out due to not being used anywhere.
+ */
+ // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
+ // variance = v
+ //
+ // def traverse(tp: Type) = tp match {
+ // case ExistentialType(_, _) if (variance == v) => result = true
+ // case _ => mapOver(tp)
+ // }
+ // }
+ //
+ // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
+ // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
- val eparams = for ((tparam, i) <- tparams.zipWithIndex) yield {
- clazz.newExistential(clazz.pos, newTypeName("?"+i)).setInfo(tparam.info.bounds)
- }
+ val eparams = mapWithIndex(tparams)((tparam, i) =>
+ clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds)
+
eparams map (_ substInfo (tparams, eparams))
}
+ def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
+ typeParamsToExistentials(clazz, clazz.typeParams)
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
@@ -3599,7 +3901,7 @@ A type's typeSymbol should never be inspected directly.
* the conversion of raw types to existential types might not have taken place
* in ClassFileparser.sigToType (where it is usually done).
*/
- object rawToExistential extends TypeMap {
+ def rawToExistential = new TypeMap {
private var expanded = immutable.Set[Symbol]()
private var generated = immutable.Set[Type]()
def apply(tp: Type): Type = tp match {
@@ -3607,7 +3909,7 @@ A type's typeSymbol should never be inspected directly.
if (expanded contains sym) AnyRefClass.tpe
else try {
expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams))
+ val eparams = mapOver(typeParamsToExistentials(sym))
existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
} finally {
expanded -= sym
@@ -3620,6 +3922,62 @@ A type's typeSymbol should never be inspected directly.
mapOver(tp)
}
}
+
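One visible effect of turning `object rawToExistential` into a `def` just above: each call now gets a fresh map with its own mutable `expanded`/`generated` state instead of sharing one instance. A small sketch of that difference, using an illustrative Counter rather than compiler code:

  object FreshInstanceSketch {
    class Counter { var n = 0; def bump(): Int = { n += 1; n } }

    object sharedCounter extends Counter   // one instance shared by every caller
    def freshCounter = new Counter         // a new instance per call, as rawToExistential now gets

    def main(args: Array[String]): Unit = {
      println((sharedCounter.bump(), sharedCounter.bump()))   // (1,2): state accumulates across uses
      println((freshCounter.bump(), freshCounter.bump()))     // (1,1): each call starts clean
    }
  }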
+ /** Used by existentialAbstraction.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance == 0) tp1
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
+ if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
+ repl
+ else tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
@@ -3627,6 +3985,7 @@ A type's typeSymbol should never be inspected directly.
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
+ var capturedPre = emptySymMap
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
@@ -3637,10 +3996,10 @@ A type's typeSymbol should never be inspected directly.
if (tree.symbol isNonBottomSubClass clazz) &&
(pre.widen.typeSymbol isNonBottomSubClass tree.symbol) =>
if (pre.isStable) { // XXX why is this in this method? pull it out and guard the call `annotationArgRewriter.transform(tree)`?
- val termSym =
- pre.typeSymbol.owner.newValue(
- pre.typeSymbol.pos,
- pre.typeSymbol.name.toTermName).setInfo(pre) // what symbol should really be used?
+ val termSym = (
+ pre.typeSymbol.owner.newValue(pre.typeSymbol.name.toTermName, pre.typeSymbol.pos) // what symbol should really be used?
+ setInfo pre
+ )
gen.mkAttributedQualifier(pre, termSym)
} else
giveup()
@@ -3657,8 +4016,6 @@ A type's typeSymbol should never be inspected directly.
annotationArgRewriter.transform(tree)
}
- var capturedPre = emptySymMap
-
def stabilize(pre: Type, clazz: Symbol): Type =
capturedPre.getOrElse(clazz, {
val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
@@ -3762,15 +4119,17 @@ A type's typeSymbol should never be inspected directly.
else instParamRelaxed(ps.tail, as.tail)
//Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
- if (sameLength(basesym.typeParams, baseargs)) {
+ if (sameLength(basesym.typeParams, baseargs))
instParam(basesym.typeParams, baseargs)
- } else {
- throw new TypeError(
- "something is wrong (wrong class file?): "+basesym+
- " with type parameters "+
- basesym.typeParams.map(_.name).mkString("[",",","]")+
- " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
- }
+ else
+ if (symclazz.tpe.parents.exists(_.isErroneous))
+ ErrorType // don't be too overzealous with throwing exceptions, see #2641
+ else
+ throw new Error(
+ "something is wrong (wrong class file?): "+basesym+
+ " with type parameters "+
+ basesym.typeParams.map(_.name).mkString("[",",","]")+
+ " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
case ExistentialType(tparams, qtpe) =>
capturedSkolems = capturedSkolems union tparams
toInstance(qtpe, clazz)
@@ -3801,7 +4160,7 @@ A type's typeSymbol should never be inspected directly.
case PolyType(bs, restp) =>
createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
case ExistentialType(bs, restp) =>
- createFromClonedSymbols(bs, restp)(ExistentialType(_, _))
+ createFromClonedSymbols(bs, restp)(newExistentialType)
case _ =>
tp
}
@@ -3996,7 +4355,7 @@ A type's typeSymbol should never be inspected directly.
val symowner = oldSym.owner
val bound = singletonBounds(actualsIndexed(actualIdx))
- val sym = symowner.newExistential(oldSym.pos, newTypeName(oldSym.name + ".type"))
+ val sym = symowner.newExistential(newTypeName(oldSym.name + ".type"), oldSym.pos)
sym.setInfo(bound)
sym.setFlag(oldSym.flags)
@@ -4053,7 +4412,7 @@ A type's typeSymbol should never be inspected directly.
case WildcardType =>
TypeVar(tp, new TypeConstraint)
case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(List(bounds.lo), List(bounds.hi)))
+ TypeVar(tp, new TypeConstraint(bounds))
case _ =>
mapOver(tp)
}
@@ -4156,15 +4515,20 @@ A type's typeSymbol should never be inspected directly.
private def commonOwner(tps: List[Type]): Symbol = {
if (tps.isEmpty) NoSymbol
else {
- commonOwnerMap.result = null
+ commonOwnerMap.clear()
tps foreach (commonOwnerMap traverse _)
val result = if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
debuglog(tps.mkString("commonOwner(", ", ", ") == " + result))
result
}
}
- private object commonOwnerMap extends TypeTraverser {
+
+ protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+ protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
var result: Symbol = _
+
+ def clear() { result = null }
private def register(sym: Symbol) {
// First considered type is the trivial result.
@@ -4181,12 +4545,15 @@ A type's typeSymbol should never be inspected directly.
case _ => mapOver(tp)
}
}
+
+ private lazy val commonOwnerMapObj = new CommonOwnerMap
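The commonOwner computation above reuses a single traverser object and resets it with clear() before each query. A stripped-down sketch of that reuse pattern; MaxCollector and the Int payload are illustrative only:

  object TraverserReuseSketch {
    abstract class IntTraverserWithResult {
      var result: Int = _
      def clear(): Unit = { result = Int.MinValue }
      def traverse(x: Int): Unit
    }

    // One reusable instance: no allocation per query, just a clear() call.
    private object maxCollector extends IntTraverserWithResult {
      def traverse(x: Int): Unit = if (x > result) result = x
    }

    def maxOf(xss: List[List[Int]]): Int = {
      maxCollector.clear()
      xss foreach (_ foreach maxCollector.traverse)
      maxCollector.result
    }

    def main(args: Array[String]): Unit =
      println(maxOf(List(List(1, 7), List(3))))   // 7
  }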
class MissingAliasControl extends ControlThrowable
val missingAliasException = new MissingAliasControl
class MissingTypeControl extends ControlThrowable
object adaptToNewRunMap extends TypeMap {
+
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
if (phase.flatClasses) {
sym
@@ -4196,10 +4563,14 @@ A type's typeSymbol should never be inspected directly.
definitions.RootPackage
} else if (sym.isModuleClass) {
val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
- val result = sourceModule1.moduleClass
- val msg = "sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
- assert(result != NoSymbol, msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
- result
+ var result = sourceModule1.moduleClass
+ if (result == NoSymbol) result = sourceModule1.initialize.moduleClass
+ if (result != NoSymbol) result
+ else {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
} else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
sym
} else {
@@ -4349,8 +4720,7 @@ A type's typeSymbol should never be inspected directly.
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- ((args1, args2, sym1.typeParams).zipped forall {
- (arg1, arg2, tparam) =>
+ forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
else if (arg1.isInstanceOf[TypeVar])
@@ -4360,7 +4730,7 @@ A type's typeSymbol should never be inspected directly.
// also: think what happens if there are embedded typevars?
if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
else true
- })
+ }
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
@@ -4381,7 +4751,7 @@ A type's typeSymbol should never be inspected directly.
*/
def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
def createDummyClone(pre: Type): Type = {
- val dummy = currentOwner.enclClass.newValue(NoPosition, nme.ANYNAME).setInfo(pre.widen)
+ val dummy = currentOwner.enclClass.newValue(nme.ANYNAME).setInfo(pre.widen)
singleType(ThisType(currentOwner.enclClass), dummy)
}
def maybeCreateDummyClone(pre: Type, sym: Symbol): Type = pre match {
@@ -4883,19 +5253,11 @@ A type's typeSymbol should never be inspected directly.
// --> thus, cannot be subtypes (Any/Nothing has already been checked)
}))
- /** True if all three arguments have the same number of elements and
- * the function is true for all the triples.
- */
- @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C], f: (A, B, C) => Boolean): Boolean = {
- if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
- else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail, f)
- }
-
def isSubArg(t1: Type, t2: Type, variance: Int) =
(variance > 0 || t2 <:< t1) && (variance < 0 || t1 <:< t2)
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean =
- corresponds3(tps1, tps2, tparams map (_.variance), isSubArg)
+ corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
@@ -5131,8 +5493,8 @@ A type's typeSymbol should never be inspected directly.
matchesType(tp1, res2, true)
case MethodType(_, _) =>
false
- case PolyType(tparams2, res2) =>
- tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple)
+ case PolyType(_, _) =>
+ false
case _ =>
alwaysMatchSimple || tp1 =:= tp2
}
@@ -5149,6 +5511,8 @@ A type's typeSymbol should never be inspected directly.
else matchesType(tp1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) =>
+ params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
false
}
@@ -5160,6 +5524,8 @@ A type's typeSymbol should never be inspected directly.
matchesType(res1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
matchesType(res1, tp2, alwaysMatchSimple)
}
@@ -5180,6 +5546,12 @@ A type's typeSymbol should never be inspected directly.
if (alwaysMatchSimple) matchesType(res1, tp2, true)
else lastTry
}
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ tp2 match {
+ case MethodType(Nil, res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case NullaryMethodType(res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case _ => lastTry
+ }
case _ =>
lastTry
}
@@ -5267,7 +5639,6 @@ A type's typeSymbol should never be inspected directly.
def solve(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean, depth: Int): Boolean = {
- val config = tvars zip (tparams zip variances)
def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
if (tvar.constr.inst == NoType) {
@@ -5276,15 +5647,17 @@ A type's typeSymbol should never be inspected directly.
val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
//Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
var cyclic = bound contains tparam
- for ((tvar2, (tparam2, variance2)) <- config) {
- if (tparam2 != tparam &&
- ((bound contains tparam2) ||
- up && (tparam2.info.bounds.lo =:= tparam.tpe) ||
- !up && (tparam2.info.bounds.hi =:= tparam.tpe))) {
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpe)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpe)
+ )
+ if (ok) {
if (tvar2.constr.inst eq null) cyclic = true
solveOne(tvar2, tparam2, variance2)
}
- }
+ })
if (!cyclic) {
if (up) {
if (bound.typeSymbol != AnyClass)
@@ -5323,17 +5696,11 @@ A type's typeSymbol should never be inspected directly.
}
// println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- for ((tvar, (tparam, variance)) <- config)
- solveOne(tvar, tparam, variance)
-
+ foreach3(tvars, tparams, variances)(solveOne)
tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
}
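The rewrite of `solve` replaces the zipped `config` list with `foreach3`, which walks the three lists in lockstep without building intermediate pairs. A small sketch of the equivalence, assuming a foreach3 with the signature introduced in Collections.scala later in this diff; the zipped reference version here stands in for the real while-loop implementation:

  object Foreach3Sketch {
    def foreach3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Unit): Unit =
      (xs1, xs2, xs3).zipped foreach f

    def main(args: Array[String]): Unit = {
      val tvars     = List("tv1", "tv2")
      val tparams   = List("T", "U")
      val variances = List(1, -1)

      // old style: allocate nested pairs, then destructure them
      for ((tvar, (tparam, v)) <- tvars zip (tparams zip variances))
        println((tvar, tparam, v))

      // new style: no intermediate tuples
      foreach3(tvars, tparams, variances)((tvar, tparam, v) => println((tvar, tparam, v)))
    }
  }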
/** Do type arguments `targs` conform to formal parameters `tparams`?
- *
- * @param tparams ...
- * @param targs ...
- * @return ...
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
var bounds = instantiatedBounds(pre, owner, tparams, targs)
@@ -5364,7 +5731,7 @@ A type's typeSymbol should never be inspected directly.
val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
val transposed = padded.transpose
- val columns: List[Column[List[Type]]] = sorted.zipWithIndex map {
+ val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
case ((k, v), idx) =>
Column(str(k), (xs: List[Type]) => str(xs(idx)), true)
}
@@ -5373,6 +5740,23 @@ A type's typeSymbol should never be inspected directly.
val formatted = tableDef.table(transposed)
println("** Depth is " + depth + "\n" + formatted)
}
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to any
+ * other types in the list (including itself).
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+ if (ts.isEmpty) Nil
+ else {
+ val sym = ts.head.typeSymbol
+ require(ts.tail forall (_.typeSymbol == sym), ts)
+ for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+ p -> in
+ }
+ }
/** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
* compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
@@ -5419,6 +5803,19 @@ A type's typeSymbol should never be inspected directly.
// merging, strip targs that refer to bound tparams (when we're computing the lub of type
// constructors.) Also filter out all types that are a subtype of some other type.
if (isUniformFrontier) {
+ if (settings.debug.value || printLubs) {
+ val fbounds = findRecursiveBounds(ts0)
+ if (fbounds.nonEmpty) {
+ println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
+ for ((p0, p1) <- fbounds) {
+ val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
+
+ println(" " + p0.fullLocationString + " appears in " + desc)
+ println(" " + p1 + " " + p1.info.bounds)
+ }
+ println("")
+ }
+ }
val tails = tsBts map (_.tail)
mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
case Some(tp) => tp :: loop(tails)
@@ -5444,7 +5841,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- val initialBTSes = ts map (_.baseTypeSeq.toList)
+ val initialBTSes = ts map (_.baseTypeSeq.toList filter (_.typeSymbol.isPublic))
if (printLubs)
printLubMatrix(ts zip initialBTSes toMap, depth)
@@ -5487,22 +5884,11 @@ A type's typeSymbol should never be inspected directly.
case _ =>
t
}
-
- /** A collector that tests for existential types appearing at given variance in a type */
- class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) {
- def traverse(tp: Type) = tp match {
- case ExistentialType(_, _) if (variance == v) => result = true
- case _ => mapOver(tp)
- }
- def init() = {
- variance = 1
- this
- }
+ def elimRefinement(t: Type) = t match {
+ case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
+ case _ => t
}
- val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
- val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
-
/** Eliminate from list of types all elements which are a subtype
* of some other element of the list. */
private def elimSub(ts: List[Type], depth: Int): List[Type] = {
@@ -5614,13 +6000,13 @@ A type's typeSymbol should never be inspected directly.
}
/** The least upper bound wrt <:< of a list of types */
- def lub(ts: List[Type], depth: Int): Type = {
+ private def lub(ts: List[Type], depth: Int): Type = {
def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
case List() => NothingClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
@@ -5648,9 +6034,16 @@ A type's typeSymbol should never be inspected directly.
val lubType =
if (phase.erasedTypes || depth == 0) lubBase
else {
- val lubRefined = refinedType(lubParents, lubOwner)
+ val lubRefined = refinedType(lubParents, lubOwner)
val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || narrowts.exists(t => !refines(t, sym))
+ )
def lubsym(proto: Symbol): Symbol = {
val prototp = lubThisType.memberInfo(proto)
val syms = narrowts map (t =>
@@ -5659,7 +6052,7 @@ A type's typeSymbol should never be inspected directly.
if (syms contains NoSymbol) NoSymbol
else {
val symtypes =
- (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
else if (symtypes.tail forall (symtypes.head =:=))
@@ -5667,7 +6060,7 @@ A type's typeSymbol should never be inspected directly.
else {
def lubBounds(bnds: List[TypeBounds]): TypeBounds =
TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.pos, proto.name.toTypeName)
+ lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
.setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
}
}
@@ -5679,16 +6072,15 @@ A type's typeSymbol should never be inspected directly.
// efficiency.
alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
}
- for (sym <- lubBase.nonPrivateMembers) {
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- if (!sym.isClass && !sym.isConstructor && !isGetClass(sym) && (narrowts forall (t => refines(t, sym))))
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lubsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try {
+ val lsym = lubsym(sym)
+ if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym)
+ } catch {
+ case ex: NoCommonType =>
+ }
}
if (lubRefined.decls.isEmpty) lubBase
else if (!verifyLubs) lubRefined
@@ -5759,13 +6151,13 @@ A type's typeSymbol should never be inspected directly.
/** The greatest lower bound wrt <:< of a list of types, which have been normalized
* wrt elimSuper */
- private def glbNorm(ts: List[Type], depth: Int): Type = {
+ protected def glbNorm(ts: List[Type], depth: Int): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
case List() => AnyClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, glbNorm(matchingRestypes(ts, params map (_.tpe)), depth))
@@ -5868,6 +6260,26 @@ A type's typeSymbol should never be inspected directly.
if (ts exists (_.isNotNull)) res.notNull else res
}
+
+ /** A list of the typevars in a type. */
+ def typeVarsInType(tp: Type): List[TypeVar] = {
+ var tvs: List[TypeVar] = Nil
+ tp foreach {
+ case t: TypeVar => tvs ::= t
+ case _ =>
+ }
+ tvs.reverse
+ }
+ /** Make each type var in this type use its original type for comparisons instead
+ * of collecting constraints.
+ */
+ def suspendTypeVarsInType(tp: Type): List[TypeVar] = {
+ val tvs = typeVarsInType(tp)
+ // !!! Is it somehow guaranteed that this will not break under nesting?
+ // In general one has to save and restore the contents of the field...
+ tvs foreach (_.suspended = true)
+ tvs
+ }
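The comment above notes that `suspended` is only set here, leaving it to the caller to restore the previous state. A defensive save-and-restore sketch of that idea on a toy flag holder; SuspendableSketch and Flagged are illustrative, not the compiler's TypeVar:

  object SuspendableSketch {
    class Flagged { var suspended = false }

    // Restores each flag to its previous value even if `body` throws,
    // so nested uses cannot clobber an outer suspension.
    def withSuspended[T](xs: List[Flagged])(body: => T): T = {
      val saved = xs map (_.suspended)
      xs foreach (f => f.suspended = true)
      try body
      finally (xs, saved).zipped foreach ((f, s) => f.suspended = s)
    }

    def main(args: Array[String]): Unit = {
      val fs = List(new Flagged, new Flagged)
      withSuspended(fs) { println(fs map (_.suspended)) }   // List(true, true)
      println(fs map (_.suspended))                         // List(false, false)
    }
  }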
/** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
@@ -5896,38 +6308,39 @@ A type's typeSymbol should never be inspected directly.
else if (args exists (arg => isValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
else Some(typeRef(pre, sym, List(lub(args))))
}
- } else {
- val args = (sym.typeParams, argss.transpose).zipped map { (tparam, as) =>
- if (depth == 0) {
- if (tparam.variance == variance) {
- // Take the intersection of the upper bounds of the type parameters
- // rather than falling all the way back to "Any", otherwise we end up not
- // conforming to bounds.
- val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
- if (bounds0.isEmpty) AnyClass.tpe
- else intersectionType(bounds0)
- }
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
+ }
+ else {
+ val args = map2(sym.typeParams, argss.transpose) { (tparam, as) =>
+ if (depth == 0) {
+ if (tparam.variance == variance) {
+ // Take the intersection of the upper bounds of the type parameters
+ // rather than falling all the way back to "Any", otherwise we end up not
+ // conforming to bounds.
+ val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
+ if (bounds0.isEmpty) AnyClass.tpe
+ else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
}
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ }
+ else {
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
else {
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
-
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
+
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
}
}
}
+ }
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
}
@@ -6012,148 +6425,6 @@ A type's typeSymbol should never be inspected directly.
throw new NoCommonType(tps)
}
-
- // TODO: this desperately needs to be cleaned up
- // plan: split into kind inference and subkinding
- // every Type has a (cached) Kind
- def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
- checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
-
- /** Check well-kindedness of type application (assumes arities are already checked) -- @M
- *
- * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
- * (checked one type member at a time -- in that case, prefix is the name of the type alias)
- *
- * Type application is just like value application: it's "contravariant" in the sense that
- * the type parameters of the supplied type arguments must conform to the type parameters of
- * the required type parameters:
- * - their bounds must be less strict
- * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
- * - @M TODO: are these conditions correct,sufficient&necessary?
- *
- * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
- * List's type parameter is also covariant and its bounds are weaker than <: Int
- */
- def checkKindBounds0(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol, explainErrors: Boolean): List[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] = {
- var error = false
-
- def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
- def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
-
- /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
- *
- * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
- */
- def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
-
- // check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(
- hkargs: List[Symbol],
- arg: Symbol,
- param: Symbol,
- paramowner: Symbol,
- underHKParams: List[Symbol],
- withHKArgs: List[Symbol]
- ): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
-
- def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
- // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
- val hkparams = param.typeParams
-
- if (settings.debug.value) {
- log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
- log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
- log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
- }
-
- if (!sameLength(hkargs, hkparams)) {
- if (arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
- else {error = true; (List((arg, param)), Nil, Nil) } // shortcut: always set error, whether explainTypesOrNot
- }
- else {
- val _arityMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _varianceMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _stricterBounds = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
-
- def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true}
- def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true }
- def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as }
- def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _varianceMismatches ++= as }
- def stricterBounds(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _stricterBounds ++= as }
-
- for ((hkarg, hkparam) <- hkargs zip hkparams) {
- if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
- if (!variancesMatch(hkarg, hkparam))
- varianceMismatch(hkarg, hkparam)
-
- // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
- // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
- // --> their arguments use different symbols, but are conceptually the same
- // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- val declaredBounds = transformedBounds(hkparam, paramowner)
- val declaredBoundsInst = bindHKParams(declaredBounds)
- val argumentBounds = transform(hkarg.info.bounds, owner)
- if (!(declaredBoundsInst <:< argumentBounds))
- stricterBound(hkarg, hkparam)
-
- debuglog(
- "checkKindBoundsHK base case: " + hkparam +
- " declared bounds: " + declaredBounds +
- " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
- "checkKindBoundsHK base case: "+ hkarg +
- " has bounds: " + argumentBounds
- )
- }
- else {
- debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
- val (am, vm, sb) = checkKindBoundsHK(
- hkarg.typeParams,
- hkarg,
- hkparam,
- paramowner,
- underHKParams ++ hkparam.typeParams,
- withHKArgs ++ hkarg.typeParams
- )
- arityMismatches(am)
- varianceMismatches(vm)
- stricterBounds(sb)
- }
- if (!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encountered an error
- }
- if (!explainErrors) (Nil, Nil, Nil)
- else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
- }
- }
-
- val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])]
- if (settings.debug.value &&(tparams.nonEmpty || targs.nonEmpty))
- log("checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")")
-
- for {
- (tparam, targ) <- tparams zip targs
- // Prevent WildcardType from causing kind errors, as typevars may be higher-order
- if (targ != WildcardType) && (targ.isHigherKinded || tparam.typeParams.nonEmpty)
- } {
- // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
- targ.typeSymbolDirect.info // force symbol load for #4205
- val tparamsHO = targ.typeParams
-
- val (arityMismatches, varianceMismatches, stricterBounds) = (
- // NOTE: *not* targ.typeSymbol, which normalizes
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO)
- )
- if (explainErrors) {
- if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
- errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
- }
- }
- else if (error)
- return List((NoType, NoSymbol, Nil, Nil, Nil))
- }
-
- errors.toList
- }
-
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
@@ -6161,6 +6432,12 @@ A type's typeSymbol should never be inspected directly.
def this(msg: String) = this(NoPosition, msg)
}
+ // TODO: RecoverableCyclicReference should be separated from TypeError,
+ // but that would be a big change. Left for further refactoring.
+ /** An exception for cyclic references from which we can recover */
+ case class RecoverableCyclicReference(sym: Symbol)
+ extends TypeError("illegal cyclic reference involving " + sym)
+
class NoCommonType(tps: List[Type]) extends Throwable(
"lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
@@ -6169,14 +6446,11 @@ A type's typeSymbol should never be inspected directly.
def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
}
- /** An exception signalling a variance annotation/usage conflict */
- class VarianceError(msg: String) extends TypeError(msg)
-
/** The current indentation string for traces */
private var indent: String = ""
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
- private def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
+ protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
indent = indent + " "
val result = p(tp1, arg2)
@@ -6221,4 +6495,16 @@ A type's typeSymbol should never be inspected directly.
final val maxTostringRecursions = 50
private var tostringRecursions = 0
+
+ protected def typeToString(tpe: Type): String =
+ if (tostringRecursions >= maxTostringRecursions)
+ "..."
+ else
+ try {
+ tostringRecursions += 1
+ tpe.safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+
}
diff --git a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
index 1a8540c158..b21b33e138 100644
--- a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
@@ -184,6 +184,8 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
case _ => errorBadSignature("bad name tag: " + tag)
}
}
+ protected def readTermName(): TermName = readName().toTermName
+ protected def readTypeName(): TypeName = readName().toTypeName
/** Read a symbol */
protected def readSymbol(): Symbol = {
@@ -211,7 +213,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
return NoSymbol
if (tag == EXTMODCLASSref) {
- val moduleVar = owner.info.decl(nme.moduleVarName(name))
+ val moduleVar = owner.info.decl(nme.moduleVarName(name.toTermName))
if (moduleVar.isLazyAccessor)
return moduleVar.lazyAccessor.lazyAccessor
}
@@ -223,7 +225,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
// (2) Try with expanded name. Can happen if references to private
// symbols are read from outside: for instance when checking the children
// of a class. See #1722.
- fromName(nme.expandedName(name, owner)) orElse {
+ fromName(nme.expandedName(name.toTermName, owner)) orElse {
// (3) Try as a nested object symbol.
nestedObjectSymbol orElse {
// (4) Otherwise, fail.
@@ -295,15 +297,11 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
case MODULEsym =>
val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
if (isModuleRoot) moduleRoot
- else {
- val m = owner.newModule(name, clazz)
- clazz.sourceModule = m
- m
- }
+ else owner.newLinkedModule(clazz)
case VALsym =>
if (isModuleRoot) { assert(false); NoSymbol }
- else if (isMethodFlag) owner.newMethod(name)
- else owner.newValue(name)
+ else if (isMethodFlag) owner.newMethod(name.toTermName)
+ else owner.newValue(name.toTermName)
case _ =>
errorBadSignature("bad symbol tag: " + tag)
@@ -378,7 +376,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
// that it isn't right here. See #4757 for the immediate
// motivation to fix it.
val tparams = until(end, readSymbolRef) map (_ setFlag EXISTENTIAL)
- ExistentialType(tparams, restpe)
+ newExistentialType(tparams, restpe)
case ANNOTATEDtpe =>
var typeRef = readNat()
@@ -549,13 +547,13 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
case MODULEtree =>
setSymModsName()
- ModuleDef(mods, name, readTemplateRef())
+ ModuleDef(mods, name.toTermName, readTemplateRef())
case VALDEFtree =>
setSymModsName()
val tpt = readTreeRef()
val rhs = readTreeRef()
- ValDef(mods, name, tpt, rhs)
+ ValDef(mods, name.toTermName, tpt, rhs)
case DEFDEFtree =>
setSymModsName()
@@ -563,7 +561,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
val tpt = readTreeRef()
val rhs = readTreeRef()
- DefDef(mods, name, tparams, vparamss, tpt, rhs)
+ DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
case TYPEDEFtree =>
setSymModsName()
@@ -575,7 +573,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
setSymName()
val rhs = readTreeRef()
val params = until(end, readIdentRef)
- LabelDef(name, params, rhs)
+ LabelDef(name.toTermName, params, rhs)
case IMPORTtree =>
setSym()
diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
index 6980d28bfb..0092f73fe3 100644
--- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala
@@ -38,6 +38,7 @@ abstract class MutableSettings extends AbsSettings {
def explaintypes: BooleanSetting
def verbose: BooleanSetting
def uniqid: BooleanSetting
+ def Yshowsymkinds: BooleanSetting
def Xprintpos: BooleanSetting
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
diff --git a/src/compiler/scala/reflect/internal/transform/Erasure.scala b/src/compiler/scala/reflect/internal/transform/Erasure.scala
index c8cb6febfa..d59fc6d564 100644
--- a/src/compiler/scala/reflect/internal/transform/Erasure.scala
+++ b/src/compiler/scala/reflect/internal/transform/Erasure.scala
@@ -59,7 +59,7 @@ trait Erasure {
// included (use pre.baseType(cls.owner)).
//
// This requires that cls.isClass.
- @inline protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
+ protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
}
@@ -75,7 +75,7 @@ trait Erasure {
case TypeRef(pre, sym, args) =>
if (sym == ArrayClass)
if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
- else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectClass.tpe)
+ else if (args.head.typeSymbol.isBottomClass) ObjectArray
else typeRef(apply(pre), sym, args map this)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) erasedTypeRef(ObjectClass)
else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala
new file mode 100644
index 0000000000..94672097c4
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/util/Collections.scala
@@ -0,0 +1,158 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.internal.util
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+import mutable.ListBuffer
+
+/** Profiler driven changes.
+ */
+trait Collections {
+ /** True if all three arguments have the same number of elements and
+ * the function is true for all the triples.
+ */
+ @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])
+ (f: (A, B, C) => Boolean): Boolean = (
+ if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
+ else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail)(f)
+ )
+
+ /** All these mm methods are "deep map" style methods for
+ * mapping etc. on a list of lists.
+ */
+ final def mexists[A](xss: List[List[A]])(p: A => Boolean) =
+ xss exists (_ exists p)
+ final def mmap[A, B](xss: List[List[A]])(f: A => B) =
+ xss map (_ map f)
+ final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
+ xss foreach (_ foreach f)
+ final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
+ for (xs <- xss; x <- xs)
+ if (p(x)) return Some(x)
+ None
+ }
+ final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
+ for (xs <- xss; x <- xs; if p(x)) yield x
+
+ final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb += f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+ final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
+ if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
+ else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
+ }
+ final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb ++= f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+
+ final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
+ val lb = new ListBuffer[B]
+ var index = 0
+ var ys = xs
+ while (!ys.isEmpty) {
+ lb += f(ys.head, index)
+ ys = ys.tail
+ index += 1
+ }
+ lb.toList
+ }
+ final def collectMap2[A, B, C](xs1: List[A], xs2: List[B])(p: (A, B) => Boolean): Map[A, B] = {
+ if (xs1.isEmpty || xs2.isEmpty)
+ return Map()
+
+ val buf = immutable.Map.newBuilder[A, B]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ val x1 = ys1.head
+ val x2 = ys2.head
+ if (p(x1, x2))
+ buf += ((x1, x2))
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ buf.result
+ }
+ final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ }
+ final def foreach3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ f(ys1.head, ys2.head, ys3.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ }
+ final def exists2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (f(ys1.head, ys2.head))
+ return true
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ false
+ }
+ final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (!f(ys1.head, ys2.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ true
+ }
+ final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ if (!f(ys1.head, ys2.head, ys3.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ true
+ }
+}
+
+object Collections extends Collections { }
+
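A brief usage sketch of the new Collections helpers, matching the call sites rewritten earlier in this diff (map2 for pairwise combination, mapWithIndex for numbered existentials, corresponds3 for isSubArgs-style checks). It assumes the Collections object added in this file is on the classpath; the values are illustrative:

  import scala.reflect.internal.util.Collections._

  object CollectionsUsageSketch {
    def main(args: Array[String]): Unit = {
      println(map2(List(1, 2, 3), List(10, 20, 30))(_ + _))            // List(11, 22, 33)
      println(mapWithIndex(List("a", "b"))((s, i) => s + i))           // List(a0, b1)
      println(corresponds3(List(1, 2), List(1, 2), List(true, true))(
        (x, y, ok) => ok && x == y))                                   // true
      println(forall2(List(1, 2), List(2, 3))(_ < _))                  // true
    }
  }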
diff --git a/src/compiler/scala/tools/nsc/util/Origins.scala b/src/compiler/scala/reflect/internal/util/Origins.scala
index f8ba34ae3c..b9985c8f50 100644
--- a/src/compiler/scala/tools/nsc/util/Origins.scala
+++ b/src/compiler/scala/reflect/internal/util/Origins.scala
@@ -3,10 +3,12 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
-package util
+package scala.reflect
+package internal.util
-import scala.reflect.NameTransformer._
+import NameTransformer._
+import scala.collection.{ mutable, immutable }
+import Origins._
/** A debugging class for logging from whence a method is being called.
* Say you wanted to discover who was calling phase_= in SymbolTable.
@@ -33,10 +35,6 @@ import scala.reflect.NameTransformer._
}}}
*
*/
-
-import scala.collection.{ mutable, immutable }
-import Origins._
-
abstract class Origins {
type Rep
def newRep(xs: StackSlice): Rep
@@ -94,7 +92,9 @@ object Origins {
def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz))
def apply(tag: String, orElse: => Origins): Origins = {
counters find (_.tag == tag) getOrElse {
- returning(orElse setTag tag)(counters += _)
+ val res = orElse setTag tag
+ counters += res
+ res
}
}
diff --git a/src/compiler/scala/reflect/runtime/ConversionUtil.scala b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
index bd40200310..e75fd78590 100644
--- a/src/compiler/scala/reflect/runtime/ConversionUtil.scala
+++ b/src/compiler/scala/reflect/runtime/ConversionUtil.scala
@@ -17,36 +17,42 @@ trait ConversionUtil { self: SymbolTable =>
private val toScalaMap = new HashMap[J, S]
private val toJavaMap = new HashMap[S, J]
- def enter(j: J, s: S) = {
+ def enter(j: J, s: S) = synchronized {
debugInfo("cached: "+j+"/"+s)
toScalaMap(j) = s
toJavaMap(s) = j
}
- def toScala(key: J)(body: => S): S = toScalaMap get key match {
- case Some(v) =>
- v
- case none =>
- val result = body
- enter(key, result)
- result
+ def toScala(key: J)(body: => S): S = synchronized {
+ toScalaMap get key match {
+ case Some(v) =>
+ v
+ case none =>
+ val result = body
+ enter(key, result)
+ result
+ }
}
- def toJava(key: S)(body: => J): J = toJavaMap get key match {
- case Some(v) =>
- v
- case none =>
- val result = body
- enter(result, key)
- result
+ def toJava(key: S)(body: => J): J = synchronized {
+ toJavaMap get key match {
+ case Some(v) =>
+ v
+ case none =>
+ val result = body
+ enter(result, key)
+ result
+ }
}
- def toJavaOption(key: S)(body: => Option[J]): Option[J] = toJavaMap get key match {
- case None =>
- val result = body
- for (value <- result) enter(value, key)
- result
- case some => some
+ def toJavaOption(key: S)(body: => Option[J]): Option[J] = synchronized {
+ toJavaMap get key match {
+ case None =>
+ val result = body
+ for (value <- result) enter(value, key)
+ result
+ case some => some
+ }
}
}
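The ConversionUtil change wraps every cache access in `synchronized` so the paired Java-to-Scala and Scala-to-Java maps stay consistent under concurrent use. A condensed, generic sketch of that memoizing two-way cache; TwoWayCacheSketch is an illustrative stand-in, not the compiler's TwoWayCache:

  import scala.collection.mutable.HashMap

  class TwoWayCacheSketch[J, S] {
    private val toS = new HashMap[J, S]
    private val toJ = new HashMap[S, J]

    private def enter(j: J, s: S): Unit = { toS(j) = s; toJ(s) = j }

    // Compute-and-cache in one critical section, as in the patched toScala/toJava.
    def toScala(key: J)(body: => S): S = synchronized {
      toS.getOrElse(key, { val s = body; enter(key, s); s })
    }
    def toJava(key: S)(body: => J): J = synchronized {
      toJ.getOrElse(key, { val j = body; enter(j, key); j })
    }
  }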
diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala
index aa0572aceb..4c49c0221f 100644
--- a/src/compiler/scala/reflect/runtime/JavaToScala.scala
+++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala
@@ -45,7 +45,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
def javaClass(path: String): jClass[_] =
javaClass(path, defaultReflectiveClassLoader())
def javaClass(path: String, classLoader: JClassLoader): jClass[_] =
- classLoader.loadClass(path)
+ Class.forName(path, true, classLoader)
/** Does `path` correspond to a Java class with that fully qualified name? */
def isJavaClass(path: String): Boolean =
@@ -115,7 +115,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* @param jtvar The Java type variable
*/
private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): Symbol = {
- val tparam = sOwner(jtvar).newTypeParameter(NoPosition, newTypeName(jtvar.getName))
+ val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName))
.setInfo(new TypeParamCompleter(jtvar))
tparamCache enter (jtvar, tparam)
tparam
@@ -154,7 +154,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
override def load(sym: Symbol) = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
- val flags = toScalaFlags(jclazz.getModifiers, isClass = true)
+ val flags = toScalaClassFlags(jclazz.getModifiers)
clazz setFlag (flags | JAVA)
if (module != NoSymbol) {
module setFlag (flags & PRIVATE | JAVA)
@@ -175,7 +175,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
load(sym)
completeRest()
}
- def completeRest(): Unit = {
+ def completeRest(): Unit = self.synchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -241,16 +241,32 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* The Scala owner of the Scala class corresponding to the Java class `jclazz`
*/
private def sOwner(jclazz: jClass[_]): Symbol = {
- if (jclazz.isMemberClass)
- followStatic(classToScala(jclazz.getEnclosingClass), jclazz.getModifiers)
- else if (jclazz.isLocalClass)
- methodToScala(jclazz.getEnclosingMethod) orElse constrToScala(jclazz.getEnclosingConstructor)
- else if (jclazz.isPrimitive || jclazz.isArray)
+ if (jclazz.isMemberClass) {
+ val jEnclosingClass = jclazz.getEnclosingClass
+ val sEnclosingClass = classToScala(jEnclosingClass)
+ followStatic(sEnclosingClass, jclazz.getModifiers)
+ } else if (jclazz.isLocalClass) {
+ val jEnclosingMethod = jclazz.getEnclosingMethod
+ if (jEnclosingMethod != null) {
+ methodToScala(jEnclosingMethod)
+ } else {
+ val jEnclosingConstructor = jclazz.getEnclosingConstructor
+ constrToScala(jEnclosingConstructor)
+ }
+ } else if (jclazz.isPrimitive || jclazz.isArray) {
ScalaPackageClass
- else if (jclazz.getPackage != null)
- packageToScala(jclazz.getPackage)
- else
+ } else if (jclazz.getPackage != null) {
+ val jPackage = jclazz.getPackage
+ packageToScala(jPackage)
+ } else {
+ // @eb: a weird classloader might return a null package for something with a non-empty package name
+ // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
+ // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
+ // however, I think it's better to blow up, since weirdness of the class loader might bite us elsewhere
+ val jPackageName = jclazz.getName.substring(0, Math.max(jclazz.getName.lastIndexOf("."), 0))
+ assert(jPackageName == "")
EmptyPackageClass
+ }
}
/**
@@ -272,7 +288,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
*/
private def approximateMatch(sym: Symbol, jstr: String): Boolean =
(sym.name.toString == jstr) ||
- sym.isPrivate && nme.expandedName(sym.name, sym.owner).toString == jstr
+ sym.isPrivate && nme.expandedName(sym.name.toTermName, sym.owner).toString == jstr
/**
* Find declarations or definition in class `clazz` that maps to a Java
@@ -295,8 +311,10 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* @return A Scala method object that corresponds to `jmeth`.
*/
def methodToScala(jmeth: jMethod): Symbol = methodCache.toScala(jmeth) {
- val owner = followStatic(classToScala(jmeth.getDeclaringClass), jmeth.getModifiers)
- lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth)
+ val jOwner = jmeth.getDeclaringClass
+ var sOwner = classToScala(jOwner)
+ sOwner = followStatic(sOwner, jmeth.getModifiers)
+ lookup(sOwner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth)
}
/**
@@ -335,16 +353,27 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val name = newTermName(fullname drop (split + 1))
var pkg = owner.info decl name
if (pkg == NoSymbol) {
- pkg = owner.newPackage(NoPosition, name)
+ pkg = owner.newPackage(name)
pkg.moduleClass setInfo new LazyPackageType
- pkg setInfo pkg.moduleClass.tpe
- owner.info.decls enter pkg
+ pkg setInfoAndEnter pkg.moduleClass.tpe
info("made Scala "+pkg)
} else if (!pkg.isPackage)
throw new ReflectError(pkg+" is not a package")
pkg.moduleClass
}
+ private def scalaSimpleName(jclazz: jClass[_]): TypeName = {
+ val owner = sOwner(jclazz)
+ val enclosingClass = jclazz.getEnclosingClass
+ var prefix = if (enclosingClass != null) enclosingClass.getName else ""
+ val isObject = owner.isModuleClass && !owner.isPackageClass
+ if (isObject && !prefix.endsWith(nme.MODULE_SUFFIX_STRING)) prefix += nme.MODULE_SUFFIX_STRING
+ assert(jclazz.getName.startsWith(prefix))
+ var name = jclazz.getName.substring(prefix.length)
+ name = name.substring(name.lastIndexOf(".") + 1)
+ newTypeName(name)
+ }
+
/**
* The Scala class that corresponds to a given Java class.
* @param jclazz The Java class
@@ -353,33 +382,55 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* not available, wrapped from the Java reflection info.
*/
def classToScala(jclazz: jClass[_]): Symbol = classCache.toScala(jclazz) {
- if (jclazz.isMemberClass && !nme.isImplClassName(jclazz.getName)) {
- val sym = sOwner(jclazz).info.decl(newTypeName(jclazz.getSimpleName))
- assert(sym.isType, sym+"/"+jclazz+"/"+sOwner(jclazz)+"/"+jclazz.getSimpleName)
- sym.asInstanceOf[ClassSymbol]
- } else if (jclazz.isLocalClass || invalidClassName(jclazz.getName)) {
- // local classes and implementation classes not preserved by unpickling - treat as Java
- jclassAsScala(jclazz)
- } else if (jclazz.isArray) {
- ArrayClass
- } else jclazz match {
- case java.lang.Void.TYPE => UnitClass
- case java.lang.Byte.TYPE => ByteClass
- case java.lang.Character.TYPE => CharClass
- case java.lang.Short.TYPE => ShortClass
- case java.lang.Integer.TYPE => IntClass
- case java.lang.Long.TYPE => LongClass
- case java.lang.Float.TYPE => FloatClass
- case java.lang.Double.TYPE => DoubleClass
- case java.lang.Boolean.TYPE => BooleanClass
- case _ =>
+ val jname = javaTypeName(jclazz)
+ val owner = sOwner(jclazz)
+ val simpleName = scalaSimpleName(jclazz)
+
+ val sym = {
+ def lookup = {
+ def coreLookup(name: Name): Symbol = {
+ val sym = owner.info.decl(name)
+ sym orElse {
+ if (name.startsWith(nme.NAME_JOIN_STRING))
+ coreLookup(name.subName(1, name.length))
+ else
+ NoSymbol
+ }
+ }
+
+ if (nme.isModuleName(simpleName)) {
+ val moduleName = nme.stripModuleSuffix(simpleName).toTermName
+ val sym = coreLookup(moduleName)
+ if (sym == NoSymbol) sym else sym.moduleClass
+ } else {
+ coreLookup(simpleName)
+ }
+ }
+
+ if (jclazz.isMemberClass && !nme.isImplClassName(jname)) {
+ lookup
+ } else if (jclazz.isLocalClass || invalidClassName(jname)) {
+ // local classes and implementation classes not preserved by unpickling - treat as Java
+ jclassAsScala(jclazz)
+ } else if (jclazz.isArray) {
+ ArrayClass
+ } else javaTypeToValueClass(jclazz) orElse {
// jclazz is top-level - get signature
- sOwner(jclazz).info decl newTypeName(jclazz.getSimpleName)
-// val (clazz, module) = createClassModule(
-// sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _))
-// classCache enter (jclazz, clazz)
-// clazz
+ lookup
+ // val (clazz, module) = createClassModule(
+ // sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _))
+ // classCache enter (jclazz, clazz)
+ // clazz
+ }
+ }
+
+ if (!sym.isType) {
+ def msgNoSym = "no symbol could be loaded from %s (scala equivalent is %s) by name %s".format(owner, jclazz, simpleName)
+ def msgIsNotType = "not a type: symbol %s loaded from %s (scala equivalent is %s) by name %s".format(sym, owner, jclazz, simpleName)
+ assert(false, if (sym == NoSymbol) msgNoSym else msgIsNotType)
}
+
+ sym.asInstanceOf[ClassSymbol]
}
/**
@@ -411,7 +462,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val tparams = new ListBuffer[Symbol]
def targToScala(arg: jType): Type = arg match {
case jwild: WildcardType =>
- val tparam = owner.newExistential(NoPosition, newTypeName("T$" + tparams.length))
+ val tparam = owner.newExistential(newTypeName("T$" + tparams.length))
.setInfo(TypeBounds(
lub(jwild.getLowerBounds.toList map typeToScala),
glb(jwild.getUpperBounds.toList map typeToScala map objToAny)))
@@ -458,7 +509,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
private def jclassAsScala(jclazz: jClass[_]): Symbol = jclassAsScala(jclazz, sOwner(jclazz))
private def jclassAsScala(jclazz: jClass[_], owner: Symbol): Symbol = {
- val name = newTypeName(jclazz.getSimpleName)
+ val name = scalaSimpleName(jclazz)
val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
val (clazz, module) = createClassModule(owner, name, completer)
classCache enter (jclazz, clazz)
@@ -472,9 +523,11 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* @return A Scala value symbol that wraps all reflection info of `jfield`
*/
private def jfieldAsScala(jfield: jField): Symbol = fieldCache.toScala(jfield) {
- val field = sOwner(jfield).newValue(NoPosition, newTermName(jfield.getName))
- .setFlag(toScalaFlags(jfield.getModifiers, isField = true) | JAVA)
- .setInfo(typeToScala(jfield.getGenericType))
+ val field = (
+ sOwner(jfield)
+ newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
+ setInfo typeToScala(jfield.getGenericType)
+ )
fieldCache enter (jfield, field)
copyAnnotations(field, jfield)
field
@@ -492,8 +545,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
*/
private def jmethodAsScala(jmeth: jMethod): Symbol = methodCache.toScala(jmeth) {
val clazz = sOwner(jmeth)
- val meth = clazz.newMethod(NoPosition, newTermName(jmeth.getName))
- .setFlag(toScalaFlags(jmeth.getModifiers) | JAVA)
+ val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers))
methodCache enter (jmeth, meth)
val tparams = jmeth.getTypeParameters.toList map createTypeParameter
val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
@@ -515,8 +567,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
private def jconstrAsScala(jconstr: jConstructor[_]): Symbol = {
// [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out.
val clazz = sOwner(jconstr)
- val constr = clazz.newMethod(NoPosition, nme.CONSTRUCTOR)
- .setFlag(toScalaFlags(jconstr.getModifiers) | JAVA)
+ val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers))
constructorCache enter (jconstr, constr)
val tparams = jconstr.getTypeParameters.toList map createTypeParameter
val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
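The reworked sOwner and scalaSimpleName above lean entirely on java.lang.Class's enclosing-info queries (getEnclosingClass, getEnclosingMethod, getEnclosingConstructor, getPackage). As a point of reference, a small standalone sketch of those same queries outside the symbol table; the object name is invented and this is not part of the patch:

object EnclosingInfoDemo {
  def describe(jclazz: Class[_]): String =
    if (jclazz.isMemberClass)
      "member of " + jclazz.getEnclosingClass.getName
    else if (jclazz.isLocalClass) {
      // for a local class, exactly one of the enclosing method/constructor is non-null
      val m = jclazz.getEnclosingMethod
      if (m != null) "local to method " + m.getName
      else "local to a constructor of " + jclazz.getEnclosingConstructor.getDeclaringClass.getName
    }
    else if (jclazz.getPackage != null)
      "top level in package " + jclazz.getPackage.getName
    else
      "top level in the empty package"

  def main(args: Array[String]): Unit = {
    println(describe(classOf[java.lang.Character.UnicodeBlock])) // member of java.lang.Character
    println(describe(classOf[String]))                           // top level in package java.lang
  }
}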
diff --git a/src/compiler/scala/reflect/runtime/Loaders.scala b/src/compiler/scala/reflect/runtime/Loaders.scala
index 35b3a16dc2..4b35a5b37e 100644
--- a/src/compiler/scala/reflect/runtime/Loaders.scala
+++ b/src/compiler/scala/reflect/runtime/Loaders.scala
@@ -63,8 +63,8 @@ trait Loaders { self: SymbolTable =>
*/
protected def createClassModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
- val clazz = owner.newClass(NoPosition, name)
- val module = owner.newModule(NoPosition, name.toTermName)
+ val clazz = owner.newClass(name)
+ val module = owner.newModule(name.toTermName)
owner.info.decls enter clazz
owner.info.decls enter module
initClassModule(clazz, module, completer(clazz, module))
@@ -97,9 +97,9 @@ trait Loaders { self: SymbolTable =>
0 < dp && dp < (name.length - 1)
}
- class PackageScope(pkgClass: Symbol) extends Scope {
+ class PackageScope(pkgClass: Symbol) extends Scope() with SynchronizedScope {
assert(pkgClass.isType)
- private var negatives = mutable.Set[Name]()
+ private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected.
override def lookupEntry(name: Name): ScopeEntry = {
val e = super.lookupEntry(name)
if (e != null)
diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala
index 9490dc4ad7..028a660a35 100644
--- a/src/compiler/scala/reflect/runtime/Mirror.scala
+++ b/src/compiler/scala/reflect/runtime/Mirror.scala
@@ -12,19 +12,28 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
import definitions._
- def classWithName(name: String): Symbol = classToScala(javaClass(name))
- def getClass(obj: AnyRef): Symbol = classToScala(obj.getClass)
- def getType(obj: AnyRef): Type = typeToScala(obj.getClass)
+ def symbolForName(name: String): Symbol = {
+ val clazz = javaClass(name, defaultReflectiveClassLoader())
+ classToScala(clazz)
+ }
+
+ def companionInstance(clazz: Symbol): AnyRef = {
+ val singleton = ReflectionUtils.singletonInstance(clazz.fullName, defaultReflectiveClassLoader())
+ singleton
+ }
+
+ def symbolOfInstance(obj: Any): Symbol = classToScala(obj.getClass)
+ def typeOfInstance(obj: Any): Type = typeToScala(obj.getClass)
// to do add getClass/getType for instances of primitive types, probably like this:
// def getClass[T <: AnyVal : Manifest](x: T): Symbol = manifest[T].getClass
- def getValue(receiver: AnyRef, field: Symbol): Any = {
+ def getValueOfField(receiver: AnyRef, field: Symbol): Any = {
fieldToJava(field).get(receiver)
}
- def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit = {
+ def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit = {
fieldToJava(field).set(receiver, value)
}
- def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any = {
+ def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any = {
if (meth.owner == ArrayClass) {
meth.name match {
case nme.length => return Array.getLength(receiver)
@@ -32,7 +41,9 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
case nme.update => return Array.set(receiver, args(0).asInstanceOf[Int], args(1))
}
}
- methodToJava(meth).invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+
+ val jmeth = methodToJava(meth)
+ jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
}
override def classToType(jclazz: java.lang.Class[_]): Type = typeToScala(jclazz)
@@ -40,7 +51,8 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe
override def typeToClass(tpe: Type): java.lang.Class[_] = typeToJavaClass(tpe)
override def symbolToClass(sym: Symbol): java.lang.Class[_] = classToJava(sym)
-
+
+ override def inReflexiveMirror = true
}
object Mirror extends Mirror
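The array branch of invoke above maps directly onto java.lang.reflect.Array. A small sketch of those calls in isolation (names invented, not part of the patch):

object ArrayReflectionDemo {
  import java.lang.reflect.{ Array => JArray }
  def main(args: Array[String]): Unit = {
    val xs: AnyRef = Array(1, 2, 3)
    println(JArray.getLength(xs)) // what the nme.length case dispatches to
    println(JArray.get(xs, 1))    // nme.apply
    JArray.set(xs, 1, 42)         // nme.update
    println(JArray.get(xs, 1))
  }
}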
diff --git a/src/compiler/scala/reflect/runtime/ScalaToJava.scala b/src/compiler/scala/reflect/runtime/ScalaToJava.scala
index b1e4d6224c..405a00de8d 100644
--- a/src/compiler/scala/reflect/runtime/ScalaToJava.scala
+++ b/src/compiler/scala/reflect/runtime/ScalaToJava.scala
@@ -29,17 +29,7 @@ trait ScalaToJava extends ConversionUtil { self: SymbolTable =>
def noClass = throw new ClassNotFoundException("no Java class corresponding to "+clazz+" found")
//println("classToJava "+clazz+" "+clazz.owner+" "+clazz.owner.isPackageClass)//debug
if (clazz.isValueClass)
- clazz match {
- case UnitClass => java.lang.Void.TYPE
- case ByteClass => java.lang.Byte.TYPE
- case CharClass => java.lang.Character.TYPE
- case ShortClass => java.lang.Short.TYPE
- case IntClass => java.lang.Integer.TYPE
- case LongClass => java.lang.Long.TYPE
- case FloatClass => java.lang.Float.TYPE
- case DoubleClass => java.lang.Double.TYPE
- case BooleanClass => java.lang.Boolean.TYPE
- }
+ valueClassToJavaType(clazz)
else if (clazz == ArrayClass)
noClass
else if (clazz.owner.isPackageClass)
@@ -54,7 +44,7 @@ trait ScalaToJava extends ConversionUtil { self: SymbolTable =>
}
private def expandedName(sym: Symbol): String =
- if (sym.isPrivate) nme.expandedName(sym.name, sym.owner).toString
+ if (sym.isPrivate) nme.expandedName(sym.name.toTermName, sym.owner).toString
else sym.name.toString
def fieldToJava(fld: Symbol): jField = fieldCache.toJava(fld) {
diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala
index 2a6cdea519..b4f0123114 100644
--- a/src/compiler/scala/reflect/runtime/Settings.scala
+++ b/src/compiler/scala/reflect/runtime/Settings.scala
@@ -28,6 +28,7 @@ class Settings extends internal.settings.MutableSettings {
val explaintypes = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
+ val Yshowsymkinds = new BooleanSetting(false)
val Xprintpos = new BooleanSetting(false)
val printtypes = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
diff --git a/src/compiler/scala/reflect/runtime/SymbolTable.scala b/src/compiler/scala/reflect/runtime/SymbolTable.scala
index d1a806bcef..5331f0a53e 100644
--- a/src/compiler/scala/reflect/runtime/SymbolTable.scala
+++ b/src/compiler/scala/reflect/runtime/SymbolTable.scala
@@ -6,7 +6,7 @@ package runtime
* It can be used either from the reflexive mirror itself (class Universe), or else from
* a runtime compiler that uses reflection to get class information (class scala.tools.nsc.ReflectGlobal)
*/
-trait SymbolTable extends internal.SymbolTable with JavaToScala with ScalaToJava with Loaders {
+trait SymbolTable extends internal.SymbolTable with JavaToScala with ScalaToJava with Loaders with SynchronizedOps {
/** If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
* <owner>.<name>, otherwise return NoSymbol.
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
new file mode 100644
index 0000000000..72adbd4004
--- /dev/null
+++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala
@@ -0,0 +1,51 @@
+package scala.reflect
+package runtime
+
+trait SynchronizedOps extends internal.SymbolTable
+ with SynchronizedSymbols
+ with SynchronizedTypes { self: SymbolTable =>
+
+// Names
+
+ private lazy val nameLock = new Object
+
+ override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
+ override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
+
+// BaseTypeSeqs
+
+ override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+
+ trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
+ override def apply(i: Int): Type = synchronized { super.apply(i) }
+ override def rawElem(i: Int) = synchronized { super.rawElem(i) }
+ override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) }
+ override def toList: List[Type] = synchronized { super.toList }
+ override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) }
+ override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) }
+ override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) }
+ override lazy val maxDepth = synchronized { maxDepthOfElems }
+ override def toString = synchronized { super.toString }
+
+ override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ }
+
+// Scopes
+
+ override def newScope = new Scope() with SynchronizedScope
+ override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
+
+ trait SynchronizedScope extends Scope {
+ override def isEmpty: Boolean = synchronized { super.isEmpty }
+ override def size: Int = synchronized { super.size }
+ override def enter(sym: Symbol) = synchronized { super.enter(sym) }
+ override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
+ override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
+ override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
+ override def lookupAll(name: Name) = synchronized { super.lookupAll(name) }
+ override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) }
+ override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) }
+ override def toList: List[Symbol] = synchronized { super.toList }
+ }
+}
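SynchronizedOps and its SynchronizedScope/SynchronizedBaseTypeSeq mixins follow the usual stackable-trait locking pattern: each operation is re-overridden as synchronized { super.op } and the trait is mixed in at instantiation time. A minimal sketch of the same pattern on a toy class (names invented, not compiler code):

class Counter {
  private var n = 0
  def increment(): Int = { n += 1; n }
  def current: Int = n
}

trait SynchronizedCounter extends Counter {
  override def increment(): Int = synchronized { super.increment() }
  override def current: Int = synchronized { super.current }
}

object CounterFactory {
  // callers get a thread-safe instance without Counter itself knowing about locking
  def newCounter(): Counter = new Counter with SynchronizedCounter
}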
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
new file mode 100644
index 0000000000..9baf94f71d
--- /dev/null
+++ b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -0,0 +1,119 @@
+package scala.reflect
+package runtime
+
+import internal.Flags.DEFERRED
+
+trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
+
+ override protected def nextId() = synchronized { super.nextId() }
+
+ override protected def freshExistentialName(suffix: String) =
+ synchronized { super.freshExistentialName(suffix) }
+
+ // Set the fields which point companions at one another. Returns the module.
+ override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
+ synchronized { super.connectModuleToClass(m, moduleClass) }
+
+ override def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar =
+ new FreeVar(name, value) with SynchronizedTermSymbol initFlags newFlags setInfo tpe
+
+ override protected def makeNoSymbol = new NoSymbol with SynchronizedSymbol
+
+ trait SynchronizedSymbol extends Symbol {
+
+ override def rawowner = synchronized { super.rawowner }
+ override def rawname = synchronized { super.rawname }
+ override def rawflags = synchronized { super.rawflags }
+
+ override def rawflags_=(x: FlagsType) = synchronized { super.rawflags_=(x) }
+ override def name_=(x: Name) = synchronized { super.name_=(x) }
+ override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
+
+ override def validTo = synchronized { super.validTo }
+ override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
+
+ override def pos = synchronized { super.pos }
+ override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
+
+ override def privateWithin = synchronized { super.privateWithin }
+ override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
+
+ override def info = synchronized { super.info }
+ override def info_=(info: Type) = synchronized { super.info_=(info) }
+ override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
+ override def rawInfo: Type = synchronized { super.rawInfo }
+
+ override def typeParams: List[Symbol] = synchronized { super.typeParams }
+
+ override def reset(completer: Type) = synchronized { super.reset(completer) }
+
+ override def infosString: String = synchronized { super.infosString }
+
+ override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
+ override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
+
+
+// ------ creators -------------------------------------------------------------------
+
+ override def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
+
+ override def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
+ new AbstractTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
+
+ override def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
+ new AliasTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
+
+ override def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
+ new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
+
+ override def newMethodSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
+ new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags
+
+ override def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
+ new ClassSymbol(this, pos, name) with SynchronizedClassSymbol initFlags newFlags
+
+ override def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
+ new ModuleClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags
+
+ override def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
+ if ((newFlags & DEFERRED) == 0L)
+ new TypeSkolem(this, pos, name, origin) with SynchronizedTypeSymbol initFlags newFlags
+ else
+ new TypeSkolem(this, pos, name, origin) with AbstractTypeMixin with SynchronizedTypeSymbol initFlags newFlags
+ }
+
+// ------- subclasses ---------------------------------------------------------------------
+
+ trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol {
+ override def referenced: Symbol = synchronized { super.referenced }
+ override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) }
+ }
+
+ trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
+ override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) }
+ }
+
+ trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol {
+ override def typeConstructor: Type = synchronized { super.typeConstructor }
+ override def tpe: Type = synchronized { super.tpe }
+ }
+
+ trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
+ override def sourceFile = synchronized { super.sourceFile }
+ override def sourceFile_=(f: AbstractFileType) = synchronized { super.sourceFile_=(f) }
+ override def thisSym: Symbol = synchronized { super.thisSym }
+ override def thisType: Type = synchronized { super.thisType }
+ override def typeOfThis: Type = synchronized { super.typeOfThis }
+ override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) }
+ override def children = synchronized { super.children }
+ override def addChild(sym: Symbol) = synchronized { super.addChild(sym) }
+ }
+
+ trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
+ override def sourceModule = synchronized { super.sourceModule }
+ override def sourceModule_=(module: Symbol) = synchronized { super.sourceModule_=(module: Symbol) }
+ override def implicitMembers: List[Symbol] = synchronized { super.implicitMembers }
+ }
+}
+
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
new file mode 100644
index 0000000000..c842d3dd01
--- /dev/null
+++ b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala
@@ -0,0 +1,87 @@
+package scala.reflect
+package runtime
+
+/** This trait overrides methods in reflect.internal, bracketing
+ * them in synchronized { ... } to make them thread-safe
+ */
+trait SynchronizedTypes extends internal.Types { self: SymbolTable =>
+
+ // No sharing of map objects:
+ override protected def commonOwnerMap = new CommonOwnerMap
+
+ private val uniqueLock = new Object
+ override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { super.unique(tp) }
+
+ class SynchronizedUndoLog extends UndoLog {
+
+ override def clear() =
+ synchronized { super.clear() }
+
+ override def undo[T](block: => T): T =
+ synchronized { super.undo(block) }
+
+ override def undoUnless(block: => Boolean): Boolean =
+ synchronized { super.undoUnless(block) }
+ }
+
+ override protected def newUndoLog = new SynchronizedUndoLog
+
+ override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
+ synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
+
+ private val subsametypeLock = new Object
+
+ override def isSameType(tp1: Type, tp2: Type): Boolean =
+ subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
+
+ override def isDifferentType(tp1: Type, tp2: Type): Boolean =
+ subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
+
+ override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
+ subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
+
+ private val lubglbLock = new Object
+
+ override def glb(ts: List[Type]): Type =
+ lubglbLock.synchronized { super.glb(ts) }
+
+ override def lub(ts: List[Type]): Type =
+ lubglbLock.synchronized { super.lub(ts) }
+
+ private val indentLock = new Object
+
+ override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
+ indentLock.synchronized { super.explain(op, p, tp1, arg2) }
+ }
+
+ private val toStringLock = new Object
+
+ override protected def typeToString(tpe: Type): String =
+ toStringLock.synchronized(super.typeToString(tpe))
+
+ /* The idea of caches is as follows.
+ * When in reflexive mode, a cache is either null, a sentinel
+ * value representing "undefined", or the final defined
+ * value. Hence, we can ask in non-synchronized mode whether the cache field
+ * is non-null and different from the sentinel (if a sentinel exists).
+ * If that's true, the cache value is current.
+ * Otherwise we arrive in one of the defined... methods listed below
+ * which go through all steps in synchronized mode.
+ */
+
+ override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
+ tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
+
+ override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
+ tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
+
+ override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
+ tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
+
+ override protected def defineParentsOfTypeRef(tpe: TypeRef) =
+ tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
+
+ override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
+ tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
+
+} \ No newline at end of file
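The caching scheme described in the comment above amounts to an unsynchronized fast-path read followed by a synchronized define step. A simplified standalone sketch of that idea, with null as the only sentinel (an illustration, not the compiler's actual code):

class CachedBox[T >: Null <: AnyRef](compute: () => T) {
  @volatile private var cache: T = null
  def get: T = {
    val c = cache
    if (c ne null) c            // fast path: the field already holds the defined value
    else synchronized {         // slow path: the "define..." step
      if (cache eq null) cache = compute()
      cache
    }
  }
}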
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 9f44e2047a..f52662ce6f 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -33,7 +33,7 @@ trait ToolBoxes extends { self: Universe =>
private def nextWrapperModuleName() = {
wrapCount += 1
- "__wrapper$" + wrapCount
+ newTermName("__wrapper$" + wrapCount)
}
private def moduleFileName(className: String) = className + "$"
@@ -41,31 +41,34 @@ trait ToolBoxes extends { self: Universe =>
private def isFree(t: Tree) = t.isInstanceOf[Ident] && t.symbol.isInstanceOf[FreeVar]
def typedTopLevelExpr(tree: Tree, pt: Type): Tree = {
- val ownerClass = EmptyPackageClass.newClass(newTypeName("<expression-owner>"))
- ownerClass.setInfo(new ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
- val owner = ownerClass.newLocalDummy(tree.pos)
- typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt)
+ // !!! Why is this in the empty package? If it's only to make
+ // it inaccessible then please put it somewhere designed for that
+ // rather than polluting the empty package with synthetics.
+ trace("typing: ")(showAttributed(tree, true, true, settings.Yshowsymkinds.value))
+ val ownerClass = EmptyPackageClass.newClassWithInfo(newTypeName("<expression-owner>"), List(ObjectClass.tpe), newScope)
+ val owner = ownerClass.newLocalDummy(tree.pos)
+ val ttree = typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt)
+ trace("typed: ")(showAttributed(ttree, true, true, settings.Yshowsymkinds.value))
+ ttree
}
-
+
def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
case Some(sym) if sym != null && sym != NoSymbol => sym.owner
case _ => NoSymbol
}
-
+
def wrapInObject(expr: Tree, fvs: List[Symbol]): ModuleDef = {
- val obj = EmptyPackageClass.newModule(NoPosition, nextWrapperModuleName())
- val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), new Scope, obj.moduleClass)
+ val obj = EmptyPackageClass.newModule(nextWrapperModuleName())
+ val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), newScope, obj.moduleClass)
obj.moduleClass setInfo minfo
obj setInfo obj.moduleClass.tpe
- val meth = obj.moduleClass.newMethod(NoPosition, wrapperMethodName)
- def makeParam(fv: Symbol) = meth.newValueParameter(NoPosition, fv.name) setInfo fv.tpe
- meth setInfo MethodType(fvs map makeParam, expr.tpe)
+ val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName))
+ def makeParam(fv: Symbol) = meth.newValueParameter(fv.name.toTermName) setInfo fv.tpe
+ meth setInfo MethodType(fvs map makeParam, AnyClass.tpe)
minfo.decls enter meth
trace("wrapping ")(defOwner(expr) -> meth)
val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth))
- trace("wrapped: ")(showAttributed(methdef))
- resetAllAttrs(
- ModuleDef(
+ val moduledef = ModuleDef(
obj,
Template(
List(TypeTree(ObjectClass.tpe)),
@@ -74,7 +77,11 @@ trait ToolBoxes extends { self: Universe =>
List(),
List(List()),
List(methdef),
- NoPosition)))
+ NoPosition))
+ trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
+ val cleanedUp = resetLocalAttrs(moduledef)
+ trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
+ cleanedUp
}
def wrapInPackage(clazz: Tree): PackageDef =
@@ -87,8 +94,22 @@ trait ToolBoxes extends { self: Universe =>
}
def compileExpr(expr: Tree, fvs: List[Symbol]): String = {
+ // Previously toolboxes used to typecheck their inputs before compiling.
+ // Actually, the initial demo by Martin first typechecked the reified tree,
+ // then ran it, which typechecked it again, and only then launched the
+ // reflective compiler.
+ //
+ // However, as observed in https://issues.scala-lang.org/browse/SI-5464,
+ // typechecking in the current implementation is not always idempotent.
+ // That's why we cannot allow inputs of toolboxes to be typechecked,
+ // at least not until the aforementioned issue is closed.
+ val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
+ if (!typed.isEmpty) {
+ throw new Error("cannot compile trees that are already typed")
+ }
+
val mdef = wrapInObject(expr, fvs)
- val pdef = trace("wrapped: ")(wrapInPackage(mdef))
+ val pdef = wrapInPackage(mdef)
val unit = wrapInCompilationUnit(pdef)
val run = new Run
run.compileUnits(List(unit), run.namerPhase)
@@ -99,36 +120,44 @@ trait ToolBoxes extends { self: Universe =>
jclazz.getDeclaredMethods.find(_.getName == name).get
def runExpr(expr: Tree): Any = {
- val etpe = expr.tpe
val fvs = (expr filter isFree map (_.symbol)).distinct
-
+
reporter.reset()
val className = compileExpr(expr, fvs)
if (reporter.hasErrors) {
throw new Error("reflective compilation has failed")
}
-
+
if (settings.debug.value) println("generated: "+className)
val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
val singleton = jfield.get(null)
- val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*)
- if (etpe.typeSymbol != FunctionClass(0)) result
- else {
- val applyMeth = result.getClass.getMethod("apply")
- applyMeth.invoke(result)
- }
+ // @odersky writes: Not sure we will be able to drop this. I forgot the reason why we dereference () functions,
+ // but there must have been one. So I propose to leave the old version in comments, to be resurrected if the problem resurfaces.
+// val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*)
+// if (etpe.typeSymbol != FunctionClass(0)) result
+// else {
+// val applyMeth = result.getClass.getMethod("apply")
+// applyMeth.invoke(result)
+// }
+ jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*)
}
-
- def showAttributed(tree: Tree): String = {
- val saved = settings.printtypes.value
+
+ def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = {
+ val saved1 = settings.printtypes.value
+ val saved2 = settings.uniqid.value
+ val saved3 = settings.Yshowsymkinds.value
try {
- settings.printtypes.value = true
- //settings.uniqid.value = true
+ settings.printtypes.value = printTypes
+ settings.uniqid.value = printIds
+ settings.Yshowsymkinds.value = printKinds
tree.toString
- } finally
- compiler.settings.printtypes.value = saved
+ } finally {
+ settings.printtypes.value = saved1
+ settings.uniqid.value = saved2
+ settings.Yshowsymkinds.value = saved3
+ }
}
}
@@ -148,7 +177,13 @@ trait ToolBoxes extends { self: Universe =>
}
command.settings.outputDirs setSingleOutput virtualDirectory
- new ToolBoxGlobal(command.settings, reporter)
+ val instance = new ToolBoxGlobal(command.settings, reporter)
+
+ // need to establish a run and a phase because otherwise we run into an assertion in TypeHistory
+ // that states that the period must be different from NoPeriod
+ val run = new instance.Run
+ instance.phase = run.refchecksPhase
+ instance
}
lazy val importer = new compiler.Importer {
@@ -158,36 +193,26 @@ trait ToolBoxes extends { self: Universe =>
lazy val exporter = importer.reverse
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader)
-
- private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = {
- // need to establish a run an phase because otherwise we run into an assertion in TypeHistory
- // that states that the period must be different from NoPeriod
- val run = new compiler.Run
- compiler.phase = run.refchecksPhase
- val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
- val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type])
-// val typer = compiler.typer.atOwner(ctree, if (owner.isModule) cowner.moduleClass else cowner)
- val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt)
- ttree
- }
def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = {
if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType)
- val ttree = importAndTypeCheck(tree, expectedType)
- exporter.importTree(ttree).asInstanceOf[rm.Tree]
+ val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
+ val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type])
+ val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt)
+ val rmttree = exporter.importTree(ttree).asInstanceOf[rm.Tree]
+ rmttree
}
def typeCheck(tree: rm.Tree): rm.Tree =
typeCheck(tree, WildcardType.asInstanceOf[rm.Type])
- def showAttributed(tree: rm.Tree): String =
- compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]))
+ def showAttributed(tree: rm.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
+ compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]), printTypes, printIds, printKinds)
- def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = {
- val ttree = importAndTypeCheck(tree, expectedType)
- compiler.runExpr(ttree)
+ def runExpr(tree: rm.Tree): Any = {
+ if (compiler.settings.verbose.value) println("running "+tree)
+ val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
+ compiler.runExpr(ctree)
}
-
- def runExpr(tree: rm.Tree): Any = runExpr(tree, WildcardType.asInstanceOf[rm.Type])
}
}
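runExpr above boils down to loading the freshly compiled wrapper object through the toolbox's class loader and invoking its wrapper method on the module instance. A hypothetical standalone sketch of that reflective call (names invented; the "MODULE$" field name is assumed to be what NameTransformer.MODULE_INSTANCE_NAME expands to):

object RunCompiledModule {
  def run(loader: ClassLoader, moduleClassName: String, methodName: String, args: AnyRef*): Any = {
    val jclazz    = java.lang.Class.forName(moduleClassName, true, loader)
    val jmeth     = jclazz.getDeclaredMethods.find(_.getName == methodName).get
    val jfield    = jclazz.getDeclaredFields.find(_.getName == "MODULE$").get
    val singleton = jfield.get(null)   // the object's singleton instance
    jmeth.invoke(singleton, args: _*)
  }
}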
diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
index 8c9e6a2565..61001a4778 100644
--- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
+++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
@@ -2,12 +2,12 @@ package scala.reflect
package runtime
trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
-
- def staticClass(fullname: String): Symbol = definitions.getClass(newTypeName(fullname))
- def staticModule(fullname: String): Symbol = definitions.getModule(newTermName(fullname))
-
- def thisModuleType(fullname: String) =
- definitions.getModule(fullname).moduleClass.thisType
+ /** A comment explaining why initialize was added to all these
+ * would be appreciated. (We may as well start somewhere.)
+ */
+ def staticClass(fullname: String) = definitions.getRequiredClass(fullname).initialize
+ def staticModule(fullname: String) = definitions.getRequiredModule(fullname).initialize
+ def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.initialize.thisType
/** Selects type symbol with given name from the defined members of prefix type
*/
@@ -41,7 +41,7 @@ trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
selectIn(owner.info, idx)
}
- def freeVar(name: String, info: Type, value: Any) = new FreeVar(name, info, value)
+ def newFreeVar(name: String, info: Type, value: Any) = newFreeVar(newTermName(name), info, value)
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers =
Modifiers(flags, privateWithin, annotations)
diff --git a/src/compiler/scala/reflect/runtime/Universe.scala b/src/compiler/scala/reflect/runtime/Universe.scala
index c786bb86c5..700f819226 100644
--- a/src/compiler/scala/reflect/runtime/Universe.scala
+++ b/src/compiler/scala/reflect/runtime/Universe.scala
@@ -16,7 +16,7 @@ class Universe extends SymbolTable {
val gen = new TreeGen { val global: Universe.this.type = Universe.this }
- def settings = new Settings
+ lazy val settings = new Settings
def forInteractive = false
def forScaladoc = false
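Turning def settings into lazy val settings matters because Settings is mutable: a def hands out a fresh instance on every access, so any flag written through it is silently lost. A toy illustration, unrelated to the compiler's Settings class:

class Flags { var verbose = false }

object WithDef  { def flags      = new Flags } // new instance per access
object WithLazy { lazy val flags = new Flags } // one shared instance

object SettingsDemo {
  def main(args: Array[String]): Unit = {
    WithDef.flags.verbose = true
    println(WithDef.flags.verbose)  // false: the write went to a throwaway instance
    WithLazy.flags.verbose = true
    println(WithLazy.flags.verbose) // true
  }
}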
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 7aff4e3e8e..3c79fcd3fb 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -90,7 +90,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for properties that refer to compiler phases. */
object CompilerPhase extends PermissibleValue {
- val values = List("namer", "typer", "pickler", "refchecks", "liftcode",
+ val values = List("namer", "typer", "pickler", "refchecks",
"uncurry", "tailcalls", "specialize", "explicitouter",
"erasure", "lazyvals", "lambdalift", "constructors",
"flatten", "mixin", "cleanup", "icode", "inliner",
@@ -608,7 +608,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
if (!deprecation.isEmpty) settings.deprecation.value = deprecation.get
if (!nobootcp.isEmpty) settings.nobootcp.value = nobootcp.get
if (!nowarn.isEmpty) settings.nowarn.value = nowarn.get
- if (!optimise.isEmpty) settings.XO.value = optimise.get
+ if (!optimise.isEmpty) settings.optimise.value = optimise.get
if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 253d1dec5d..92d6e6320c 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -43,7 +43,8 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `deprecation`,
* - `docgenerator`,
* - `docrootcontent`,
- * - `unchecked`.
+ * - `unchecked`,
+ * - `nofail`.
*
* It also takes the following parameters as nested elements:
* - `src` (for srcdir),
@@ -122,7 +123,10 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the compiler to generate unchecked information. */
private var unchecked: Boolean = false
-
+
+ /** Instruct the ant task not to fail in the event of errors */
+ private var nofail: Boolean = false
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -352,6 +356,17 @@ class Scaladoc extends ScalaMatchingTask {
def setDocUncompilable(input: String) {
docUncompilable = Some(input)
}
+
+ /** Set the `nofail` info attribute.
+ *
+ * @param input One of the flags `yes/no` or `on/off`. Default is no/off.
+ */
+ def setNoFail(input: String) {
+ if (Flag.isPermissible(input))
+ nofail = "yes".equals(input) || "on".equals(input)
+ else
+ buildError("Unknown nofail flag '" + input + "'")
+ }
/*============================================================================*\
** Properties getters **
@@ -553,6 +568,8 @@ class Scaladoc extends ScalaMatchingTask {
Pair(docSettings, sourceFiles)
}
+ def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
+
/** Performs the compilation. */
override def execute() = {
val Pair(docSettings, sourceFiles) = initialize
@@ -561,7 +578,7 @@ class Scaladoc extends ScalaMatchingTask {
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(sourceFiles.map (_.toString))
if (reporter.ERROR.count > 0)
- buildError(
+ safeBuildError(
"Document failed with " +
reporter.ERROR.count + " error" +
(if (reporter.ERROR.count > 1) "s" else "") +
@@ -576,11 +593,11 @@ class Scaladoc extends ScalaMatchingTask {
} catch {
case exception: Throwable if exception.getMessage ne null =>
exception.printStackTrace()
- buildError("Document failed because of an internal documenter error (" +
+ safeBuildError("Document failed because of an internal documenter error (" +
exception.getMessage + "); see the error output for details.")
case exception =>
exception.printStackTrace()
- buildError("Document failed because of an internal documenter error " +
+ safeBuildError("Document failed because of an internal documenter error " +
"(no error message provided); see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index a39de64c5a..5199e273d7 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -13,6 +13,7 @@ import java.io.{ File, FileWriter }
import org.apache.tools.ant.Project
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
+import org.apache.tools.ant.BuildException
import scala.tools.nsc.io
import scala.tools.nsc.util.ScalaClassLoader
@@ -150,7 +151,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
val res = execWithArgFiles(java, paths)
if (failOnError && res != 0)
- sys.error("Compilation failed because of an internal compiler error;"+
+ throw new BuildException("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 9f1fbc4524..5949689b24 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -86,4 +86,6 @@ goto :eof
:end
@@endlocal
-exit /b %errorlevel%
+
+REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu
+@@%COMSPEC% /C exit %errorlevel% >nul
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 8f82c997db..7c9599dc45 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -30,7 +30,7 @@ trait AnyValReps {
" * @return the bitwise negation of this value\n" +
" * @example {{{\n" +
" * ~5 == -6\n" +
- " * // in binary: ~00000101 == \n" +
+ " * // in binary: ~00000101 ==\n" +
" * // 11111010\n" +
" * }}}\n" +
" */") :: ops
@@ -44,9 +44,9 @@ trait AnyValReps {
" * @return the bitwise OR of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 | 0xaa) == 0xfa\n" +
- " * // in binary: 11110000 \n" +
- " * // | 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // | 10101010\n" +
+ " * // --------\n" +
" * // 11111010\n" +
" * }}}\n" +
" */"),
@@ -54,9 +54,9 @@ trait AnyValReps {
" * @return the bitwise AND of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 & 0xaa) == 0xa0\n" +
- " * // in binary: 11110000 \n" +
- " * // & 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // & 10101010\n" +
+ " * // --------\n" +
" * // 10100000\n" +
" * }}}\n" +
" */"),
@@ -64,9 +64,9 @@ trait AnyValReps {
" * @return the bitwise XOR of this value and x\n" +
" * @example {{{\n" +
" * (0xf0 ^ 0xaa) == 0x5a\n" +
- " * // in binary: 11110000 \n" +
- " * // ^ 10101010 \n" +
- " * // -------- \n" +
+ " * // in binary: 11110000\n" +
+ " * // ^ 10101010\n" +
+ " * // --------\n" +
" * // 01011010\n" +
" * }}}\n" +
" */"))
@@ -83,11 +83,11 @@ trait AnyValReps {
Op(">>>", "/**\n" +
" * @return this value bit-shifted right by the specified number of bits,\n" +
- " * filling the new left bits with zeroes. \n" +
+ " * filling the new left bits with zeroes.\n" +
" * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}\n" +
" * @example {{{\n" +
- " * -21 >>> 3 == 536870909 \n" +
- " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == \n" +
+ " * -21 >>> 3 == 536870909\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==\n" +
" * // 00011111 11111111 11111111 11111101\n" +
" * }}}\n" +
" */"),
@@ -97,8 +97,8 @@ trait AnyValReps {
" * filling in the right bits with the same value as the left-most bit of this.\n" +
" * The effect of this is to retain the sign of the value.\n" +
" * @example {{{\n" +
- " * -21 >> 3 == -3 \n" +
- " * // in binary: 11111111 11111111 11111111 11101011 >> 3 == \n" +
+ " * -21 >> 3 == -3\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==\n" +
" * // 11111111 11111111 11111111 11111101\n" +
" * }}}\n" +
" */"))
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 470207fd35..940d115b2f 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -74,6 +74,9 @@ trait CompilationUnits { self: Global =>
* It is empty up to phase 'icode'.
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
+
+ def echo(pos: Position, msg: String) =
+ reporter.echo(pos, msg)
def error(pos: Position, msg: String) =
reporter.error(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index b10ac78ac7..6393ade146 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -136,9 +136,9 @@ class StandardCompileServer extends SocketServer {
}
if (command.shouldStopWithInfo)
- reporter.info(null, command.getInfoMessage(newGlobal(newSettings, reporter)), true)
+ reporter.echo(command.getInfoMessage(newGlobal(newSettings, reporter)))
else if (command.files.isEmpty)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else {
if (isCompilerReusable) {
info("[Reusing existing Global instance.]")
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index db95c1442b..0c52954a0b 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -24,8 +24,8 @@ abstract class Driver {
protected def doCompile(compiler: Global) {
if (command.files.isEmpty) {
- reporter.info(null, command.usageMsg, true)
- reporter.info(null, compiler.pluginOptionsHelp, true)
+ reporter.echo(command.usageMsg)
+ reporter.echo(compiler.pluginOptionsHelp)
} else {
val run = new compiler.Run()
run compile command.files
@@ -40,14 +40,14 @@ abstract class Driver {
settings = command.settings
if (settings.version.value) {
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
} else if (processSettingsHook()) {
val compiler = newCompiler()
try {
if (reporter.hasErrors)
reporter.flush()
else if (command.shouldStopWithInfo)
- reporter.info(null, command.getInfoMessage(compiler), true)
+ reporter.echo(command.getInfoMessage(compiler))
else
doCompile(compiler)
} catch {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index f70aa60309..4493188b31 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -13,7 +13,7 @@ import scala.tools.util.{ Profiling, PathResolver }
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ NoPosition, Exceptional, ClassPath, SourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ShowPickled, ScalaClassLoader, returning }
+import util.{ NoPosition, Exceptional, ClassPath, SourceFile, NoSourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ShowPickled, ScalaClassLoader, returning }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
@@ -37,8 +37,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
with Plugins
with PhaseAssembly
with Trees
+ with Reifiers
with TreePrinters
with DocComments
+ with MacroContext
with symtab.Positions {
override def settings = currentSettings
@@ -123,7 +125,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Print tree in detailed form */
object nodePrinters extends {
val global: Global.this.type = Global.this
- } with NodePrinters {
+ } with NodePrinters with ReifyPrinters {
infolevel = InfoLevel.Verbose
}
@@ -133,6 +135,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
} with TreeBrowsers
val nodeToString = nodePrinters.nodeToString
+ val reifiedNodeToString = nodePrinters.reifiedNodeToString
val treeBrowser = treeBrowsers.create()
// ------------ Hooks for interactive mode-------------------------
@@ -151,18 +154,35 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Register top level class (called on entering the class)
*/
def registerTopLevelSym(sym: Symbol) {}
-
+
// ------------------ Reporting -------------------------------------
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
def error(msg: String) = globalError(msg)
def globalError(msg: String) = reporter.error(NoPosition, msg)
- def inform(msg: String) = reporter.info(NoPosition, msg, true)
+ def inform(msg: String) = reporter.echo(msg)
def warning(msg: String) =
if (opt.fatalWarnings) globalError(msg)
else reporter.warning(NoPosition, msg)
+ // Getting in front of Predef's asserts to supplement with more info.
+ // This has the happy side effect of masking the one argument forms
+ // of assert and require (but for now I've reproduced them here,
+ // because there are a million to fix.)
+ @inline final def assert(assertion: Boolean, message: => Any) {
+ Predef.assert(assertion, supplementErrorMessage("" + message))
+ }
+ @inline final def assert(assertion: Boolean) {
+ assert(assertion, "")
+ }
+ @inline final def require(requirement: Boolean, message: => Any) {
+ Predef.require(requirement, supplementErrorMessage("" + message))
+ }
+ @inline final def require(requirement: Boolean) {
+ require(requirement, "")
+ }
+
// Needs to call error to make sure the compile fails.
override def abort(msg: String): Nothing = {
error(msg)
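The comment above the new assert/require overloads describes the masking effect: inside Global, unqualified assert and require calls now resolve to these supplemented forms rather than to Predef's. A standalone sketch of the same pattern (trait, object, and file name invented):

trait SupplementedAsserts {
  /** Extra context appended to every assertion failure. */
  def supplementErrorMessage(msg: String): String

  @inline final def assert(assertion: Boolean, message: => Any) {
    Predef.assert(assertion, supplementErrorMessage("" + message))
  }
  @inline final def assert(assertion: Boolean) { assert(assertion, "") }
  @inline final def require(requirement: Boolean, message: => Any) {
    Predef.require(requirement, supplementErrorMessage("" + message))
  }
  @inline final def require(requirement: Boolean) { require(requirement, "") }
}

object AssertDemo extends SupplementedAsserts {
  def supplementErrorMessage(msg: String) = msg + "\n  while compiling: Foo.scala"
  def main(args: Array[String]): Unit =
    try assert(1 == 2, "sizes differ") // resolves to the inherited assert, not Predef's
    catch { case e: AssertionError => println(e.getMessage) }
}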
@@ -192,10 +212,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x))
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
- def logResult[T](msg: String)(result: T): T = {
- log(msg + ": " + result)
- result
- }
def logError(msg: String, t: Throwable): Unit = ()
// Over 200 closure objects are eliminated by inlining this.
@inline final def log(msg: => AnyRef): Unit =
@@ -378,10 +394,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
}
final def applyPhase(unit: CompilationUnit) {
+ if ((unit ne null) && unit.exists)
+ lastSeenSourceFile = unit.source
+
if (opt.echoFilenames)
inform("[running phase " + name + " on " + unit + "]")
- val unit0 = currentRun.currentUnit
+ val unit0 = currentUnit
try {
currentRun.currentUnit = unit
if (!cancelled(unit)) {
@@ -390,7 +409,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
}
currentRun.advanceUnit
} finally {
- //assert(currentRun.currentUnit == unit)
+ //assert(currentUnit == unit)
currentRun.currentUnit = unit0
}
}
@@ -440,17 +459,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
val runsRightAfter = None
} with RefChecks
- // phaseName = "liftcode"
- object liftcode extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]("refchecks")
- val runsRightAfter = None
- } with LiftCode
-
// phaseName = "uncurry"
override object uncurry extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("refchecks", "liftcode")
+ val runsAfter = List[String]("refchecks")
val runsRightAfter = None
} with UnCurry
@@ -618,7 +630,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
object icodeChecker extends icodeCheckers.ICodeChecker()
object typer extends analyzer.Typer(
- analyzer.NoContext.make(EmptyTree, Global.this.definitions.RootClass, new Scope)
+ analyzer.NoContext.make(EmptyTree, Global.this.definitions.RootClass, newScope)
)
/** Add the internal compiler phases to the phases set.
@@ -635,7 +647,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
superAccessors -> "add super accessors in traits and nested classes",
pickler -> "serialize symbol tables",
refChecks -> "reference/override checking, translate nested objects",
- liftcode -> "reify trees",
uncurry -> "uncurry, translate function values to anonymous classes",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
@@ -784,9 +795,44 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
curRun = null
}
+ /** There are common error conditions where, when the exception hits
+ * here, currentRun.currentUnit is null. This robs us of the knowledge
+ * of what file was being compiled when it broke. Since I really
+ * really want to know, this hack.
+ */
+ private var lastSeenSourceFile: SourceFile = NoSourceFile
+
/** The currently active run
*/
- def currentRun: Run = curRun
+ def currentRun: Run = curRun
+ def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
+ def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+
+ @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
+ @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
+ @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
+
+ /** Don't want to introduce new errors trying to report errors,
+ * so swallow exceptions.
+ */
+ override def supplementErrorMessage(errorMessage: String): String = try {
+ """|
+ | while compiling: %s
+ | current phase: %s
+ | library version: %s
+ | compiler version: %s
+ | reconstructed args: %s
+ |
+ |%s""".stripMargin.format(
+ currentSource.path,
+ phase,
+ scala.util.Properties.versionString,
+ Properties.versionString,
+ settings.recreateArgs.mkString(" "),
+ if (opt.debug) "Current unit body:\n" + currentUnit.body + "\n" + errorMessage else errorMessage
+ )
+ }
+ catch { case x: Exception => errorMessage }
/** The id of the currently active run
*/
@@ -801,10 +847,40 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** A Run is a single execution of the compiler on a sets of units
*/
class Run {
+ /** Have been running into too many init order issues with Run
+ * during erroneous conditions. Moved all these vals up to the
+ * top of the file so at least they're not trivially null.
+ */
var isDefined = false
+ /** The currently compiled unit; set from GlobalPhase */
+ var currentUnit: CompilationUnit = NoCompilationUnit
+
+ /** Counts for certain classes of warnings during this run. */
+ var deprecationWarnings: List[(Position, String)] = Nil
+ var uncheckedWarnings: List[(Position, String)] = Nil
+
+ /** A flag whether macro expansions failed */
+ var macroExpansionFailed = false
+
/** To be initialized from firstPhase. */
private var terminalPhase: Phase = NoPhase
+ private val unitbuf = new mutable.ListBuffer[CompilationUnit]
+ val compiledFiles = new mutable.HashSet[String]
+
+ /** A map from compiled top-level symbols to their source files */
+ val symSource = new mutable.HashMap[Symbol, AbstractFile]
+
+ /** A map from compiled top-level symbols to their picklers */
+ val symData = new mutable.HashMap[Symbol, PickleBuffer]
+
+ private var phasec: Int = 0 // phases completed
+ private var unitc: Int = 0 // units completed this phase
+ private var _unitbufSize = 0
+
+ def size = _unitbufSize
+ override def toString = "scalac Run for:\n " + compiledFiles.toList.sorted.mkString("\n ")
+
// Calculate where to stop based on settings -Ystop-before or -Ystop-after.
// Slightly complicated logic due to wanting -Ystop-before:parser to fail rather
// than mysteriously running to completion.
@@ -898,13 +974,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
// --------------- Miscellania -------------------------------
- /** The currently compiled unit; set from GlobalPhase */
- var currentUnit: CompilationUnit = _
-
- /** Counts for certain classes of warnings during this run. */
- var deprecationWarnings: List[(Position, String)] = Nil
- var uncheckedWarnings: List[(Position, String)] = Nil
-
/** Progress tracking. Measured in "progress units" which are 1 per
* compilation unit per phase completed.
*
@@ -936,9 +1005,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
}
def cancel() { reporter.cancelled = true }
-
- private var phasec: Int = 0 // phases completed
- private var unitc: Int = 0 // units completed this phase
+
private def currentProgress = (phasec * size) + unitc
private def totalProgress = (phaseDescriptors.size - 1) * size // -1: drops terminal phase
private def refreshProgress() = if (size > 0) progress(currentProgress, totalProgress)
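The progress bookkeeping above reduces to simple arithmetic: one progress unit per compilation unit per completed phase, with the terminal phase excluded from the total. A tiny sketch of that computation; the phase and unit counts are made up for illustration:

object ProgressSketch {
  /** Progress after `phasesCompleted` full phases plus `unitsThisPhase` units of
   *  the current phase, given `units` compilation units in the run.
   */
  def progress(phasesCompleted: Int, unitsThisPhase: Int, units: Int): Int =
    phasesCompleted * units + unitsThisPhase

  /** Total progress for `phases` phases; the terminal phase is dropped, hence `phases - 1`. */
  def total(phases: Int, units: Int): Int = (phases - 1) * units

  def main(args: Array[String]): Unit = {
    // e.g. 3 units, 5 phases, 2 phases done and 1 unit into the third:
    println(progress(2, 1, 3) + " / " + total(5, 3)) // prints "7 / 12"
  }
}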
@@ -977,11 +1044,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
// ----------- Units and top-level classes and objects --------
- private val unitbuf = new mutable.ListBuffer[CompilationUnit]
- val compiledFiles = new mutable.HashSet[String]
-
- private var _unitbufSize = 0
- def size = _unitbufSize
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
@@ -1005,12 +1067,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
*/
def units: Iterator[CompilationUnit] = unitbuf.iterator
- /** A map from compiled top-level symbols to their source files */
- val symSource = new mutable.HashMap[Symbol, AbstractFile]
-
- /** A map from compiled top-level symbols to their picklers */
- val symData = new mutable.HashMap[Symbol, PickleBuffer]
-
def registerPickle(sym: Symbol): Unit = {
// Convert all names to the type name: objects don't store pickled data
if (opt.showPhase && (opt.showNames exists (x => findNamedMember(x.toTypeName, sym) != NoSymbol))) {
@@ -1024,7 +1080,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.toplevelClass)
+ else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1062,7 +1118,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- atPhase(phase.next) {
+ afterPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1086,6 +1142,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
)
warn(deprecationWarnings.size, "deprecation", settings.deprecation)
warn(uncheckedWarnings.size, "unchecked", settings.unchecked)
+ if (macroExpansionFailed)
+ warning("some macros could not be expanded and code fell back to overridden methods;"+
+ "\nrecompiling with generated classfiles on the classpath might help.")
// todo: migrationWarnings
}
}
@@ -1111,6 +1170,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Compile list of units, starting with phase `fromPhase`
*/
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
+ try compileUnitsInternal(units, fromPhase)
+ catch { case ex =>
+ globalError(supplementErrorMessage("uncaught exception during compilation: " + ex.getClass.getName))
+ throw ex
+ }
+ }
+
+ private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
units foreach addUnit
if (opt.profileAll) {
inform("starting CPU profiling on compilation run")
@@ -1326,7 +1393,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def printAllUnits() {
print("[[syntax trees at end of " + phase + "]]")
- atPhase(phase.next) { currentRun.units foreach (treePrinter.print(_)) }
+ afterPhase(phase) { currentRun.units foreach (treePrinter.print(_)) }
}
private def findMemberFromRoot(fullName: Name): Symbol = {
diff --git a/src/compiler/scala/tools/nsc/MacroContext.scala b/src/compiler/scala/tools/nsc/MacroContext.scala
new file mode 100644
index 0000000000..72662291f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/MacroContext.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc
+
+import symtab.Flags._
+
+trait MacroContext extends reflect.macro.Context { self: Global =>
+
+ def captureVariable(vble: Symbol): Unit = vble setFlag CAPTURED
+
+ def referenceCapturedVariable(id: Ident): Tree = ReferenceToBoxed(id)
+}
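captureVariable and referenceCapturedVariable are thin wrappers: the first flags a symbol as captured, the second wraps a reference so it denotes the box rather than its contents (see the ReferenceToBoxed node added in Trees.scala further below). A standalone model of the idea, using a hypothetical Box cell in place of the runtime's actual ref classes:

object CaptureSketch {
  // Hypothetical stand-ins: a mutable cell and a marker for "the cell itself".
  final class Box[A](var elem: A)
  final case class RefToBoxed[A](box: Box[A])

  def main(args: Array[String]): Unit = {
    val captured = new Box(41)          // a captured var, boxed by the compiler
    captured.elem += 1                  // ordinary references read/write .elem
    val handle = RefToBoxed(captured)   // a "reference to boxed" denotes the box itself
    println(handle.box.elem)            // 42
  }
}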
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index d33be5bca0..c83ccfeef1 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -22,9 +22,4 @@ object Properties extends scala.util.PropertiesTrait {
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
def fileEndings = fileEndingString.split("""\|""").toList
-
- // System property java.home is the JRE root.
- // Environment variable JAVA_HOME is (supposed to be) the jdk root.
- // We need the latter to find javac, tools.jar, etc.
- def jdkHome = envOrElse("JAVA_HOME", javaHome)
}
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index a9330b053b..4fa2cc71e5 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -30,17 +30,17 @@ class ScalaDoc {
def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
if (docSettings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else if (docSettings.Xhelp.value)
- reporter.info(null, command.xusageMsg, true)
+ reporter.echo(command.xusageMsg)
else if (docSettings.Yhelp.value)
- reporter.info(null, command.yusageMsg, true)
+ reporter.echo(command.yusageMsg)
else if (docSettings.showPlugins.value)
reporter.warning(null, "Plugins are not available when using Scaladoc")
else if (docSettings.showPhases.value)
reporter.warning(null, "Phases are restricted when using Scaladoc")
else if (docSettings.help.value || !hasFiles)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else try {
if (docSettings.target.value == "msil")
msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index f9c818daf0..6a6379cca2 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -220,8 +220,8 @@ trait DocComments { self: Global =>
else site.info.baseClasses
searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
- case Some(str) if str startsWith '$' => lookupVariable(str.tail, site)
- case res => res orElse lookupVariable(vble, site.owner)
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
}
}
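The lookup above supports one level of indirection: if a variable's definition itself starts with "$", the tail is looked up as another variable name before falling back to the enclosing owner. A minimal sketch of that resolution over a plain map; the single-scope map stands in for the real per-symbol definitions and owner chain:

object DocVariableSketch {
  /** Resolve `name` in `defs`; a value of the form "$other" redirects to `other`. */
  @annotation.tailrec
  def lookup(name: String, defs: Map[String, String]): Option[String] =
    defs.get(name) match {
      case Some(value) if value.startsWith("$") => lookup(value.tail, defs)
      case other                                => other
    }

  def main(args: Array[String]): Unit = {
    val defs = Map("coll" -> "$Coll", "Coll" -> "List")
    println(lookup("coll", defs)) // Some(List)
  }
}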
@@ -397,7 +397,7 @@ trait DocComments { self: Global =>
if (tpe != NoType) tpe
else {
val alias1 = alias.cloneSymbol(definitions.RootClass)
- alias1.name = repl.toTypeName
+ alias1.name = newTypeName(repl)
typeRef(NoPrefix, alias1, Nil)
}
case None =>
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ea51fc0141..9466d1c1f2 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -71,34 +71,39 @@ abstract class NodePrinters {
def nodeinfo(tree: Tree): String =
if (infolevel == InfoLevel.Quiet) ""
else {
- val buf = new StringBuilder(" // sym=" + tree.symbol)
- if (tree.hasSymbol) {
- if (tree.symbol.isPrimaryConstructor)
- buf.append(", isPrimaryConstructor")
- else if (tree.symbol.isConstructor)
- buf.append(", isConstructor")
- if (tree.symbol != NoSymbol)
- buf.append(", sym.owner=" + tree.symbol.owner)
- buf.append(", sym.tpe=" + tree.symbol.tpe)
- }
- buf.append(", tpe=" + tree.tpe)
- if (tree.tpe != null) {
- var sym = tree.tpe.termSymbol
- if (sym == NoSymbol) sym = tree.tpe.typeSymbol
- buf.append(", tpe.sym=" + sym)
- if (sym != NoSymbol) {
- buf.append(", tpe.sym.owner=" + sym.owner)
- if ((infolevel > InfoLevel.Normal) &&
- !(sym.owner eq definitions.ScalaPackageClass) &&
- !sym.isModuleClass && !sym.isPackageClass &&
- !sym.isJavaDefined) {
- val members = for (m <- tree.tpe.decls)
- yield m.toString() + ": " + m.tpe + ", "
- buf.append(", tpe.decls=" + members)
+ try {
+ val buf = new StringBuilder(" // sym=" + tree.symbol)
+ if (tree.hasSymbol) {
+ if (tree.symbol.isPrimaryConstructor)
+ buf.append(", isPrimaryConstructor")
+ else if (tree.symbol.isConstructor)
+ buf.append(", isConstructor")
+ if (tree.symbol != NoSymbol)
+ buf.append(", sym.owner=" + tree.symbol.owner)
+ buf.append(", sym.tpe=" + tree.symbol.tpe)
+ }
+ buf.append(", tpe=" + tree.tpe)
+ if (tree.tpe != null) {
+ var sym = tree.tpe.termSymbol
+ if (sym == NoSymbol) sym = tree.tpe.typeSymbol
+ buf.append(", tpe.sym=" + sym)
+ if (sym != NoSymbol) {
+ buf.append(", tpe.sym.owner=" + sym.owner)
+ if ((infolevel > InfoLevel.Normal) &&
+ !(sym.owner eq definitions.ScalaPackageClass) &&
+ !sym.isModuleClass && !sym.isPackageClass &&
+ !sym.isJavaDefined) {
+ val members = for (m <- tree.tpe.decls)
+ yield m.toString() + ": " + m.tpe + ", "
+ buf.append(", tpe.decls=" + members)
+ }
}
}
+ buf.toString
+ } catch {
+ case ex: Throwable =>
+ return " // sym= <error> " + ex.getMessage
}
- buf.toString
}
def nodeinfo2(tree: Tree): String =
(if (comma) "," else "") + nodeinfo(tree)
diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
new file mode 100644
index 0000000000..21e075950f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala
@@ -0,0 +1,761 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Gilles Dubochet
+ */
+
+package scala.tools.nsc
+package ast
+
+import symtab._
+import Flags._
+import scala.reflect.api.Modifier._
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.tools.nsc.util.FreshNameCreator
+import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
+
+/** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+ * See more info in the comments to `reify' in scala.reflect.macro.Context.
+ *
+ * @author Martin Odersky
+ * @version 2.10
+ */
+trait Reifiers { self: Global =>
+
+ def reify(tree: Tree): Tree = {
+ class Reifier {
+ import definitions._
+ import Reifier._
+
+ final val scalaPrefix = "scala."
+ final val localPrefix = "$local"
+ final val memoizerName = "$memo"
+
+ val reifyDebug = settings.Yreifydebug.value
+
+ private val reifiableSyms = mutable.ArrayBuffer[Symbol]() // the symbols that are reified with the tree
+ private val symIndex = mutable.HashMap[Symbol, Int]() // the index of a reifiable symbol in `reifiableSyms`
+ private var boundSyms = Set[Symbol]() // set of all symbols that are bound in tree to be reified
+
+ private def definedInLiftedCode(tpe: Type) =
+ tpe exists (tp => boundSyms contains tp.typeSymbol)
+
+ private def definedInLiftedCode(sym: Symbol) =
+ boundSyms contains sym
+
+ /**
+ * Generate tree of the form
+ *
+ * { val $mr = scala.reflect.runtime.Mirror
+ * $local1 = new TypeSymbol(owner1, NoPosition, name1)
+ * ...
+ * $localN = new TermSymbol(ownerN, NoPosition, nameN)
+ * $local1.setInfo(tpe1)
+ * ...
+ * $localN.setInfo(tpeN)
+ * $localN.setAnnotations(annotsN)
+ * rtree
+ * }
+ *
+ * where
+ *
+ * - `$localI` are free type symbols in the environment, as well as local symbols
+ * of refinement types.
+ * - `tpeI` are the infos of `symI`
+ * - `rtree` is code that generates `data` at runtime, maintaining all attributes.
+ * - `data` is typically a tree or a type.
+ */
+ def reifyTopLevel(data: Any): Tree = {
+ val rtree = reify(data)
+ Block(mirrorAlias :: reifySymbolTableSetup, rtree)
+ }
+
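The doc comment above describes the shape of the generated code: a prologue that declares the mirror alias and one local per reifiable symbol, a series of setInfo/setAnnotations fill-ins, and finally the reified payload. A simplified standalone model of assembling that block; the node types here are ad-hoc stand-ins, not the compiler's Tree classes:

object ReifyShapeSketch {
  // Ad-hoc stand-ins for the generated statements.
  sealed trait Stmt
  final case class MirrorAlias(name: String)            extends Stmt
  final case class SymbolDef(local: String)             extends Stmt
  final case class FillIn(local: String, info: String)  extends Stmt
  final case class Payload(code: String)                extends Stmt

  /** Assemble the block in the order described above: alias, symbol defs, fill-ins, payload. */
  def assemble(locals: List[(String, String)], payload: String): List[Stmt] =
    MirrorAlias("$mr") ::
      locals.map { case (l, _) => SymbolDef(l) } :::
      locals.map { case (l, i) => FillIn(l, i) } :::
      List(Payload(payload))

  def main(args: Array[String]): Unit =
    assemble(List("$local0" -> "Int", "$local1" -> "String"), "rtree") foreach println
}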
+ private def isLocatable(sym: Symbol) =
+ sym.isPackageClass || sym.owner.isClass || sym.isTypeParameter && sym.paramPos >= 0
+
+ private def registerReifiableSymbol(sym: Symbol): Unit =
+ if (!(symIndex contains sym)) {
+ sym.owner.ownersIterator find (x => !isLocatable(x)) foreach registerReifiableSymbol
+ symIndex(sym) = reifiableSyms.length
+ reifiableSyms += sym
+ }
+
+ // helper methods
+
+ private def localName(sym: Symbol): TermName =
+ newTermName(localPrefix + symIndex(sym))
+
+ private def call(fname: String, args: Tree*): Tree =
+ Apply(termPath(fname), args.toList)
+
+ private def mirrorSelect(name: String): Tree =
+ termPath(nme.MIRROR_PREFIX + name)
+
+ private def mirrorCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.MIRROR_PREFIX append name), args: _*)
+
+ private def mirrorCall(name: String, args: Tree*): Tree =
+ call(nme.MIRROR_PREFIX + name, args: _*)
+
+ private def mirrorFactoryCall(value: Product, args: Tree*): Tree =
+ mirrorFactoryCall(value.productPrefix, args: _*)
+
+ private def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
+ mirrorCall(prefix, args: _*)
+
+ private def scalaFactoryCall(name: String, args: Tree*): Tree =
+ call(scalaPrefix + name + ".apply", args: _*)
+
+ private def mkList(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.List", args: _*)
+
+ private def reifyModifiers(m: Modifiers) =
+ mirrorCall("modifiersFromInternalFlags", reify(m.flags), reify(m.privateWithin), reify(m.annotations))
+
+ private def reifyAggregate(name: String, args: Any*) =
+ scalaFactoryCall(name, (args map reify).toList: _*)
+
+ /**
+ * Reify a list
+ */
+ private def reifyList(xs: List[Any]): Tree =
+ mkList(xs map reify)
+
+ /**
+ * Reify an array
+ */
+ private def reifyArray(xs: Array[_]): Tree =
+ // @xeno.by: doesn't work for Array(LiteralAnnotArg(...))
+ // because we cannot generate manifests for path-dependent types
+ scalaFactoryCall(nme.Array, xs map reify: _*)
+
+ /** Reify a name */
+ private def reifyName(name: Name) =
+ mirrorCall(if (name.isTypeName) "newTypeName" else "newTermName", Literal(Constant(name.toString)))
+
+ private def isFree(sym: Symbol) =
+ !(symIndex contains sym)
+
+ /**
+ * Reify a reference to a symbol
+ */
+ private def reifySymRef(sym: Symbol): Tree = {
+ symIndex get sym match {
+ case Some(idx) =>
+ Ident(localName(sym))
+ case None =>
+ if (sym == NoSymbol)
+ mirrorSelect("NoSymbol")
+ else if (sym == RootPackage)
+ mirrorSelect("definitions.RootPackage")
+ else if (sym == RootClass)
+ mirrorSelect("definitions.RootClass")
+ else if (sym == EmptyPackage)
+ mirrorSelect("definitions.EmptyPackage")
+ else if (sym.isModuleClass)
+ Select(reifySymRef(sym.sourceModule), "moduleClass")
+ else if (sym.isStatic && sym.isClass)
+ mirrorCall("staticClass", reify(sym.fullName))
+ else if (sym.isStatic && sym.isModule)
+ mirrorCall("staticModule", reify(sym.fullName))
+ else if (isLocatable(sym))
+ if (sym.isTypeParameter)
+ mirrorCall("selectParam", reify(sym.owner), reify(sym.paramPos))
+ else {
+ if (reifyDebug) println("locatable: " + sym + " " + sym.isPackageClass + " " + sym.owner + " " + sym.isTypeParameter)
+ val rowner = reify(sym.owner)
+ val rname = reify(sym.name.toString)
+ if (sym.isType)
+ mirrorCall("selectType", rowner, rname)
+ else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) {
+ val index = sym.owner.info.decl(sym.name).alternatives indexOf sym
+ assert(index >= 0, sym)
+ mirrorCall("selectOverloadedMethod", rowner, rname, reify(index))
+ } else
+ mirrorCall("selectTerm", rowner, rname)
+ }
+ else {
+ if (sym.isTerm) {
+ if (reifyDebug) println("Free: " + sym)
+ val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false)
+ def markIfCaptured(arg: Ident): Tree =
+ if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg
+ mirrorCall("newFreeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym)))
+ } else {
+ if (reifyDebug) println("Late local: " + sym)
+ registerReifiableSymbol(sym)
+ reifySymRef(sym)
+ }
+ }
+ }
+ }
+
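reifySymRef above splits references into groups: symbols already registered in this reification, symbols that can be re-located at runtime through a stable path (static classes and modules, members of locatable owners, type parameters by position), free terms that must be captured by value via newFreeVar, and the remaining free symbols, which get registered as late locals. A toy standalone model of that classification; the Sym fields and predicates are deliberately simplified assumptions:

object SymRefSketch {
  // Simplified stand-in for a symbol: a full name plus a couple of facts about it.
  final case class Sym(fullName: String, isStatic: Boolean, isTerm: Boolean)

  sealed trait Ref
  final case class Local(index: Int)       extends Ref // already reified in this block
  final case class Locate(path: String)    extends Ref // re-resolve by stable path at runtime
  final case class FreeValue(name: String) extends Ref // free term: capture the value itself
  final case class LateLocal(name: String) extends Ref // free type/def: register, then reference locally

  def classify(sym: Sym, localIndex: Map[Sym, Int]): Ref =
    localIndex.get(sym) match {
      case Some(i)              => Local(i)
      case None if sym.isStatic => Locate(sym.fullName)
      case None if sym.isTerm   => FreeValue(sym.fullName)
      case None                 => LateLocal(sym.fullName)
    }

  def main(args: Array[String]): Unit = {
    val list = Sym("scala.collection.immutable.List", isStatic = true, isTerm = false)
    val x    = Sym("x", isStatic = false, isTerm = true)
    println(classify(list, Map.empty)) // Locate(scala.collection.immutable.List)
    println(classify(x, Map.empty))    // FreeValue(x)
  }
}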
+ /**
+ * reify the creation of a symbol
+ */
+ private def reifySymbolDef(sym: Symbol): Tree = {
+ if (reifyDebug) println("reify sym def " + sym)
+
+ ValDef(NoMods, localName(sym), TypeTree(),
+ Apply(
+ Select(reify(sym.owner), "newNestedSymbol"),
+ List(reify(sym.name), reify(sym.pos), Literal(Constant(sym.flags)))
+ )
+ )
+ }
+
+ /**
+ * Generate code to add type and annotation info to a reified symbol
+ */
+ private def fillInSymbol(sym: Symbol): Tree = {
+ val rset = Apply(Select(reifySymRef(sym), nme.setTypeSignature), List(reifyType(sym.info)))
+ if (sym.annotations.isEmpty) rset
+ else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations)))
+ }
+
+ /** Reify a scope */
+ private def reifyScope(scope: Scope): Tree = {
+ scope foreach registerReifiableSymbol
+ mirrorCall(nme.newScopeWith, scope.toList map reifySymRef: _*)
+ }
+
+ /** Reify a list of symbols that need to be created */
+ private def reifySymbols(syms: List[Symbol]): Tree = {
+ syms foreach registerReifiableSymbol
+ mkList(syms map reifySymRef)
+ }
+
+ /** Reify a type that defines some symbols */
+ private def reifyTypeBinder(value: Product, bound: List[Symbol], underlying: Type): Tree =
+ mirrorFactoryCall(value, reifySymbols(bound), reify(underlying))
+
+ /** Reify a type */
+ private def reifyType(tpe0: Type): Tree = {
+ val tpe = tpe0.normalize
+
+ if (tpe.isErroneous)
+ CannotReifyErroneousType(tpe)
+ if (definedInLiftedCode(tpe))
+ CannotReifyTypeInvolvingBoundType(tpe)
+
+ val tsym = tpe.typeSymbol
+ if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
+ Select(reifySymRef(tpe.typeSymbol), nme.asTypeConstructor)
+ else tpe match {
+ case t @ NoType =>
+ reifyMirrorObject(t)
+ case t @ NoPrefix =>
+ reifyMirrorObject(t)
+ case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
+ mirrorCall(nme.thisModuleType, reify(clazz.fullName))
+ case t @ RefinedType(parents, decls) =>
+ registerReifiableSymbol(tpe.typeSymbol)
+ mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
+ case t @ ClassInfoType(parents, decls, clazz) =>
+ registerReifiableSymbol(clazz)
+ mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
+ case t @ ExistentialType(tparams, underlying) =>
+ reifyTypeBinder(t, tparams, underlying)
+ case t @ PolyType(tparams, underlying) =>
+ reifyTypeBinder(t, tparams, underlying)
+ case t @ MethodType(params, restpe) =>
+ reifyTypeBinder(t, params, restpe)
+ case t @ AnnotatedType(anns, underlying, selfsym) =>
+ val saved1 = reifySymbols
+ val saved2 = reifyTypes
+
+ try {
+ // one more quirk of reifying annotations
+ //
+ // when reifying AnnotatedTypes we need to reify all the types and symbols of inner ASTs
+ // that's because a lot of logic expects post-typer trees to have non-null tpes
+ //
+ // Q: reified trees are pre-typer, so there shouldn't be a problem.
+ // reflective typechecker will fill in missing symbols and types, right?
+ // A: actually, no. annotation ASTs live inside AnnotatedTypes,
+ // and the insides of types are exactly where the typechecker doesn't look.
+ reifySymbols = true
+ reifyTypes = true
+ if (reifyDebug) println("reify AnnotatedType: " + tpe)
+ reifyProductUnsafe(tpe)
+ } finally {
+ reifySymbols = saved1
+ reifyTypes = saved2
+ }
+ case _ =>
+ reifyProductUnsafe(tpe)
+ }
+ }
+
+ var reifySymbols = false
+ var reifyTypes = false
+
+ /** Preprocess a tree before reification */
+ private def trimTree(tree: Tree): Tree = {
+ def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]) = {
+ var stats1 = stats filterNot (stat => stat.isDef && {
+ if (stat.symbol.isCaseAccessorMethod && reifyDebug) println("discarding case accessor method: " + stat)
+ stat.symbol.isCaseAccessorMethod
+ })
+ stats1 = stats1 filterNot (memberDef => memberDef.isDef && {
+ val isSynthetic = memberDef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic members are, in fact, generated for case classes
+// val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass
+ val isCaseMember = true
+ if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef)
+ isSynthetic && isCaseMember
+ })
+ stats1 = stats1 map {
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isCaseAccessor =>
+ if (reifyDebug) println("resetting visibility of case accessor field: " + valdef)
+ val Modifiers(flags, privateWithin, annotations) = mods
+ val flags1 = flags & ~Flags.LOCAL & ~Flags.PRIVATE
+ val mods1 = Modifiers(flags1, privateWithin, annotations)
+ ValDef(mods1, name, tpt, rhs).copyAttrs(valdef)
+ case stat =>
+ stat
+ }
+ stats1
+ }
+
+ def trimSyntheticCaseClassCompanions(stats: List[Tree]) =
+ stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => {
+ val isSynthetic = moddef.symbol.isSynthetic
+ // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes
+// val isCaseCompanion = moddef.symbol.companionClass.isCaseClass
+ val isCaseCompanion = true
+ // @xeno.by: we also have to do this ugly hack for the very same reason described above
+ // normally this sort of stuff is performed in reifyTree, which binds related symbols; however, local companions will be out of its reach
+ if (reifyDebug) println("boundSym: "+ moddef.symbol)
+ boundSyms += moddef.symbol
+ if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef)
+ isSynthetic && isCaseCompanion
+ }))
+
+ tree match {
+ case tree if tree.isErroneous =>
+ tree
+ case ta @ TypeApply(hk, ts) =>
+ def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null
+ val discard = ts collect { case tt: TypeTree => tt } exists isErased
+ if (reifyDebug && discard) println("discarding TypeApply: " + tree)
+ if (discard) hk else ta
+ case classDef @ ClassDef(mods, name, params, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(classDef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ClassDef(mods, name, params, impl1).copyAttrs(classDef)
+ case moduledef @ ModuleDef(mods, name, impl) =>
+ val Template(parents, self, body) = impl
+ val body1 = trimSyntheticCaseClassMembers(moduledef, body)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ModuleDef(mods, name, impl1).copyAttrs(moduledef)
+ case template @ Template(parents, self, body) =>
+ val body1 = trimSyntheticCaseClassCompanions(body)
+ Template(parents, self, body1).copyAttrs(template)
+ case block @ Block(stats, expr) =>
+ val stats1 = trimSyntheticCaseClassCompanions(stats)
+ Block(stats1, expr).copyAttrs(block)
+ case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isLazy =>
+ if (reifyDebug) println("dropping $lzy in lazy val's name: " + tree)
+ val name1 = if (name endsWith nme.LAZY_LOCAL) name dropRight nme.LAZY_LOCAL.length else name
+ ValDef(mods, name1, tpt, rhs).copyAttrs(valdef)
+ case unapply @ UnApply(fun, args) =>
+ def extractExtractor(tree: Tree): Tree = {
+ val Apply(fun, args) = tree
+ args match {
+ case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
+ val Select(extractor, flavor) = fun
+ assert(flavor == nme.unapply || flavor == nme.unapplySeq)
+ extractor
+ case _ =>
+ extractExtractor(fun)
+ }
+ }
+
+ if (reifyDebug) println("unapplying unapply: " + tree)
+ val fun1 = extractExtractor(fun)
+ Apply(fun1, args).copyAttrs(unapply)
+ case _ =>
+ tree
+ }
+ }
+
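trimTree above undoes a handful of namer/typer artifacts before reification: synthetic case-class members and companions are dropped, case-accessor visibility is relaxed, and the "$lzy" suffix is stripped from lazy-val names. A toy sketch of the dropping and renaming steps over plain strings; the Member model is hypothetical, only the filtering/renaming idea carries over:

object TrimSketch {
  // Hypothetical, drastically simplified member model.
  final case class Member(name: String, isSynthetic: Boolean)

  val LazySuffix = "$lzy"

  /** Drop compiler-generated members and restore the user-visible lazy val name. */
  def trim(members: List[Member]): List[Member] =
    members
      .filterNot(_.isSynthetic)
      .map(m => m.copy(name = m.name.stripSuffix(LazySuffix)))

  def main(args: Array[String]): Unit =
    println(trim(List(Member("x$lzy", isSynthetic = false), Member("copy", isSynthetic = true))))
    // List(Member(x,false))
}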
+ /** Reify a tree */
+ private def reifyTree(tree0: Tree): Tree = {
+ val tree = trimTree(tree0)
+
+ var rtree = tree match {
+ case tree if tree.isErroneous =>
+ CannotReifyErroneousTree(tree)
+ case self.EmptyTree =>
+ reifyMirrorObject(EmptyTree)
+ case self.emptyValDef =>
+ mirrorSelect(nme.emptyValDef)
+ case This(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) =>
+ reifyFree(tree)
+ case Ident(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) =>
+ if (tree.symbol.isVariable && tree.symbol.owner.isTerm) {
+ if (reifyDebug) println("captured variable: " + tree.symbol)
+ captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here.
+ mirrorCall("Select", reifyFree(tree), reifyName(nme.elem))
+ } else reifyFree(tree)
+ case tt: TypeTree if (tt.tpe != null) =>
+ reifyTypeTree(tt)
+ case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) =>
+ CannotReifyClassOfBoundType(tree, tpe)
+ case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym =>
+ CannotReifyClassOfBoundEnum(tree, constant.tpe)
+ case tree if tree.isDef =>
+ if (reifyDebug) println("boundSym: %s of type %s".format(tree.symbol, (tree.productIterator.toList collect { case tt: TypeTree => tt } headOption).getOrElse(TypeTree(tree.tpe))))
+ boundSyms += tree.symbol
+
+ bindRelatedSymbol(tree.symbol.sourceModule, "sourceModule")
+ bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass")
+ bindRelatedSymbol(tree.symbol.companionClass, "companionClass")
+ bindRelatedSymbol(tree.symbol.companionModule, "companionModule")
+ Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") }
+ def bindRelatedSymbol(related: Symbol, name: String): Unit =
+ if (related != null && related != NoSymbol) {
+ if (reifyDebug) println("boundSym (" + name + "): " + related)
+ boundSyms += related
+ }
+
+ val prefix = tree.productPrefix
+ val elements = (tree.productIterator map {
+ // annotations exist in two flavors:
+ // 1) pre-typer ones that populate: a) Modifiers, b) Annotated nodes (irrelevant in this context)
+ // 2) post-typer ones that dwell inside: a) sym.annotations, b) AnnotatedTypes (irrelevant in this context)
+ //
+ // here we process Modifiers that are involved in deftrees
+ // AnnotatedTypes get reified elsewhere (currently, in ``reifyTypeTree'')
+ case Modifiers(flags, privateWithin, annotations) =>
+ assert(annotations.isEmpty) // should've been eliminated by the typer
+ val postTyper = tree.symbol.annotations filter (_.original != EmptyTree)
+ if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for %s: %s".format(tree.symbol, tree.symbol.annotations))
+ val preTyper = postTyper map toPreTyperAnnotation
+ Modifiers(flags, privateWithin, preTyper)
+ case x =>
+ x
+ }).toList
+ reifyProduct(prefix, elements)
+ case _ =>
+ reifyProduct(tree)
+ }
+
+ // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
+ // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
+ if (reifySymbols && tree.hasSymbol) {
+ if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
+ rtree = Apply(Select(rtree, nme.setSymbol), List(reifySymRef(tree.symbol)))
+ }
+ if (reifyTypes && tree.tpe != null) {
+ if (reifyDebug) println("reifying type %s for tree %s".format(tree.tpe, tree))
+ rtree = Apply(Select(rtree, nme.setType), List(reifyType(tree.tpe)))
+ }
+
+ rtree
+ }
+
+ /** Reify pre-typer representation of a type.
+ *
+ * NB: This is the trickiest part of reification!
+ *
+ * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * then we cannot reify it, or otherwise subsequent reflective compilation will fail.
+ *
+ * Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
+ * so naively reified symbols will get out of sync, which leads to really funny compilation errors and/or crashes, e.g.:
+ * https://issues.scala-lang.org/browse/SI-5230
+ *
+ * To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees contain no types at all, so expressing everything as trees should be possible).
+ * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
+ * In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
+ *
+ * An important property of the original is that it isn't just a pre-typer tree.
+ * It's actually kind of a post-typer tree with symbols assigned to its Idents (e.g. Ident("List") will contain a symbol that points to immutable.this.List).
+ * This is very important, since subsequent reflective compilation won't have to resolve these symbols.
+ * In the general case, such resolution cannot be performed, since reification doesn't preserve lexical context,
+ * which means that reflective compilation won't be aware of, say, imports that were provided when the reifee has been compiled.
+ *
+ * This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction leaked.
+ * Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees.
+ * So far I know of only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless.
+ * This is laboriously worked around in the code below. I hope this will be the only workaround in this department.
+ */
+ private def reifyTypeTree(tt: TypeTree): Tree = {
+ if (definedInLiftedCode(tt.tpe)) {
+ if (reifyDebug) println("reifyTypeTree, defined in lifted code: " + tt.tpe)
+ if (tt.original != null) {
+ val annotations = tt.tpe filter { _.isInstanceOf[AnnotatedType] } collect { case atp: AnnotatedType => atp.annotations } flatten
+ val annmap = annotations map { ann => (ann.original, ann) } toMap
+
+ // annotations exist in two flavors:
+ // 1) pre-typer ones that populate: a) Modifiers (irrelevant in this context), b) Annotated nodes
+ // 2) post-typer ones that dwell inside: a) sym.annotations (irrelevant in this context), b) AnnotatedTypes
+ //
+ // here we process AnnotatedTypes, since only they can be involved in TypeTrees
+ // Modifiers get reified elsewhere (currently, in the "isDef" case of ``reifyTree'')
+ //
+ // the problem with annotations is that their originals don't preserve any symbols at all
+ // read the comment to this method to find out why it's bad
+ // that's why we transplant typechecked, i.e. symful, annotations onto original trees
+ class AnnotationFixup extends self.Transformer {
+ override def transform(tree: Tree) = tree match {
+ case Annotated(ann0, args) =>
+ assert(annmap contains ann0)
+ val ann1 = annmap(ann0)
+ val ann = toPreTyperAnnotation(ann1)
+ Annotated(ann, transform(args))
+ case _ =>
+ tree
+ }
+ }
+
+ if (reifyDebug) println("verdict: essential, reify as original")
+ val patchedOriginal = new AnnotationFixup().transform(tt.original)
+ reifyTree(patchedOriginal)
+ } else {
+ // type is deemed to be non-essential
+ // erase it and hope that subsequent reflective compilation will be able to recreate it again
+ if (reifyDebug) println("verdict: non-essential, discard")
+ mirrorCall("TypeTree")
+ }
+ } else {
+ var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe))
+ // @xeno.by: temporarily disabling reification of originals
+ // subsequent reflective compilation will try to typecheck them
+ // and this means that the reifier has to do additional efforts to ensure that this will succeed
+ // additional efforts + no clear benefit = will be implemented later
+// if (tt.original != null) {
+// val setOriginal = Select(rtt, newTermName("setOriginal"))
+// val reifiedOriginal = reify(tt.original)
+// rtt = Apply(setOriginal, List(reifiedOriginal))
+// }
+ rtt
+ }
+ }
+
+ /** Reify post-typer representation of an annotation */
+ private def reifyAnnotation(ann: AnnotationInfo): Tree =
+ // @xeno.by: if you reify originals, you get a StackOverflowError when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
+ mirrorFactoryCall("AnnotationInfo", reifyType(ann.atp), reifyList(ann.args), reify(ann.assocs))
+
+ /** Reify pre-typer representation of an annotation.
+ * The trick here is to retain the symbols that have been populated during typechecking of the annotation.
+ * If we do not do that, subsequent reflective compilation will fail.
+ */
+ private def toPreTyperAnnotation(ann: AnnotationInfo): Tree = {
+ if (definedInLiftedCode(ann.atp)) {
+ // todo. deconstruct reifiable tree from ann.original and ann.args+ann.assocs
+ //
+ // keep in mind that we can't simply use ann.original, because its args are symless
+ // which means that any imported symbol (e.g. List) will crash subsequent reflective compilation
+ // hint: if I had enough time, I'd try to extract reifiable annotation type from ann.original
+ // and to apply its constructor to ann.args (that are symful, i.e. suitable for reification)
+ //
+ // also, if we pursue the route of reifying annotations defined in lifted code
+ // we should think about how to provide types for all nodes of the return value
+ // this will be necessary for reifying AnnotatedTypes, since ASTs inside ATs must all have non-null tpes
+ // an alternative would be downgrading ATs to Annotated nodes, but this needs careful thinking
+ // for now I just leave this as an implementation restriction
+ CannotReifyAnnotationInvolvingBoundType(ann)
+ } else {
+ val args = if (ann.assocs.isEmpty) {
+ ann.args
+ } else {
+ def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match {
+ case LiteralAnnotArg(const) =>
+ Literal(const)
+ case ArrayAnnotArg(arr) =>
+ Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
+ case NestedAnnotArg(ann) =>
+ toPreTyperAnnotation(ann)
+ }
+
+ ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
+ }
+
+ New(TypeTree(ann.atp), List(args))
+ }
+ }
+
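toPreTyperAnnotation above rebuilds a constructor call from the typechecked annotation info: positional args are reused directly, while classfile-style assocs become named arguments (AssignOrNamedArg nodes under a New in the real code). A standalone sketch of the assoc-to-named-argument step, string-rendered for readability; the AnnArg types and the example annotation are assumptions for illustration:

object PreTyperAnnSketch {
  // Simplified classfile-style annotation arguments.
  sealed trait AnnArg
  final case class LiteralArg(value: Any)        extends AnnArg
  final case class ArrayArg(elems: List[AnnArg]) extends AnnArg

  def render(arg: AnnArg): String = arg match {
    case LiteralArg(v)   => v.toString
    case ArrayArg(elems) => elems.map(render).mkString("Array(", ", ", ")")
  }

  /** Turn (name -> arg) pairs into the `name = value` arguments of a constructor call. */
  def toConstructorCall(annType: String, assocs: List[(String, AnnArg)]): String =
    assocs
      .map { case (name, arg) => name + " = " + render(arg) }
      .mkString("new " + annType + "(", ", ", ")")

  def main(args: Array[String]): Unit =
    println(toConstructorCall("SerialVersionUID", List("value" -> LiteralArg(13L))))
    // new SerialVersionUID(value = 13)
}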
+ /**
+ * Reify a free reference. The result will be either a mirror reference
+ * to a global value, or else a mirror Literal.
+ */
+ private def reifyFree(tree: Tree): Tree = tree match {
+ case This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass =>
+ val sym = tree.symbol
+ if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
+ if (reifyDebug) println("Free: " + sym)
+ val freeVar = mirrorCall("newFreeVar", reify(sym.name.toString), reify(sym.tpe), This(sym))
+ mirrorCall(nme.Ident, freeVar)
+ case This(_) =>
+ if (reifyDebug) println("This for %s, reified as This".format(tree.symbol))
+ mirrorCall(nme.This, reifySymRef(tree.symbol))
+ case _ =>
+ mirrorCall(nme.Ident, reifySymRef(tree.symbol))
+ }
+
+ // todo: consider whether we should also reify positions
+ private def reifyPosition(pos: Position): Tree =
+ reifyMirrorObject(NoPosition)
+
+ // !!! we must eliminate these casts.
+ private def reifyProductUnsafe(x: Any): Tree =
+ if (x.isInstanceOf[Product]) reifyProduct(x.asInstanceOf[Product])
+ else throw new Exception("%s of type %s cannot be cast to Product".format(x, x.getClass))
+ private def reifyProduct(x: Product): Tree =
+ reifyProduct(x.productPrefix, x.productIterator.toList)
+ private def reifyProduct(prefix: String, elements: List[Any]): Tree = {
+ // @xeno.by: reflection would be more robust, but, hey, this is a hot path
+ if (prefix.startsWith("Tuple")) reifyAggregate(prefix, elements: _*)
+ else mirrorCall(prefix, (elements map reify): _*)
+ }
+
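reifyProduct above leans on the Product interface: any case class can be rebuilt from its productPrefix and productIterator, which is what makes the generic fallback possible (tuples are special-cased and built via their scala.TupleN factories instead of mirror calls). A standalone sketch of walking a product this way, rendering it as a constructor call; Ident and Select here are tiny stand-in case classes:

object ProductSketch {
  /** Render any case class value as "Prefix(elem1, elem2, ...)" using only the Product API. */
  def renderProduct(p: Product): String =
    p.productIterator
      .map {
        case nested: Product => renderProduct(nested)
        case other           => other.toString
      }
      .mkString(p.productPrefix + "(", ", ", ")")

  final case class Ident(name: String)
  final case class Select(qual: Ident, name: String)

  def main(args: Array[String]): Unit =
    println(renderProduct(Select(Ident("scala"), "Predef")))
    // Select(Ident(scala), Predef)
}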
+ /**
+ * Reify a case object defined in Mirror
+ */
+ private def reifyMirrorObject(name: String): Tree = mirrorSelect(name)
+ private def reifyMirrorObject(x: Product): Tree = reifyMirrorObject(x.productPrefix)
+
+ private def isReifiableConstant(value: Any) = value match {
+ case null => true // seems pretty reifiable to me?
+ case _: String => true
+ case _ => isAnyVal(value)
+ }
+
+ /** Reify an arbitrary value */
+ private def reify(value: Any): Tree = value match {
+ case tree: Tree => reifyTree(tree)
+ case sym: Symbol => reifySymRef(sym)
+ case tpe: Type => reifyType(tpe)
+ case xs: List[_] => reifyList(xs)
+ case xs: Array[_] => reifyArray(xs)
+ case scope: Scope => reifyScope(scope)
+ case x: Name => reifyName(x)
+ case x: Position => reifyPosition(x)
+ case x: Modifiers => reifyModifiers(x)
+ case x: AnnotationInfo => reifyAnnotation(x)
+ case _ =>
+ if (isReifiableConstant(value)) Literal(Constant(value))
+ else reifyProductUnsafe(value)
+ }
+
+ /**
+ * An (unreified) path that refers to a definition with the given fully qualified name
+ * @param mkName Creator for the last portion of the name (either TermName or TypeName)
+ */
+ private def path(fullname: String, mkName: String => Name): Tree = {
+ val parts = fullname split "\\."
+ val prefixParts = parts.init
+ val lastName = mkName(parts.last)
+ if (prefixParts.isEmpty) Ident(lastName)
+ else {
+ val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _))
+ Select(prefixTree, lastName)
+ }
+ }
+
+ /** An (unreified) path that refers to a term definition with the given fully qualified name */
+ private def termPath(fullname: String): Tree = path(fullname, newTermName)
+
+ /** An (unreified) path that refers to a type definition with the given fully qualified name */
+ private def typePath(fullname: String): Tree = path(fullname, newTypeName)
+
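The path helpers above fold a dotted name into a chain of selections, with only the last segment getting the term/type flavour via mkName. The same fold over a toy tree type; Ident and Select here are simplified stand-ins, not the compiler's nodes, and name kinds are ignored:

object PathSketch {
  sealed trait Tree
  final case class Ident(name: String)              extends Tree
  final case class Select(qual: Tree, name: String) extends Tree

  /** "a.b.c" ==> Select(Select(Ident(a), b), c) */
  def path(fullname: String): Tree = {
    val parts = fullname.split("\\.").toList
    parts.tail.foldLeft(Ident(parts.head): Tree)(Select(_, _))
  }

  def main(args: Array[String]): Unit =
    println(path("scala.collection.immutable.List"))
    // Select(Select(Select(Ident(scala),collection),immutable),List)
}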
+ private def mirrorAlias =
+ ValDef(NoMods, nme.MIRROR_SHORT, SingletonTypeTree(termPath(fullnme.MirrorPackage)), termPath(fullnme.MirrorPackage))
+
+ /**
+ * Generate code that generates a symbol table of all symbols registered in `reifiableSyms`
+ */
+ private def reifySymbolTableSetup: List[Tree] = {
+ val symDefs, fillIns = new mutable.ArrayBuffer[Tree]
+ var i = 0
+ while (i < reifiableSyms.length) {
+ // fillInSymbol might create new reifiableSyms, that's why this is done iteratively
+ symDefs += reifySymbolDef(reifiableSyms(i))
+ fillIns += fillInSymbol(reifiableSyms(i))
+ i += 1
+ }
+
+ symDefs.toList ++ fillIns.toList
+ }
+ } // end of Reifier
+
+ object Reifier {
+ def CannotReifyPreTyperTree(tree: Tree) = {
+ val msg = "pre-typer trees are not supported, consider typechecking the tree before passing it to the reifier"
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyErroneousTree(tree: Tree) = {
+ val msg = "erroneous trees are not supported, make sure that your tree typechecks successfully before passing it to the reifier"
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyErroneousType(tpe: Type) = {
+ val msg = "erroneous types are not supported, make sure that your tree typechecks successfully before passing it to the reifier"
+ throw new ReifierError(NoPosition, msg)
+ }
+
+ def CannotReifyClassOfBoundType(tree: Tree, tpe: Type) = {
+ val msg = "implementation restriction: cannot reify classOf[%s] which refers to a type declared inside the block being reified".format(tpe)
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyClassOfBoundEnum(tree: Tree, tpe: Type) = {
+ val msg = "implementation restriction: cannot reify classOf[%s] which refers to an enum declared inside the block being reified".format(tpe)
+ throw new ReifierError(tree.pos, msg)
+ }
+
+ def CannotReifyTypeInvolvingBoundType(tpe: Type) = {
+ val msg = "implementation restriction: cannot reify type %s which involves a symbol declared inside the block being reified".format(tpe)
+ throw new ReifierError(NoPosition, msg)
+ }
+
+ def CannotReifyAnnotationInvolvingBoundType(ann: AnnotationInfo) = {
+ val msg = "implementation restriction: cannot reify annotation @%s which involves a symbol declared inside the block being reified".format(ann)
+ throw new ReifierError(ann.original.pos, msg)
+ }
+ } // end of Reifier
+
+ // begin reify
+ import Reifier._
+ if (tree.tpe != null) {
+ val saved = printTypings
+ try {
+ val reifyDebug = settings.Yreifydebug.value
+ val debugTrace = util.trace when reifyDebug
+ debugTrace("transforming = ")(if (settings.Xshowtrees.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ debugTrace("transformed = ") {
+ val reifier = new Reifier()
+ val untyped = reifier.reifyTopLevel(tree)
+
+ val reifyCopypaste = settings.Yreifycopypaste.value
+ if (reifyCopypaste) {
+ if (reifyDebug) println("=======================")
+ println(reifiedNodeToString(untyped))
+ if (reifyDebug) println("=======================")
+ }
+
+ untyped
+ }
+ } finally {
+ printTypings = saved
+ }
+ } else {
+ CannotReifyPreTyperTree(tree)
+ }
+ }
+
+ /** A throwable signalling a reification error */
+ class ReifierError(var pos: Position, val msg: String) extends Throwable(msg) {
+ def this(msg: String) = this(NoPosition, msg)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala
new file mode 100644
index 0000000000..fce59bb099
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala
@@ -0,0 +1,75 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package ast
+
+import compat.Platform.EOL
+import symtab._
+import Flags._
+
+trait ReifyPrinters { self: NodePrinters =>
+
+ val global: Global
+ import global._
+
+ object reifiedNodeToString extends Function1[Tree, String] {
+ def apply(tree: Tree): String = {
+ import scala.reflect.api.Modifier
+
+ // @PP: I fervently hope this is a test case or something, not anything being
+ // depended upon. Of more fragile code I cannot conceive.
+ // @eb: This stuff is only needed to debug-print out reifications in human-readable format
+ // Rolling a full-fledged, robust TreePrinter would be several times more code.
+ (for (line <- (tree.toString.split(EOL) drop 2 dropRight 1)) yield {
+ var s = line.trim
+ s = s.replace("$mr.", "")
+ s = s.replace(".apply", "")
+ s = s.replace("scala.collection.immutable.", "")
+ s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
+ s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
+ s = s.replace("immutable.this.Nil", "List()")
+ s = s.replace("modifiersFromInternalFlags", "Modifiers")
+ s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ val buf = new collection.mutable.ListBuffer[String]
+
+ val annotations = m.group(3)
+ if (buf.nonEmpty || annotations.nonEmpty)
+ buf.append("List(" + annotations + ")")
+
+ val privateWithin = "" + m.group(2)
+ if (buf.nonEmpty || privateWithin != "")
+ buf.append("newTypeName(\"" + privateWithin + "\")")
+
+ val flags = m.group(1).toLong
+ val s_flags = Flags.modifiersOfFlags(flags) map (_.sourceString) mkString ", "
+ if (buf.nonEmpty || s_flags != "")
+ buf.append("Set(" + s_flags + ")")
+
+ "Modifiers(" + buf.reverse.mkString(", ") + ")"
+ })
+ s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => {
+ val flags = m.group(1).toLong
+ val mods = Flags.modifiersOfFlags(flags) map (_.sourceString)
+ "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))"
+ })
+
+ s
+ }) mkString EOL
+ }
+ }
+
+
+ def printReifyCopypaste(tree: Tree) {
+ val reifyDebug = settings.Yreifydebug.value
+ if (reifyDebug) println("=======================")
+ printReifyCopypaste1(tree)
+ if (reifyDebug) println("=======================")
+ }
+
+ def printReifyCopypaste1(tree: Tree) {
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 752e3c6699..c1d6c1a4d4 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -33,17 +33,16 @@ abstract class TreeBrowsers {
val borderSize = 10
-
def create(): SwingBrowser = new SwingBrowser();
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
- override def toString(): String = "Program"
+ override def toString: String = "Program"
}
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class UnitTree(unit: CompilationUnit) extends Tree {
- override def toString(): String = unit.toString()
+ override def toString: String = unit.toString
}
/**
@@ -360,7 +359,7 @@ abstract class TreeBrowsers {
("Program", EMPTY)
case UnitTree(unit) =>
- ("CompilationUnit", unit.toString)
+ ("CompilationUnit", newTermName("" + unit))
case DocDef(comment, definition) =>
("DocDef", EMPTY)
@@ -441,7 +440,7 @@ abstract class TreeBrowsers {
("Apply", EMPTY)
case Super(qualif, mix) =>
- ("Super", "mix: " + mix)
+ ("Super", newTermName("mix: " + mix))
case This(qualifier) =>
("This", qualifier)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index efc64dbbc5..2cfd21ecc8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -202,7 +202,7 @@ trait TreeDSL {
class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
def symType = sym.tpe.finalResultType
def tparams = sym.typeParams map TypeDef
- def vparamss = sym.paramss map (xs => xs map ValDef)
+ def vparamss = mapParamss(sym)(ValDef)
}
class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
def symType = sym.tpe
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 0dc3b1fffd..265d017653 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -30,6 +30,25 @@ abstract class TreeGen extends reflect.internal.TreeGen {
else
tree
}
+
+ /** Builds a fully attributed wildcard import node.
+ */
+ def mkWildcardImport(pkg: Symbol): Import = {
+ assert(pkg ne null, this)
+ val qual = gen.mkAttributedStableRef(pkg)
+ val importSym = (
+ NoSymbol
+ newImport NoPosition
+ setFlag SYNTHETIC
+ setInfo analyzer.ImportType(qual)
+ )
+ val importTree = (
+ Import(qual, List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ setSymbol importSym
+ setType NoType
+ )
+ importTree
+ }
// wrap the given expression in a SoftReference so it can be gc-ed
def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
@@ -77,17 +96,17 @@ abstract class TreeGen extends reflect.internal.TreeGen {
}
def mkModuleVarDef(accessor: Symbol) = {
+ val inClass = accessor.owner.isClass
+ val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
+
val mval = (
- accessor.owner.newVariable(accessor.pos.focus, nme.moduleVarName(accessor.name))
- setInfo accessor.tpe.finalResultType
- setFlag (MODULEVAR)
+ accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
+ setInfo accessor.tpe.finalResultType
+ addAnnotation VolatileAttr
)
+ if (inClass)
+ mval.owner.info.decls enter mval
- mval addAnnotation VolatileAttr
- if (mval.owner.isClass) {
- mval setFlag (PrivateLocal | SYNTHETIC)
- mval.owner.info.decls.enter(mval)
- }
ValDef(mval)
}
@@ -128,7 +147,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkManifestFactoryCall(full: Boolean, constructor: String, tparg: Type, args: List[Tree]): Tree =
mkMethodCall(
if (full) FullManifestModule else PartialManifestModule,
- constructor,
+ newTermName(constructor),
List(tparg),
args
)
@@ -161,16 +180,10 @@ abstract class TreeGen extends reflect.internal.TreeGen {
* apply the element type directly.
*/
def mkWrapArray(tree: Tree, elemtp: Type) = {
- val sym = elemtp.typeSymbol
- val meth: Name =
- if (isValueClass(sym)) "wrap"+sym.name+"Array"
- else if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(sym)) "wrapRefArray"
- else "genericWrapArray"
-
mkMethodCall(
PredefModule,
- meth,
- if (isValueClass(sym)) Nil else List(elemtp),
+ wrapArrayMethodName(elemtp),
+ if (isScalaValueType(elemtp)) Nil else List(elemtp),
List(tree)
)
}
@@ -179,8 +192,8 @@ abstract class TreeGen extends reflect.internal.TreeGen {
* elem type elemtp to expected type pt.
*/
def mkCastArray(tree: Tree, elemtp: Type, pt: Type) =
- if (elemtp.typeSymbol == AnyClass && isValueClass(tree.tpe.typeArgs.head.typeSymbol))
- mkCast(mkRuntimeCall("toObjectArray", List(tree)), pt)
+ if (elemtp.typeSymbol == AnyClass && isScalaValueType(tree.tpe.typeArgs.head))
+ mkCast(mkRuntimeCall(nme.toObjectArray, List(tree)), pt)
else
mkCast(tree, pt)
@@ -205,11 +218,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
*/
private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
val packedType = typer.packedType(expr, owner)
- val sym = (
- owner.newValue(expr.pos.makeTransparent, name)
- setFlag SYNTHETIC
- setInfo packedType
- )
+ val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
(ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index 5c3071739c..3371353f25 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -51,14 +51,11 @@ trait TreePrinters extends reflect.internal.TreePrinters { this: Global =>
treePrinter.println()
treePrinter.print(definition)
- case AssignOrNamedArg(lhs, rhs) =>
- treePrinter.print(lhs, " = ", rhs)
-
case TypeTreeWithDeferredRefCheck() =>
treePrinter.print("<tree with deferred refcheck>")
case SelectFromArray(qualifier, name, _) =>
- treePrinter.print(qualifier, ".<arr>", treePrinter.symName(tree, name))
+ treePrinter.print(qualifier, ".<arr>", symName(tree, name))
case _ =>
super.xprintTree(treePrinter, tree)
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index f3eaff8db0..855b55bb5e 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -17,7 +17,6 @@ import scala.reflect.internal.Flags.TRAIT
trait Trees extends reflect.internal.Trees { self: Global =>
// --- additional cases --------------------------------------------------------
-
/** Only used during parsing */
case class Parens(args: List[Tree]) extends Tree
@@ -31,20 +30,22 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override def isType = definition.isType
}
-
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply)
- */
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
-
/** Array selection <qualifier> . <name> only used during erasure */
case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
- extends TermTree with RefTree { }
+ extends TermTree with RefTree
/** emitted by typer, eliminated by refchecks */
case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree
+ /** Marks underlying reference to id as boxed.
+ * @pre: id must refer to a captured variable
+ * A reference marked this way will refer to the boxed entity; no dereferencing
+ * with `.elem` is done on it.
+ * This tree node can be emitted by macros such as reify that call markBoxedReference.
+ * It is eliminated in LambdaLift, where the boxing conversion takes place.
+ */
+ case class ReferenceToBoxed(idt: Ident) extends TermTree
+
// --- factory methods ----------------------------------------------------------
/** Generates a template with constructor corresponding to
@@ -77,16 +78,17 @@ trait Trees extends reflect.internal.Trees { self: Global =>
}})
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val (lvdefs, gvdefs) = evdefs map {
+ val gvdefs = evdefs map {
case vdef @ ValDef(mods, name, tpt, rhs) =>
- val fld = treeCopy.ValDef(
+ treeCopy.ValDef(
vdef.duplicate, mods, name,
atPos(focusPos(vdef.pos)) { TypeTree() setOriginal tpt setPos focusPos(tpt.pos) }, // atPos in case
EmptyTree)
- val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
- (local, fld)
- } unzip
-
+ }
+ val lvdefs = evdefs map {
+ case vdef @ ValDef(mods, name, tpt, rhs) =>
+ treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
+ }
val constrs = {
if (constrMods hasFlag TRAIT) {
if (body forall treeInfo.isInterfaceMember) List()
@@ -147,19 +149,19 @@ trait Trees extends reflect.internal.Trees { self: Global =>
traverser.traverseTrees(ts)
case DocDef(comment, definition) =>
traverser.traverse(definition)
- case AssignOrNamedArg(lhs, rhs) =>
- traverser.traverse(lhs); traverser.traverse(rhs)
case SelectFromArray(qualifier, selector, erasure) =>
traverser.traverse(qualifier)
- case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
+ case ReferenceToBoxed(idt) =>
+ traverser.traverse(idt)
+ case TypeTreeWithDeferredRefCheck() =>
// (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
case _ => super.xtraverse(traverser, tree)
}
trait TreeCopier extends super.TreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
+ def ReferenceToBoxed(tree: Tree, idt: Ident): ReferenceToBoxed
def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
}
@@ -169,10 +171,10 @@ trait Trees extends reflect.internal.Trees { self: Global =>
class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier {
def DocDef(tree: Tree, comment: DocComment, definition: Tree) =
new DocDef(comment, definition).copyAttrs(tree)
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
- new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
+ def ReferenceToBoxed(tree: Tree, idt: Ident) =
+ new ReferenceToBoxed(idt).copyAttrs(tree)
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
}
@@ -184,16 +186,16 @@ trait Trees extends reflect.internal.Trees { self: Global =>
if (comment0 == comment) && (definition0 == definition) => t
case _ => this.treeCopy.DocDef(tree, comment, definition)
}
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
- case t @ AssignOrNamedArg(lhs0, rhs0)
- if (lhs0 == lhs) && (rhs0 == rhs) => t
- case _ => this.treeCopy.AssignOrNamedArg(tree, lhs, rhs)
- }
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) = tree match {
case t @ SelectFromArray(qualifier0, selector0, _)
if (qualifier0 == qualifier) && (selector0 == selector) => t
case _ => this.treeCopy.SelectFromArray(tree, qualifier, selector, erasure)
}
+ def ReferenceToBoxed(tree: Tree, idt: Ident) = tree match {
+ case t @ ReferenceToBoxed(idt0)
+ if (idt0 == idt) => t
+ case _ => this.treeCopy.ReferenceToBoxed(tree, idt)
+ }
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case t @ TypeTreeWithDeferredRefCheck() => t
case _ => this.treeCopy.TypeTreeWithDeferredRefCheck(tree)
@@ -205,7 +207,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
try unit.body = transform(unit.body)
catch {
case ex: Exception =>
- println("unhandled exception while transforming "+unit)
+ println(supplementErrorMessage("unhandled exception while transforming "+unit))
throw ex
}
}
@@ -214,11 +216,12 @@ trait Trees extends reflect.internal.Trees { self: Global =>
override protected def xtransform(transformer: super.Transformer, tree: Tree): Tree = tree match {
case DocDef(comment, definition) =>
transformer.treeCopy.DocDef(tree, comment, transformer.transform(definition))
- case AssignOrNamedArg(lhs, rhs) =>
- transformer.treeCopy.AssignOrNamedArg(tree, transformer.transform(lhs), transformer.transform(rhs))
case SelectFromArray(qualifier, selector, erasure) =>
transformer.treeCopy.SelectFromArray(
tree, transformer.transform(qualifier), selector, erasure)
+ case ReferenceToBoxed(idt) =>
+ transformer.treeCopy.ReferenceToBoxed(
+ tree, transformer.transform(idt) match { case idt1: Ident => idt1 })
case TypeTreeWithDeferredRefCheck() =>
transformer.treeCopy.TypeTreeWithDeferredRefCheck(tree)
}
@@ -230,62 +233,97 @@ trait Trees extends reflect.internal.Trees { self: Global =>
}
}
- /** resets symbol and tpe fields in a tree, @see ResetAttrsTraverse
+ /** resets symbol and tpe fields in a tree, @see ResetAttrs
*/
// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- def resetAllAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(false).transformPoly(x)
- def resetLocalAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(true).transformPoly(x)
+
+ def resetAllAttrs[A<:Tree](x:A): A = new ResetAttrs(false).transform(x)
+ def resetLocalAttrs[A<:Tree](x:A): A = new ResetAttrs(true).transform(x)
/** A transformer which resets symbol and tpe fields of all nodes in a given tree,
* with special treatment of:
* TypeTree nodes: are replaced by their original if it exists, otherwise tpe field is reset
* to empty if it started out empty or refers to local symbols (which are erased).
* TypeApply nodes: are deleted if type arguments end up reverted to empty
- * This(pkg) notes where pkg is a pckage: these are kept.
+ * This(pkg) nodes where pkg is a package: these are kept.
*
- * (bq:) This traverser has mutable state and should be discarded after use
+ * (bq:) This transformer has mutable state and should be discarded after use
*/
- private class ResetAttrsTransformer(localOnly: Boolean) extends Transformer {
- private val erasedSyms = util.HashSet[Symbol](8)
- private def resetDef(tree: Tree) {
- if (tree.symbol != null && tree.symbol != NoSymbol)
- erasedSyms addEntry tree.symbol
- tree.symbol = NoSymbol
+ private class ResetAttrs(localOnly: Boolean) {
+ val debug = settings.debug.value
+ val trace = scala.tools.nsc.util.trace when debug
+
+ val locals = util.HashSet[Symbol](8)
+ val orderedLocals = collection.mutable.ListBuffer[Symbol]()
+ def registerLocal(sym: Symbol) {
+ if (sym != null && sym != NoSymbol) {
+ if (debug && !(locals contains sym)) orderedLocals append sym
+ locals addEntry sym
+ }
}
- override def transform(tree: Tree): Tree = super.transform {
- tree match {
- case Template(_, _, body) =>
- body foreach resetDef
- resetDef(tree)
- tree.tpe = null
- tree
- case _: DefTree | Function(_, _) | Template(_, _, _) =>
- resetDef(tree)
- tree.tpe = null
- tree
- case tpt: TypeTree =>
- if (tpt.original != null)
- tpt.original
- else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => erasedSyms contains tp.typeSymbol))))
- tpt.tpe = null
- tree
- case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
- fn
- case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
- tree
- case EmptyTree =>
- tree
- case _ =>
- if (tree.hasSymbol && (!localOnly || (erasedSyms contains tree.symbol)))
- tree.symbol = NoSymbol
- tree.tpe = null
- tree
+
+ class MarkLocals extends self.Traverser {
+ def markLocal(tree: Tree) {
+ if (tree.symbol != null && tree.symbol != NoSymbol) {
+ val sym = tree.symbol
+ registerLocal(sym)
+ registerLocal(sym.sourceModule)
+ registerLocal(sym.moduleClass)
+ }
+ }
+
+ override def traverse(tree: Tree) = {
+ tree match {
+ case _: DefTree | Function(_, _) | Template(_, _, _) =>
+ markLocal(tree)
+ case _ if tree.symbol.isInstanceOf[FreeVar] =>
+ markLocal(tree)
+ case _ =>
+ ;
+ }
+
+ super.traverse(tree)
+ }
+ }
+
+ class Transformer extends self.Transformer {
+ override def transform(tree: Tree): Tree = super.transform {
+ tree match {
+ case tpt: TypeTree =>
+ if (tpt.original != null) {
+ transform(tpt.original)
+ } else {
+ if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol))))
+ tpt.tpe = null
+ tree
+ }
+ case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
+ transform(fn)
+ case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
+ tree
+ case EmptyTree =>
+ tree
+ case _ =>
+ if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)))
+ tree.symbol = NoSymbol
+ tree.tpe = null
+ tree
+ }
}
}
- def transformPoly[T <: Tree](x: T): T = {
- val x1 = transform(x)
+
+ def transform[T <: Tree](x: T): T = {
+ new MarkLocals().traverse(x)
+
+ if (debug) {
+ assert(locals.size == orderedLocals.size)
+ val eoln = System.getProperty("line.separator")
+ val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString eoln
+ trace("locals (%d total): %n".format(orderedLocals.size))(msg)
+ }
+
+ val x1 = new Transformer().transform(x)
assert(x.getClass isInstance x1)
x1.asInstanceOf[T]
}
@@ -295,7 +333,6 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case Parens(expr) (only used during parsing)
case DocDef(comment, defn) => (eliminated by typer)
- case AssignOrNamedArg(lhs, rhs) => (eliminated by typer)
case TypeTreeWithDeferredRefCheck() => (created and eliminated by typer)
case SelectFromArray(_, _, _) => (created and eliminated by erasure)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 7388ecf163..46ade7d889 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -117,7 +117,7 @@ trait MarkupParsers {
* | `{` scalablock `}`
*/
def xAttributes = {
- val aMap = mutable.HashMap[String, Tree]()
+ val aMap = mutable.LinkedHashMap[String, Tree]()
while (isNameStart(ch)) {
val start = curOffset
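The switch from HashMap to LinkedHashMap above only changes iteration order: XML attributes are now emitted in the order they appear in the source rather than in hash order. A standalone illustration (not from this patch):

    import scala.collection.mutable
    val hashed = mutable.HashMap("b" -> 1, "a" -> 2)        // iteration order unspecified
    val linked = mutable.LinkedHashMap("b" -> 1, "a" -> 2)  // iterates in insertion order
    println(linked.keys.mkString(", "))                     // prints: b, a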
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 00ac3976a9..fe6dcc9138 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -28,14 +28,17 @@ trait ParsersCommon extends ScannersCommon {
val global : Global
import global._
- trait ParserCommon {
+ /** This is now an abstract class, only to work around the optimizer:
+ * methods in traits are never inlined.
+ */
+ abstract class ParserCommon {
val in: ScannerCommon
def freshName(prefix: String): Name
def freshTermName(prefix: String): TermName
def freshTypeName(prefix: String): TypeName
def deprecationWarning(off: Int, msg: String): Unit
def accept(token: Int): Int
-
+
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
* instead of the contents of the groupers. However in all cases accept(LPAREN)
@@ -123,8 +126,6 @@ self =>
val global: Global
import global._
- private val glob: global.type = global
-
case class OpInfo(operand: Tree, operator: Name, offset: Offset)
class SourceFileParser(val source: SourceFile) extends Parser {
@@ -314,7 +315,7 @@ self =>
val stmts = templateStatSeq(false)._2
accept(EOF)
- def mainModuleName = settings.script.value
+ def mainModuleName = newTermName(settings.script.value)
/** If there is only a single object template in the file and it has a
* suitable main method, we will use it rather than building another object
* around it. Since objects are loaded lazily the whole script would have
@@ -343,7 +344,7 @@ self =>
* whole additional parse. So instead, if the actual object's name differs from
* what the script is expecting, we transform it to match.
*/
- if (name.toString == mainModuleName) md
+ if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
/** If we see anything but the above, fail. */
@@ -352,7 +353,7 @@ self =>
Some(makePackaging(0, emptyPkg, newStmts))
}
- if (mainModuleName == ScriptRunner.defaultScriptMain)
+ if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
/** Here we are building an AST representing the following source fiction,
@@ -384,13 +385,13 @@ self =>
// def main
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
- def mainParameter = List(ValDef(Modifiers(Flags.PARAM), "argv", mainParamType, EmptyTree))
- def mainSetArgv = List(ValDef(NoMods, "args", TypeTree(), Ident("argv")))
+ def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
// object Main
- def moduleName = ScriptRunner scriptMain settings
+ def moduleName = newTermName(ScriptRunner scriptMain settings)
def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
@@ -616,7 +617,7 @@ self =>
def isLiteralToken(token: Int) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
- STRINGLIT | STRINGPART | SYMBOLLIT | TRUE | FALSE | NULL => true
+ STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
@@ -980,6 +981,7 @@ self =>
nme.ERROR
}
def ident(): Name = ident(true)
+ def rawIdent(): Name = try in.name finally in.nextToken()
/** For when it's known already to be a type name. */
def identForType(): TypeName = ident().toTypeName
@@ -1101,7 +1103,7 @@ self =>
* }}}
* @note The returned tree does not yet have a position
*/
- def literal(isNegated: Boolean): Tree = {
+ def literal(isNegated: Boolean = false): Tree = {
def finish(value: Any): Tree = {
val t = Literal(Constant(value))
in.nextToken()
@@ -1109,19 +1111,19 @@ self =>
}
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == STRINGPART)
+ else if (in.token == INTERPOLATIONID)
interpolatedString()
else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT => in.strVal
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
syntaxErrorOrIncomplete("illegal literal", true)
null
})
@@ -1136,16 +1138,27 @@ self =>
}
}
- private def interpolatedString(): Tree = {
- var t = atPos(o2p(in.offset))(New(TypeTree(definitions.StringBuilderClass.tpe), List(List())))
+ private def interpolatedString(): Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name
+
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
while (in.token == STRINGPART) {
- t = stringOp(t, nme.append)
- var e = expr()
- if (in.token == STRINGFMT) e = stringOp(e, nme.formatted)
- t = atPos(t.pos.startOrPoint)(Apply(Select(t, nme.append), List(e)))
+ partsBuf += literal()
+ exprBuf += {
+ if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
+ else expr()
+ }
}
- if (in.token == STRINGLIT) t = stringOp(t, nme.append)
- atPos(t.pos)(Select(t, nme.toString_))
+ if (in.token == STRINGLIT) partsBuf += literal()
+
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
}
/* ------------- NEW LINES ------------------------------------------------- */
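The interpolatedString() method added in the hunk above builds an application of the interpolator identifier on a StringContext made of the literal parts. Roughly, for a source expression like the one below (illustrative example, not from this patch):

    val name = "world"
    s"Hello, $name!"
    // is parsed into approximately:
    StringContext("Hello, ", "!").s(name)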
@@ -1465,8 +1478,9 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name: Name = "unary_" + ident()
- if (in.name == raw.MINUS && isNumericLit) simpleExprRest(atPos(in.offset)(literal(true)), true)
+ val name = nme.toUnaryName(rawIdent())
+ // val name = nme.toUnaryName(ident()) // val name: Name = "unary_" + ident()
+ if (name == nme.UNARY_- && isNumericLit) simpleExprRest(atPos(in.offset)(literal(isNegated = true)), true)
else Select(stripParens(simpleExpr()), name)
}
}
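With the change above, prefix operators are resolved through nme.toUnaryName, and a minus directly in front of a numeric literal is folded into a negated literal instead of a unary_- selection. Rough parse results for a few inputs (informal, derived from the code above):

    // -5      parses to Literal(Constant(-5))        (negated literal, no method call)
    // -x      parses to Select(x, "unary_$minus")
    // !flag   parses to Select(flag, "unary_$bang")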
@@ -1490,7 +1504,7 @@ self =>
def simpleExpr(): Tree = {
var canApply = true
val t =
- if (isLiteral) atPos(in.offset)(literal(false))
+ if (isLiteral) atPos(in.offset)(literal())
else in.token match {
case XMLSTART =>
xmlLiteral()
@@ -1533,12 +1547,12 @@ self =>
case LBRACKET =>
val t1 = stripParens(t)
t1 match {
- case Ident(_) | Select(_, _) =>
- var tapp: Tree = t1
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ var app: Tree = t1
while (in.token == LBRACKET)
- tapp = atPos(tapp.pos.startOrPoint, in.offset)(TypeApply(tapp, exprTypeArgs()))
+ app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
- simpleExprRest(tapp, true)
+ simpleExprRest(app, true)
case _ =>
t1
}
@@ -1743,11 +1757,16 @@ self =>
* }}}
*/
def pattern1(): Tree = pattern2() match {
- case p @ Ident(name) if treeInfo.isVarPattern(p) && in.token == COLON =>
- atPos(p.pos.startOrPoint, in.skipToken()) { Typed(p, compoundType()) }
- case p =>
- p
+ case p @ Ident(name) if in.token == COLON =>
+ if (treeInfo.isVarPattern(p))
+ atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+ else {
+ syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
+ p
+ }
+ case p => p
}
+
/** {{{
* Pattern2 ::= varid [ @ Pattern3 ]
* | Pattern3
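The new branch added to pattern1() in the hunk above turns a previously confusing parse failure into a dedicated error when a typed pattern starts with an upper-case identifier. A small illustration (example names are not from this patch):

    def f(x: Any) = x match {
      case n: Int => n   // ok: lower-case `n` is a pattern variable
      case N: Int => N   // now rejected: "Pattern variables must start with a lower-case letter."
    }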
@@ -1819,7 +1838,7 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(true) }
+ return atPos(start) { literal(isNegated = true) }
case _ =>
}
case _ =>
@@ -1836,8 +1855,8 @@ self =>
in.nextToken()
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
- STRINGLIT | STRINGPART | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal(false) }
+ STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
+ atPos(start) { literal() }
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
@@ -2430,7 +2449,7 @@ self =>
else {
val nameOffset = in.offset
val name = ident()
- if (name == nme.macro_ && isIdent && settings.Xexperimental.value)
+ if (name == nme.macro_ && isIdent && settings.Xmacros.value)
funDefRest(start, in.offset, mods | Flags.MACRO, ident())
else
funDefRest(start, nameOffset, mods, name)
@@ -2461,6 +2480,9 @@ self =>
restype = scalaUnitConstr
blockExpr()
} else {
+ if (name == nme.macro_ && isIdent && in.token != EQUALS) {
+ warning("this syntactically invalid code resembles a macro definition. have you forgotten to enable -Xmacros?")
+ }
equalsExpr()
}
DefDef(newmods, name, tparams, vparamss, restype, rhs)
@@ -2520,7 +2542,7 @@ self =>
newLinesOpt()
atPos(start, in.offset) {
val name = identForType()
- if (name == nme.macro_.toTypeName && isIdent && settings.Xexperimental.value) {
+ if (name == nme.macro_.toTypeName && isIdent && settings.Xmacros.value) {
funDefRest(start, in.offset, mods | Flags.MACRO, identForType())
} else {
// @M! a type alias as well as an abstract type may declare type parameters
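Both hunks above gate macro parsing behind the new -Xmacros flag instead of -Xexperimental. A hedged reading of the parser logic, using the early experimental surface syntax of this commit (it differs from the later `= macro impl` form):

    // With -Xmacros, a definition written as
    //   def macro foo(x: Int): Int = ...
    // is parsed with Flags.MACRO set on `foo`; without the flag, the parser
    // only emits the "resembles a macro definition" warning added above.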
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index a25b3afbc6..f712c7411f 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -84,6 +84,8 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
+
+ def isAtEnd = charOffset >= buf.length
def flush = { charOffset = offset; nextChar(); this }
@@ -161,12 +163,25 @@ trait Scanners extends ScannersCommon {
* RBRACKET if region starts with '['
* RBRACE if region starts with '{'
* ARROW if region starts with `case'
- * STRINGFMT if region is a string interpolation expression starting with '\{'
+ * STRINGLIT if region is a string interpolation expression starting with '${'
+ * (the STRINGLIT appears twice in succession on the stack iff the
+ * expression is a multiline string literal).
*/
var sepRegions: List[Int] = List()
// Get next token ------------------------------------------------------------
+ /** Are we directly in a string interpolation expression?
+ */
+ @inline private def inStringInterpolation =
+ sepRegions.nonEmpty && sepRegions.head == STRINGLIT
+
+ /** Are we directly in a multiline string interpolation expression?
+ * @pre: inStringInterpolation
+ */
+ @inline private def inMultiLineInterpolation =
+ inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+
/** read next token and return last offset
*/
def skipToken(): Offset = {
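While scanning an interpolated literal, the sepRegions stack documented in the hunk above evolves roughly as follows for the single-line case (a hedged trace, not compiler output):

    // scanning  s"a ${ 1 + 1 } b"          sepRegions (head on the left)
    //   after the opening "            ->  STRINGLIT :: rest
    //   after the ${ (LBRACE token)    ->  RBRACE :: STRINGLIT :: rest
    //   after the matching }           ->  STRINGLIT :: rest   (back to string parts)
    //   after the closing "            ->  rest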
@@ -189,29 +204,33 @@ trait Scanners extends ScannersCommon {
sepRegions = RBRACE :: sepRegions
case CASE =>
sepRegions = ARROW :: sepRegions
- case STRINGPART =>
- sepRegions = STRINGFMT :: sepRegions
case RBRACE =>
- sepRegions = sepRegions dropWhile (_ != RBRACE)
+ while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
+ sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- case RBRACKET | RPAREN | ARROW | STRINGFMT =>
+ docBuffer = null
+ case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- case _ =>
- }
- (lastToken: @switch) match {
- case RBRACE | RBRACKET | RPAREN =>
docBuffer = null
+ case ARROW =>
+ if (!sepRegions.isEmpty && sepRegions.head == lastToken)
+ sepRegions = sepRegions.tail
+ case STRINGLIT =>
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
case _ =>
}
-
+
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
- if(lastOffset > 0 && buf(lastOffset) == '\n' && buf(lastOffset - 1) == '\r') {
+ if (lastOffset > 0 && buf(lastOffset) == '\n' && buf(lastOffset - 1) == '\r') {
lastOffset -= 1
}
- fetchToken()
+ if (inStringInterpolation) fetchStringPart() else fetchToken()
} else {
this copyFrom next
next.token = EMPTY
@@ -308,7 +327,9 @@ trait Scanners extends ScannersCommon {
'z' =>
putChar(ch)
nextChar()
- getIdentRest() // scala-mode: wrong indent for multi-line case blocks
+ getIdentRest()
+ if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
+ token = INTERPOLATIONID
case '<' => // is XMLSTART?
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
nextChar()
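With the re-tagging added at the top of the hunk above (an IDENTIFIER followed immediately by a double quote becomes INTERPOLATIONID under -Xexperimental), an interpolated literal reaches the parser as a short token sequence rather than a single string token. A hedged example:

    // source:  s"a $x b"
    // tokens:  INTERPOLATIONID(s)  STRINGPART("a ")  IDENTIFIER(x)  STRINGLIT(" b")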
@@ -360,18 +381,37 @@ trait Scanners extends ScannersCommon {
case '`' =>
getBackquotedIdent()
case '\"' =>
- nextChar()
- if (ch == '\"') {
- nextChar()
+ if (token == INTERPOLATIONID) {
+ nextRawChar()
if (ch == '\"') {
nextRawChar()
- getMultiLineStringLit()
+ if (ch == '\"') {
+ nextRawChar()
+ getStringPart(multiLine = true)
+ sepRegions = STRINGPART :: sepRegions // indicate string part
+ sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
} else {
- token = STRINGLIT
- strVal = ""
+ getStringPart(multiLine = false)
+ sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
} else {
- getStringPart()
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextRawChar()
+ getRawStringLit()
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else {
+ getStringLit()
+ }
}
case '\'' =>
nextChar()
@@ -397,9 +437,7 @@ trait Scanners extends ScannersCommon {
token = DOT
}
case ';' =>
- nextChar()
- if (inStringInterpolation) getFormatString()
- else token = SEMI
+ nextChar(); token = SEMI
case ',' =>
nextChar(); token = COMMA
case '(' =>
@@ -409,22 +447,13 @@ trait Scanners extends ScannersCommon {
case ')' =>
nextChar(); token = RPAREN
case '}' =>
- if (token == STRINGFMT) {
- nextChar()
- getStringPart()
- } else if (inStringInterpolation) {
- strVal = "";
- token = STRINGFMT
- } else {
- nextChar();
- token = RBRACE
- }
+ nextChar(); token = RBRACE
case '[' =>
nextChar(); token = LBRACKET
case ']' =>
nextChar(); token = RBRACKET
case SU =>
- if (charOffset >= buf.length) token = EOF
+ if (isAtEnd) token = EOF
else {
syntaxError("illegal character")
nextChar()
@@ -443,7 +472,7 @@ trait Scanners extends ScannersCommon {
nextChar()
getOperatorRest()
} else {
- syntaxError("illegal character")
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
nextChar()
}
}
@@ -506,11 +535,6 @@ trait Scanners extends ScannersCommon {
}
}
- /** Are we directly in a string interpolation expression?
- */
- private def inStringInterpolation =
- sepRegions.nonEmpty && sepRegions.head == STRINGFMT
-
/** Can token start a statement? */
def inFirstOfStat(token: Int) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
@@ -608,71 +632,110 @@ trait Scanners extends ScannersCommon {
else finishNamed()
}
}
+
+
+// Literals -----------------------------------------------------------------
- def getFormatString() = {
- getLitChars('}', '"', ' ', '\t')
- if (ch == '}') {
- setStrVal()
- if (strVal.length > 0) strVal = "%" + strVal
- token = STRINGFMT
- } else {
- syntaxError("unclosed format string")
- }
- }
-
- def getStringPart() = {
- while (ch != '"' && (ch != CR && ch != LF && ch != SU || isUnicodeEscape) && maybeGetLitChar()) {}
+ private def getStringLit() = {
+ getLitChars('"')
if (ch == '"') {
setStrVal()
nextChar()
token = STRINGLIT
- } else if (ch == '{' && settings.Xexperimental.value) {
- setStrVal()
- nextChar()
- token = STRINGPART
- } else {
- syntaxError("unclosed string literal")
- }
+ } else syntaxError("unclosed string literal")
}
- private def getMultiLineStringLit() {
+ private def getRawStringLit(): Unit = {
if (ch == '\"') {
nextRawChar()
- if (ch == '\"') {
+ if (isTripleQuote()) {
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getRawStringLit()
+ } else if (ch == SU) {
+ incompleteInputError("unclosed multi-line string literal")
+ } else {
+ putChar(ch)
+ nextRawChar()
+ getRawStringLit()
+ }
+ }
+
+ @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ def finishStringPart() = {
+ setStrVal()
+ token = STRINGPART
+ next.lastOffset = charOffset - 1
+ next.offset = charOffset - 1
+ }
+ if (ch == '"') {
+ nextRawChar()
+ if (!multiLine || isTripleQuote()) {
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getStringPart(multiLine)
+ } else if (ch == '$') {
+ nextRawChar()
+ if (ch == '$') {
+ putChar(ch)
nextRawChar()
- if (ch == '\"') {
- nextChar()
- while (ch == '\"') {
- putChar('\"')
- nextChar()
- }
- token = STRINGLIT
- setStrVal()
- } else {
- putChar('\"')
- putChar('\"')
- getMultiLineStringLit()
- }
+ getStringPart(multiLine)
+ } else if (ch == '{') {
+ finishStringPart()
+ nextRawChar()
+ next.token = LBRACE
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ finishStringPart()
+ do {
+ putChar(ch)
+ nextRawChar()
+ } while (Character.isUnicodeIdentifierPart(ch))
+ next.token = IDENTIFIER
+ next.name = newTermName(cbuf.toString)
+ cbuf.clear()
} else {
- putChar('\"')
- getMultiLineStringLit()
+ syntaxError("invalid string interpolation")
}
- } else if (ch == SU) {
- incompleteInputError("unclosed multi-line string literal")
+ } else if ((ch == CR || ch == LF || ch == SU) && !isUnicodeEscape) {
+ syntaxError("unclosed string literal")
} else {
putChar(ch)
nextRawChar()
- getMultiLineStringLit()
+ getStringPart(multiLine)
}
}
+
+ private def fetchStringPart() = {
+ offset = charOffset - 1
+ getStringPart(multiLine = inMultiLineInterpolation)
+ }
+
+ private def isTripleQuote(): Boolean =
+ if (ch == '"') {
+ nextRawChar()
+ if (ch == '"') {
+ nextChar()
+ while (ch == '"') {
+ putChar('"')
+ nextChar()
+ }
+ true
+ } else {
+ putChar('"')
+ putChar('"')
+ false
+ }
+ } else {
+ putChar('"')
+ false
+ }
-// Literals -----------------------------------------------------------------
-
- /** read next character in character or string literal:
- * if character sequence is a \{ escape, do not copy it into the string and return false.
- * otherwise return true.
+ /** copy current character into cbuf, interpreting any escape sequences,
+ * and advance to next character.
*/
- protected def maybeGetLitChar(): Boolean = {
+ protected def getLitChar(): Unit =
if (ch == '\\') {
nextChar()
if ('0' <= ch && ch <= '7') {
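The escapes recognized by getStringPart in the hunk above, summarized informally from the code:

    // "$$"      -> a literal '$' is copied into the current string part
    // "${expr}" -> the part is finished and an LBRACE token is handed to the parser
    // "$ident"  -> the part is finished and a single IDENTIFIER token follows
    // any other lone '$' reports "invalid string interpolation"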
@@ -698,7 +761,6 @@ trait Scanners extends ScannersCommon {
case '\"' => putChar('\"')
case '\'' => putChar('\'')
case '\\' => putChar('\\')
- case '{' => return false
case _ => invalidEscape()
}
nextChar()
@@ -707,21 +769,15 @@ trait Scanners extends ScannersCommon {
putChar(ch)
nextChar()
}
- true
- }
protected def invalidEscape(): Unit = {
syntaxError(charOffset - 1, "invalid escape character")
putChar(ch)
}
- protected def getLitChar(): Unit =
- if (!maybeGetLitChar()) invalidEscape()
-
- private def getLitChars(delimiters: Char*) {
- while (!(delimiters contains ch) && (ch != CR && ch != LF && ch != SU || isUnicodeEscape)) {
+ private def getLitChars(delimiter: Char) = {
+ while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape))
getLitChar()
- }
}
/** read fractional part and exponent of floating point number
@@ -971,8 +1027,8 @@ trait Scanners extends ScannersCommon {
"string(" + strVal + ")"
case STRINGPART =>
"stringpart(" + strVal + ")"
- case STRINGFMT =>
- "stringfmt(" + strVal + ")"
+ case INTERPOLATIONID =>
+ "interpolationid(" + name + ")"
case SEMI =>
";"
case NEWLINE =>
@@ -1088,8 +1144,7 @@ trait Scanners extends ScannersCommon {
case LONGLIT => "long literal"
case FLOATLIT => "float literal"
case DOUBLELIT => "double literal"
- case STRINGLIT | STRINGPART => "string literal"
- case STRINGFMT => "format string"
+ case STRINGLIT | STRINGPART | INTERPOLATIONID => "string literal"
case SYMBOLLIT => "symbol literal"
case LPAREN => "'('"
case RPAREN => "')'"
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index b8fa55447a..ffe65aec63 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -58,11 +58,11 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
private object xmltypes extends XMLTypeNames {
type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
+ implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
}
private object xmlterms extends XMLTermNames {
type NameType = TermName
- implicit def createNameType(name: String): TermName = newTermName(name)
+ implicit def createNameType(name: String): TermName = newTermNameCached(name)
}
import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index b5ec0ceffb..e310611e68 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -24,7 +24,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
import global._
informProgress("parsing " + unit)
unit.body =
- if (unit.source.file.name.endsWith(".java")) new JavaUnitParser(unit).parse()
+ if (unit.isJava) new JavaUnitParser(unit).parse()
else if (reporter.incompleteHandled) new UnitParser(unit).parse()
else new UnitParser(unit).smartParse()
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 07970bb36e..091f333c27 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -45,18 +45,20 @@ abstract class Tokens {
}
object Tokens extends Tokens {
- final val STRINGPART = 7
+ final val STRINGPART = 7 // a part of an interpolated string
final val SYMBOLLIT = 8
- final val STRINGFMT = 9
+ final val INTERPOLATIONID = 9 // the lead identifier of an interpolated string
+
def isLiteral(code: Int) =
- code >= CHARLIT && code <= SYMBOLLIT
+ code >= CHARLIT && code <= INTERPOLATIONID
+
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
def isIdentifier(code: Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
+
@switch def canBeginExpression(code: Int) = code match {
case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
case LBRACE|LPAREN|LBRACKET|COMMENT => true
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index d80a1c4f34..27df45b563 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -39,9 +39,9 @@ trait JavaPlatform extends Platform {
) ++ depAnalysisPhase
lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
- lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
- lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
- lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+ lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, nme.equalsNumNum)
+ lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, nme.equalsNumChar)
+ lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, nme.equalsNumObject)
/** We could get away with excluding BoxedBooleanClass for the
* purpose of equality testing since it need not compare equal
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 56e05cdc04..4ab0eb0129 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -3,13 +3,12 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ArrayBuffer }
+import mutable.{ ListBuffer, ArrayBuffer }
import util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
@@ -17,23 +16,83 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ settings, log, nme }
+ import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
+
+ object NoBasicBlock extends BasicBlock(-1, null)
/** This class represents a basic block. Each
* basic block contains a list of instructions that are
* either executed all, or none. No jumps
* to/from the "middle" of the basic block are allowed (modulo exceptions).
*/
- class BasicBlock(val label: Int, val method: IMethod)
- extends AnyRef
- with ProgramPoint[BasicBlock]
- with Seq[Instruction] {
+ class BasicBlock(val label: Int, val method: IMethod) extends ProgramPoint[BasicBlock] {
+ outer =>
import BBFlags._
def code = method.code
+ private final class SuccessorList() {
+ private var successors: List[BasicBlock] = Nil
+ private def updateConserve() {
+ var lb: ListBuffer[BasicBlock] = null
+ var matches = 0
+ var remaining = successors
+
+ def addBlock(bb: BasicBlock) {
+ if (matches < 0)
+ lb += bb
+ else if (remaining.isEmpty || bb != remaining.head) {
+ lb = ListBuffer[BasicBlock]() ++= (successors take matches) += bb
+ matches = -1
+ }
+ else {
+ matches += 1
+ remaining = remaining.tail
+ }
+ }
+
+ // exceptionSuccessors
+ method.exh foreach { handler =>
+ if (handler covers outer)
+ addBlock(handler.startBlock)
+ }
+ // directSuccessors
+ val direct = directSuccessors
+ direct foreach addBlock
+
+ /** Return a list of successors for 'b' that come from exception handlers
+ * covering b's (non-exceptional) successors. These exception handlers
+ * might not cover 'b' itself. This situation corresponds to an
+ * exception being thrown as the first thing of one of b's successors.
+ */
+ method.exh foreach { handler =>
+ direct foreach { block =>
+ if (handler covers block)
+ addBlock(handler.startBlock)
+ }
+ }
+ // Blocks did not align: create a new list.
+ if (matches < 0)
+ successors = lb.toList
+ // Blocks aligned, but more blocks remain. Take a prefix of the list.
+ else if (remaining.nonEmpty)
+ successors = successors take matches
+ // Otherwise the list is unchanged, leave it alone.
+ }
+
+ /** This is called millions of times: it is performance sensitive. */
+ def updateSuccs() {
+ if (isEmpty) {
+ if (successors.nonEmpty)
+ successors = Nil
+ }
+ else updateConserve()
+ }
+ def toList = successors
+ }
+
/** Flags of this basic block. */
private var flags: Int = 0
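updateSuccs/updateConserve above sit on a hot path ("called millions of times"), so the code keeps the cached successor list whenever the recomputed blocks match it, truncates it when they form a prefix, and rebuilds only on a real change. A deliberately simplified, standalone model of that policy (not the compiler's code):

    def conserve[A](cached: List[A], fresh: List[A]): List[A] =
      if (fresh == cached) cached   // common case: nothing changed, reuse the old list
      else fresh                    // otherwise fall back to the recomputed list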
@@ -76,20 +135,23 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS | DIRTYPREDS)
/** Cached predecessors. */
- var preds: List[BasicBlock] = null
+ var preds: List[BasicBlock] = Nil
/** Local variables that are in scope at entry of this basic block. Used
* for debugging information.
*/
- var varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
+ val varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
/** ICode instructions, used as temporary storage while emitting code.
* Once closed is called, only the `instrs` array should be used.
*/
private var instructionList: List[Instruction] = Nil
-
private var instrs: Array[Instruction] = _
- override def toList: List[Instruction] =
+
+ def take(n: Int): Seq[Instruction] =
+ if (closed) instrs take n else instructionList takeRight n reverse
+
+ def toList: List[Instruction] =
if (closed) instrs.toList else instructionList.reverse
/** Return an iterator over the instructions in this basic block. */
@@ -117,17 +179,37 @@ trait BasicBlocks {
}
/** Apply a function to all the instructions of the block. */
- override def foreach[U](f: Instruction => U) = {
- // !!! This appears to change behavior if I try to avoid the implicit
- // conversion and traverse the array directly, which presumably means it
- // is dependent on some mutation which is taking place during traversal.
- // Please eliminate this if humanly possible.
+ final def foreach[U](f: Instruction => U) = {
if (!closed) dumpMethodAndAbort(method, this)
else instrs foreach f
+
+ // !!! If I replace "instrs foreach f" with the following:
+ // var i = 0
+ // val len = instrs.length
+ // while (i < len) {
+ // f(instrs(i))
+ // i += 1
+ // }
+ //
+ // Then when compiling under -optimise, quick.plugins fails as follows:
+ //
+ // quick.plugins:
+ // [mkdir] Created dir: /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] Compiling 5 files to /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] error: java.lang.VerifyError: (class: scala/tools/nsc/typechecker/Implicits$ImplicitSearch, method: typedImplicit0 signature: (Lscala/tools/nsc/typechecker/Implicits$ImplicitInfo;Z)Lscala/tools/nsc/typechecker/Implicits$SearchResult;) Incompatible object argument for function call
+ // [scalacfork] at scala.tools.nsc.typechecker.Implicits$class.inferImplicit(Implicits.scala:67)
+ // [scalacfork] at scala.tools.nsc.Global$$anon$1.inferImplicit(Global.scala:419)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.wrapImplicit$1(Typers.scala:170)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.inferView(Typers.scala:174)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:963)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4378)
+ //
+ // This is bad and should be understood/eliminated.
}
/** The number of instructions in this basic block so far. */
def length = if (closed) instrs.length else instructionList.length
+ def size = length
/** Return the n-th instruction. */
def apply(n: Int): Instruction =
@@ -208,7 +290,7 @@ trait BasicBlocks {
*/
def removeLastInstruction() {
if (closed)
- removeInstructionsAt(size)
+ removeInstructionsAt(length)
else {
instructionList = instructionList.tail
code.touched = true
@@ -321,10 +403,11 @@ trait BasicBlocks {
def clear() {
instructionList = Nil
instrs = null
- preds = null
+ preds = Nil
}
- override def isEmpty = instructionList.isEmpty
+ final def isEmpty = instructionList.isEmpty
+ final def nonEmpty = !isEmpty
/** Enter ignore mode: new 'emit'ted instructions will not be
* added to this basic block. It makes the generation of THROW
@@ -341,33 +424,33 @@ trait BasicBlocks {
/** Return the last instruction of this basic block. */
def lastInstruction =
- if (closed) instrs.last
+ if (closed) instrs(instrs.length - 1)
else instructionList.head
def firstInstruction =
if (closed) instrs(0)
else instructionList.last
+ def exceptionSuccessors: List[BasicBlock] =
+ exceptionSuccessorsForBlock(this)
+
def exceptionSuccessorsForBlock(block: BasicBlock): List[BasicBlock] =
method.exh collect { case x if x covers block => x.startBlock }
/** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
- private var succs: List[BasicBlock] = Nil
- private def updateSuccs() {
- resetFlag(DIRTYSUCCS)
- succs =
- if (isEmpty) Nil
- else exceptionSuccessors ++ directSuccessors ++ indirectExceptionSuccessors
- }
+ private val succs = new SuccessorList
- def successors : List[BasicBlock] = {
- if (touched) updateSuccs()
- succs
+ def successors: List[BasicBlock] = {
+ if (touched) {
+ succs.updateSuccs()
+ resetFlag(DIRTYSUCCS)
+ }
+ succs.toList
}
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => List(whereto)
+ case JUMP(whereto) => whereto :: Nil
case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
case SWITCH(_, labels) => labels
@@ -379,17 +462,6 @@ trait BasicBlocks {
else Nil
}
- def exceptionSuccessors: List[BasicBlock] =
- exceptionSuccessorsForBlock(this)
-
- /** Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
- */
- def indirectExceptionSuccessors: List[BasicBlock] =
- directSuccessors flatMap exceptionSuccessorsForBlock distinct
-
/** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
if (hasFlag(DIRTYPREDS)) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 1880bdc52c..ffc6640743 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -3,13 +3,11 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable, generic }
-import util.{ Position, NoPosition }
+import scala.collection.{ mutable, immutable }
/**
* Exception handlers are pieces of code that `handle` exceptions on
@@ -21,10 +19,10 @@ import util.{ Position, NoPosition }
trait ExceptionHandlers {
self: ICodes =>
- import global.{ definitions, Symbol, NoSymbol }
+ import global._
import definitions.{ ThrowableClass }
- class ExceptionHandler(val method: IMethod, val label: String, val cls: Symbol, val pos: Position) {
+ class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
private var _startBlock: BasicBlock = _;
var finalizer: Finalizer = _;
@@ -69,12 +67,12 @@ trait ExceptionHandlers {
def dup: ExceptionHandler = new ExceptionHandler(this)
}
- class Finalizer(method: IMethod, label: String, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
+ class Finalizer(method: IMethod, label: TermName, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
- object NoFinalizer extends Finalizer(null, "<no finalizer>", NoPosition) {
+ object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
override def dup = this
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 3f0a0fac1a..6aee52a354 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -102,7 +102,7 @@ abstract class GenICode extends SubComponent {
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
debuglog("Entering method " + name)
val m = new IMethod(tree.symbol)
- m.sourceFile = unit.source.toString()
+ m.sourceFile = unit.source
m.returnType = if (tree.symbol.isConstructor) UNIT
else toTypeKind(tree.symbol.info.resultType)
ctx.clazz.addMethod(m)
@@ -133,7 +133,7 @@ abstract class GenICode extends SubComponent {
if (!ctx1.bb.closed) ctx1.bb.close
prune(ctx1.method)
} else
- ctx1.method.setCode(null)
+ ctx1.method.setCode(NoCode)
ctx1
case Template(_, _, body) =>
@@ -179,7 +179,7 @@ abstract class GenICode extends SubComponent {
}
private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableClass.tpe)
+ require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
@@ -480,7 +480,7 @@ abstract class GenICode extends SubComponent {
*/
private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
val REFERENCE(clssym) = kind
- assert(loaders.clrTypes.isNonEnumValuetype(clssym))
+ assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
ctx.method.addLocal(local)
ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
@@ -1064,7 +1064,7 @@ abstract class GenICode extends SubComponent {
var default: BasicBlock = afterCtx.bb
for (caze @ CaseDef(pat, guard, body) <- cases) {
- assert(guard == EmptyTree)
+ assert(guard == EmptyTree, guard)
val tmpCtx = ctx1.newBlock
pat match {
case Literal(value) =>
@@ -1072,6 +1072,15 @@ abstract class GenICode extends SubComponent {
targets = tmpCtx.bb :: targets
case Ident(nme.WILDCARD) =>
default = tmpCtx.bb
+ case Alternative(alts) =>
+ alts foreach {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case _ =>
+ abort("Invalid case in alternative in switch-like pattern match: " +
+ tree + " at: " + tree.pos)
+ }
case _ =>
abort("Invalid case statement in switch-like pattern match: " +
tree + " at: " + (tree.pos))
@@ -1293,7 +1302,7 @@ abstract class GenICode extends SubComponent {
/** The Object => String overload.
*/
- private lazy val String_valueOf: Symbol = getMember(StringModule, "valueOf") filter (sym =>
+ private lazy val String_valueOf: Symbol = getMember(StringModule, nme.valueOf) filter (sym =>
sym.info.paramTypes match {
case List(pt) => pt.typeSymbol == ObjectClass
case _ => false
@@ -1305,7 +1314,7 @@ abstract class GenICode extends SubComponent {
// case we want to get more precise.
//
// private def valueOfForType(tp: Type): Symbol = {
- // val xs = getMember(StringModule, "valueOf") filter (sym =>
+ // val xs = getMember(StringModule, nme.valueOf) filter (sym =>
// // We always exclude the Array[Char] overload because java throws an NPE if
// // you pass it a null. It will instead find the Object one, which doesn't.
// sym.info.paramTypes match {
@@ -1352,7 +1361,7 @@ abstract class GenICode extends SubComponent {
def genScalaHash(tree: Tree, ctx: Context): Context = {
val hashMethod = {
ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
- getMember(ScalaRunTimeModule, "hash")
+ getMember(ScalaRunTimeModule, nme.hash_)
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
@@ -1528,7 +1537,7 @@ abstract class GenICode extends SubComponent {
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
val equalsMethod =
- if (!settings.XO.value) {
+ if (!settings.optimise.value) {
def default = platform.externalEquals
platform match {
case x: JavaPlatform =>
@@ -1550,7 +1559,7 @@ abstract class GenICode extends SubComponent {
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
- ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.XO.value) Dynamic else Static(false)))
+ ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)))
ctx2.bb.emit(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL))
ctx2.bb.close
}
@@ -1716,7 +1725,7 @@ abstract class GenICode extends SubComponent {
do {
changed = false
n += 1
- method.code.blocks foreach prune0
+ method.blocks foreach prune0
} while (changed)
debuglog("Prune fixpoint reached in " + n + " iterations.");
@@ -1755,7 +1764,7 @@ abstract class GenICode extends SubComponent {
val sym = t.symbol
def getLabel(pos: Position, name: Name) =
labels.getOrElseUpdate(sym,
- method.newLabel(sym.pos, unit.freshTermName(name.toString)) setInfo sym.tpe
+ method.newLabel(unit.freshTermName(name.toString), sym.pos) setInfo sym.tpe
)
t match {
@@ -1924,7 +1933,7 @@ abstract class GenICode extends SubComponent {
val ctx1 = new Context(this) setMethod(m)
ctx1.labels = mutable.HashMap()
ctx1.method.code = new Code(m)
- ctx1.bb = ctx1.method.code.startBlock
+ ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
ctx1.enterScope
@@ -1932,11 +1941,12 @@ abstract class GenICode extends SubComponent {
}
/** Return a new context for a new basic block. */
- def newBlock: Context = {
+ def newBlock(): Context = {
val block = method.code.newBlock
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
- block.varsInScope = mutable.HashSet() ++= scope.varsInScope
+ block.varsInScope.clear()
+ block.varsInScope ++= scope.varsInScope
new Context(this) setBasicBlock block
}
@@ -1958,7 +1968,7 @@ abstract class GenICode extends SubComponent {
*/
private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
handlerCount += 1
- val exh = new ExceptionHandler(method, "" + handlerCount, cls, pos)
+ val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
@@ -2004,9 +2014,7 @@ abstract class GenICode extends SubComponent {
/** Make a fresh local variable. It ensures the 'name' is unique. */
def makeLocal(pos: Position, tpe: Type, name: String): Local = {
- val sym = method.symbol.newVariable(pos, unit.freshTermName(name))
- .setInfo(tpe)
- .setFlag(Flags.SYNTHETIC)
+ val sym = method.symbol.newVariable(unit.freshTermName(name), pos, Flags.SYNTHETIC) setInfo tpe
this.method.addLocal(new Local(sym, toTypeKind(tpe), false))
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 7a0017944b..631b71d83a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -84,15 +84,16 @@ abstract class ICodes extends AnyRef
def checkValid(m: IMethod) {
// always slightly dicey to iterate over mutable structures
- val bs = m.code.blocks.toList
- for (b <- bs ; if !b.closed) {
- // Something is leaving open/empty blocks around (see SI-4840) so
- // let's not kill the deal unless it's nonempty.
- if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
- m.code removeBlock b
+ m foreachBlock { b =>
+ if (!b.closed) {
+ // Something is leaving open/empty blocks around (see SI-4840) so
+ // let's not kill the deal unless it's nonempty.
+ if (b.isEmpty) {
+ log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ m.code removeBlock b
+ }
+ else dumpMethodAndAbort(m, b)
}
- else dumpMethodAndAbort(m, b)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 1978a23d90..f71c8de449 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -36,7 +36,7 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.code.startBlock;
+ val b = m.startBlock;
blocks = Nil;
run {
@@ -106,7 +106,7 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = {
blocks = Nil;
- dfs(m.code.startBlock);
+ dfs(m.startBlock);
m.exh foreach (b => dfs(b.startBlock));
blocks.reverse
@@ -150,14 +150,14 @@ trait Linearizers {
added.clear;
m.exh foreach (b => rpo(b.startBlock));
- rpo(m.code.startBlock);
+ rpo(m.startBlock);
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
- if (m.code.startBlock.predecessors eq Nil)
+ if (m.startBlock.predecessors eq Nil)
blocks
else
- m.code.startBlock :: (blocks.filterNot(_ == m.code.startBlock))
+ m.startBlock :: (blocks.filterNot(_ == m.startBlock))
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -195,7 +195,7 @@ trait Linearizers {
* the last instruction being a jump).
*/
class DumpLinearizer extends Linearizer {
- def linearize(m: IMethod): List[BasicBlock] = m.code.blocks.toList
+ def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
@@ -250,7 +250,7 @@ trait Linearizers {
* @param frozen blocks can't be moved (first block of a method, blocks directly following a try-catch)
*/
def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.code.startBlock, method)
+ assert(blocks.head == method.startBlock, method)
// blocks before the try, and blocks for the try
val beforeAndTry = new ListBuffer[BasicBlock]()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 2f43d43bdd..298c9171a1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,13 +3,13 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
+import util.{ SourceFile, NoSourceFile }
import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
@@ -17,30 +17,34 @@ trait ReferenceEquality {
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
}
-trait Members { self: ICodes =>
+trait Members {
+ self: ICodes =>
+
import global._
+
+ object NoCode extends Code(null, "NoCode") {
+ override def blocksList: List[BasicBlock] = Nil
+ }
/**
* This class represents the intermediate code of a method or
* other multi-block piece of code, like exception handlers.
*/
- class Code(label: String, method: IMethod) {
- def this(method: IMethod) = this(method.symbol.simpleName.toString, method)
-
+ class Code(method: IMethod, name: String) {
+ def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
/** The set of all blocks */
val blocks = mutable.ListBuffer[BasicBlock]()
/** The start block of the method */
- var startBlock: BasicBlock = null
-
- /** The stack produced by this method */
- var producedStack: TypeStack = null
+ var startBlock: BasicBlock = NoBasicBlock
private var currentLabel: Int = 0
private var _touched = false
- def blockCount = blocks.size
- def instructionCount = blocks map (_.length) sum
+ def blocksList: List[BasicBlock] = blocks.toList
+ def instructions = blocksList flatMap (_.iterator)
+ def blockCount = blocks.size
+ def instructionCount = blocks map (_.length) sum
def touched = _touched
def touched_=(b: Boolean): Unit = {
@@ -73,7 +77,7 @@ trait Members { self: ICodes =>
}
/** This methods returns a string representation of the ICode */
- override def toString() : String = "ICode '" + label + "'";
+ override def toString = "ICode '" + name + "'";
/* Compute a unique new label */
def nextLabel: Int = {
@@ -83,7 +87,7 @@ trait Members { self: ICodes =>
/* Create a new block and append it to the list
*/
- def newBlock: BasicBlock = {
+ def newBlock(): BasicBlock = {
touched = true
val block = new BasicBlock(nextLabel, method);
blocks += block;
@@ -134,6 +138,8 @@ trait Members { self: ICodes =>
/** Represent a field in ICode */
class IField(val symbol: Symbol) extends IMember { }
+
+ object NoIMethod extends IMethod(NoSymbol) { }
/**
* Represents a method in ICode. Local variables contain
@@ -146,14 +152,23 @@ trait Members { self: ICodes =>
* finished (GenICode does that).
*/
class IMethod(val symbol: Symbol) extends IMember {
- var code: Code = null
+ var code: Code = NoCode
+
+ def newBlock() = code.newBlock
+ def startBlock = code.startBlock
+ def lastBlock = blocks.last
+ def blocks = code.blocksList
+ def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
+
+ def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
+ def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
+
var native = false
/** The list of exception handlers, ordered from innermost to outermost. */
var exh: List[ExceptionHandler] = Nil
- var sourceFile: String = _
+ var sourceFile: SourceFile = NoSourceFile
var returnType: TypeKind = _
-
var recursive: Boolean = false
/** local variables and method parameters */
@@ -162,7 +177,7 @@ trait Members { self: ICodes =>
/** method parameters */
var params: List[Local] = Nil
- def hasCode = code != null
+ def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
this.code = code;
this
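Replacing the null checks with the NoCode sentinel above follows the null-object pattern: callers ask hasCode (code ne NoCode) instead of comparing against null, and NoCode safely answers blocksList with Nil. A usage sketch under that assumption:

    def blockLabels(m: IMethod): List[Int] =
      if (m.hasCode) m.blocks map (_.label)   // real Code: traverse its blocks
      else Nil                                // NoCode sentinel: nothing to traverse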
@@ -200,12 +215,14 @@ trait Members { self: ICodes =>
import opcodes._
def checkLocals(): Unit = {
- def localsSet = code.blocks.flatten collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- } toSet
+ def localsSet = (code.blocks flatMap { bb =>
+ bb.iterator collect {
+ case LOAD_LOCAL(l) => l
+ case STORE_LOCAL(l) => l
+ }
+ }).toSet
- if (code != null) {
+ if (hasCode) {
log("[checking locals of " + this + "]")
locals filterNot localsSet foreach { l =>
log("Local " + l + " is not declared in " + this)
@@ -219,7 +236,7 @@ trait Members { self: ICodes =>
*
* This method should be most effective after heavy inlining.
*/
- def normalize(): Unit = if (this.code ne null) {
+ def normalize(): Unit = if (this.hasCode) {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index ff4abbb757..4ea253d29d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -82,7 +82,7 @@ trait Printers { self: ICodes =>
if (!m.isAbstractMethod) {
println(" {")
println("locals: " + m.locals.mkString("", ", ", ""))
- println("startBlock: " + m.code.startBlock)
+ println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
println
lin.linearize(m) foreach printBlock
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 45ab7ae43c..ba4b250303 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -21,6 +21,8 @@ trait TypeStacks {
* stack of the ICode.
*/
type Rep = List[TypeKind]
+
+ object NoTypeStack extends TypeStack(Nil) { }
class TypeStack(var types: Rep) {
if (types.nonEmpty)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 5af5b05682..f5be82a776 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -194,12 +194,12 @@ abstract class CopyPropagation {
this.method = m
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = lattice.bottom
out(b) = lattice.bottom
- assert(out.contains(b))
+ assert(out.contains(b), out)
log("Added point: " + b)
}
m.exh foreach { e =>
@@ -207,21 +207,21 @@ abstract class CopyPropagation {
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.code.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
}
}
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.foldLeft(in)(interpret)
+ b.iterator.foldLeft(in)(interpret)
import opcodes._
@@ -520,8 +520,8 @@ abstract class CopyPropagation {
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
- val bindings = mutable.HashMap[Symbol, Value]()
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -562,13 +562,12 @@ abstract class CopyPropagation {
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
- final override def toString(): String = {
- var res = ""
- for (b <- this.method.code.blocks.toList)
- res = (res + "\nIN(" + b.label + "):\t Bindings: " + in(b).bindings +
- "\nIN(" + b.label +"):\t Stack: " + in(b).stack) + "\n";
- res
- }
+ final override def toString() = (
+ method.blocks map { b =>
+ "\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
+ "\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
+ } mkString
+ )
} /* class CopyAnalysis */
}
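Several hunks in CopyPropagation.scala (and later in TypeFlowAnalysis.scala) rewrite `b.foldLeft(in)(interpret)` as `b.iterator.foldLeft(in)(interpret)`: the block-level transfer function is simply a left fold of the instruction-level transfer over the block. A generic sketch of that shape, with placeholder types in place of the real lattice and instruction classes:

object BlockTransferSketch {
  type Elem  = Map[String, String]          // stand-in for a lattice element (copy bindings)
  type Instr = (String, String)             // stand-in: (local, copied-from)

  // Instruction-level transfer: record one binding.
  def interpret(in: Elem, i: Instr): Elem = in + i

  // Block-level transfer: fold interpret over the block's instructions.
  def blockTransfer(block: Iterator[Instr], in: Elem): Elem =
    block.foldLeft(in)(interpret)

  def main(args: Array[String]): Unit = {
    val block = Iterator("x" -> "y", "z" -> "x")
    println(blockTransfer(block, Map.empty))   // Map(x -> y, z -> x)
  }
}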
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index c656219dc8..49f5b51d51 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -33,10 +33,9 @@ abstract class Liveness {
final class LivenessAnalysis extends DataFlowAnalysis[livenessLattice.type] {
type P = BasicBlock
- val lattice = livenessLattice
- var method: IMethod = _
-
- val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
+ val lattice = livenessLattice
+ var method: IMethod = _
+ val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
val kill: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
def init(m: IMethod) {
@@ -44,14 +43,15 @@ abstract class Liveness {
gen.clear()
kill.clear()
- for (b <- m.code.blocks; (g, k) = genAndKill(b)) {
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
gen += (b -> g)
kill += (b -> k)
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
@@ -75,7 +75,7 @@ abstract class Liveness {
override def run() {
backwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
@@ -89,29 +89,14 @@ abstract class Liveness {
* liveness *before* the given instruction `i`.
*/
def interpret(out: lattice.Elem, i: Instruction): lattice.Elem = {
- var in = out
-
- if (settings.debug.value) {
- log("- " + i)
- log("out: " + out)
- log("\n")
- }
-
+ debuglog("- " + i + "\nout: " + out + "\n")
i match {
- case LOAD_LOCAL(l) => in += l
- case STORE_LOCAL(l) => in -= l
- case _ =>
- ()
- }
- in
- } /* def interpret */
-
- override def toString(): String = {
- val buf = new StringBuilder()
- for (b <- method.code.blocks.toList) {
- buf.append("\nlive-in(" + b + ")=" + in(b) + "\nlive-out(" + b + ")=" + out(b));
+ case LOAD_LOCAL(l) => out + l
+ case STORE_LOCAL(l) => out - l
+ case _ => out
}
- buf.toString()
}
+ override def toString() =
+ method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b))) mkString
} /* Liveness analysis */
}
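The rewritten `interpret` in Liveness.scala is now purely functional: a LOAD makes the local live, a STORE kills it, and everything else passes the set through unchanged. A small standalone sketch of that backward transfer over one block, using an invented instruction encoding:

object LivenessSketch {
  sealed trait Instr
  case class Load(local: String)  extends Instr
  case class Store(local: String) extends Instr
  case object Other               extends Instr

  // Liveness *before* instruction i, given liveness `out` after it.
  def interpret(out: Set[String], i: Instr): Set[String] = i match {
    case Load(l)  => out + l
    case Store(l) => out - l
    case Other    => out
  }

  // Backward block transfer: fold from the last instruction to the first.
  def blockIn(instrs: List[Instr], out: Set[String]): Set[String] =
    instrs.foldRight(out)((i, acc) => interpret(acc, i))

  def main(args: Array[String]): Unit = {
    val block = List(Load("a"), Store("b"), Load("b"))
    println(blockIn(block, Set.empty))   // Set(a): b is stored before it is loaded
  }
}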
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index eac714f999..c06bd2e097 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -78,7 +78,10 @@ abstract class ReachingDefinitions {
drops.clear()
outStack.clear()
- for (b <- m.code.blocks.toList; (g, k) = genAndKill(b); (d, st) = dropsAndGen(b)) {
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
+ val (d, st) = dropsAndGen(b)
+
gen += (b -> g)
kill += (b -> k)
drops += (b -> d)
@@ -86,8 +89,8 @@ abstract class ReachingDefinitions {
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
@@ -141,7 +144,7 @@ abstract class ReachingDefinitions {
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index a34d269cc9..6421d6c8ef 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -110,16 +110,16 @@ abstract class TypeFlowAnalysis {
this.method = m
//typeFlowLattice.lubs = 0
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = typeFlowLattice.bottom
out(b) = typeFlowLattice.bottom
}
// start block has var bindings for each of its parameters
val entryBindings = new VarBinding ++= (m.params map (p => ((p, p.kind))))
- in(m.code.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
+ in(m.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
m.exh foreach { e =>
in(e.startBlock) = lattice.IState(in(e.startBlock).vars, typeStackLattice.exceptionHandlerStack)
@@ -132,16 +132,18 @@ abstract class TypeFlowAnalysis {
if (this.method == null || this.method.symbol != m.symbol)
init(m)
else reinit {
- for (b <- m.code.blocks; if !in.isDefinedAt(b)) {
- for (p <- b.predecessors) {
- if (out.isDefinedAt(p)) {
- in(b) = out(p)
- worklist += p
- }
-/* else
- in(b) = typeFlowLattice.bottom
-*/ }
- out(b) = typeFlowLattice.bottom
+ m foreachBlock { b =>
+ if (!in.contains(b)) {
+ for (p <- b.predecessors) {
+ if (out.isDefinedAt(p)) {
+ in(b) = out(p)
+ worklist += p
+ }
+ /* else
+ in(b) = typeFlowLattice.bottom
+ */ }
+ out(b) = typeFlowLattice.bottom
+ }
}
for (handler <- m.exh) {
val start = handler.startBlock
@@ -164,7 +166,7 @@ abstract class TypeFlowAnalysis {
forwardAnalysis(blockTransfer)
val t = timer.stop
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
@@ -174,7 +176,7 @@ abstract class TypeFlowAnalysis {
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- b.foldLeft(in)(interpret)
+ b.iterator.foldLeft(in)(interpret)
}
/** The flow function of a given basic block. */
/* var flowFun: immutable.Map[BasicBlock, TransferFunction] = new immutable.HashMap */
@@ -617,6 +619,50 @@ abstract class TypeFlowAnalysis {
}
}
+ class MTFAGrowable extends MethodTFA {
+
+ import icodes._
+
+ /** discards what must be discarded, blanks what needs to be blanked out, and keeps the rest. */
+ def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: collection.Set[BasicBlock], staleIn: collection.Set[BasicBlock]) {
+ if (this.method == null || this.method.symbol != m.symbol) {
+ init(m)
+ return
+ } else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
+ // this promotes invoking reinit if in doubt, no performance degradation will ensue!
+ return;
+ }
+
+ reinit {
+ // asserts conveying an idea of what CFG shapes arrive here.
+ // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
+ // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert( !in.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
+ // staleOut foreach (p => assert( in.isDefinedAt(p), p))
+
+ // never rewrite in(m.startBlock)
+ staleOut foreach { b =>
+ if(!inlined.contains(b)) { worklist += b }
+ out(b) = typeFlowLattice.bottom
+ }
+ // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
+ blankOut(inlined)
+ blankOut(staleIn)
+ // no need to add startBlocks from m.exh
+ }
+ }
+
+ private def blankOut(blocks: collection.Set[BasicBlock]) {
+ blocks foreach { b =>
+ in(b) = typeFlowLattice.bottom
+ out(b) = typeFlowLattice.bottom
+ }
+ }
+
+ }
+
class Timer {
var millis = 0L
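`MTFAGrowable.reinit` above restarts the type-flow analysis incrementally after inlining: blocks whose out-state went stale are re-queued (unless they were themselves spliced in), spliced and stale-in blocks are blanked to bottom, and everything else keeps its previously computed state. A toy sketch of that bookkeeping, with integers for blocks and a plain set as the lattice element:

object IncrementalReinitSketch {
  import scala.collection.mutable

  type Block = Int
  type Elem  = Set[String]
  val bottom: Elem = Set.empty

  val in       = mutable.Map.empty[Block, Elem]
  val out      = mutable.Map.empty[Block, Elem]
  val worklist = mutable.Queue.empty[Block]

  def blankOut(blocks: collection.Set[Block]): Unit =
    blocks foreach { b => in(b) = bottom; out(b) = bottom }

  def reinit(staleOut: List[Block],
             inlined: collection.Set[Block],
             staleIn: collection.Set[Block]): Unit = {
    // Re-queue blocks whose out-state is no longer valid, unless they were
    // themselves produced by the inlining (those start from bottom anyway).
    staleOut foreach { b =>
      if (!inlined(b)) worklist += b
      out(b) = bottom
    }
    blankOut(inlined)
    blankOut(staleIn)
  }

  def main(args: Array[String]): Unit = {
    (1 to 4) foreach { b => in(b) = Set("x"); out(b) = Set("x") }
    reinit(staleOut = List(2), inlined = Set(3), staleIn = Set(4))
    println(worklist)   // Queue(2)
    println(out)        // blocks 2, 3, 4 reset to bottom; block 1 untouched
  }
}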
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
index 301dbd18d6..1ba5b155fc 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
@@ -23,25 +23,21 @@ trait GenAndroid {
* `Parcelable` interface must also have a static field called `CREATOR`,
* which is an object implementing the `Parcelable.Creator` interface.
*/
- private val fieldName = "CREATOR"
+ private val fieldName = newTermName("CREATOR")
- private lazy val AndroidParcelableInterface =
- try definitions.getClass("android.os.Parcelable")
- catch { case _: FatalError => NoSymbol }
-
- private lazy val AndroidCreatorClass =
- if (AndroidParcelableInterface == NoSymbol) NoSymbol
- else definitions.getClass("android.os.Parcelable$Creator")
+ private lazy val AndroidParcelableInterface = definitions.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = definitions.getClassIfDefined("android.os.Parcelable$Creator")
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
- (sym.info.parents contains AndroidParcelableInterface.tpe)
+ (sym.parentSymbols contains AndroidParcelableInterface)
def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
import codegen._
- val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
- .setFlag(Flags.STATIC | Flags.FINAL)
- .setInfo(AndroidCreatorClass.tpe)
+ val fieldSymbol = (
+ clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ setInfo AndroidCreatorClass.tpe
+ )
val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
clasz addField new IField(fieldSymbol)
block emit CALL_METHOD(methodSymbol, Static(false))
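The GenAndroid hunk swaps a try/catch around `definitions.getClass` for `getClassIfDefined`, which yields `NoSymbol` when the Android classes are not on the classpath. The sketch below illustrates the same "lookup or sentinel" idea with plain JVM reflection; the helper name and the sentinel are stand-ins, not the compiler API.

object OptionalLookupSketch {
  // Sentinel standing in for NoSymbol.
  val NoClass: Class[_] = classOf[Unit]

  def getClassIfDefined(name: String): Class[_] =
    try Class.forName(name)
    catch { case _: ClassNotFoundException => NoClass }

  def main(args: Array[String]): Unit = {
    val parcelable = getClassIfDefined("android.os.Parcelable")
    // On a non-Android classpath this prints the sentinel case; callers just
    // test against NoClass instead of wrapping every lookup in try/catch.
    println(if (parcelable == NoClass) "not on classpath" else parcelable.getName)
  }
}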
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index a2c4f16bf1..b5232fff09 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -3,7 +3,6 @@
* @author Iulian Dragos
*/
-
package scala.tools.nsc
package backend.jvm
@@ -13,6 +12,7 @@ import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.reflect.SigParser
import scala.tools.nsc.symtab._
+import scala.tools.nsc.util.{ SourceFile, NoSourceFile }
import scala.reflect.internal.ClassfileConstants._
import ch.epfl.lamp.fjbg._
import JAccessFlags._
@@ -30,13 +30,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- AnyClass, ObjectClass, ThrowsClass, ThrowableClass, ClassfileAnnotationClass,
- SerializableClass, StringClass, ClassClass, FunctionClass,
- DeprecatedAttr, SerializableAttr, SerialVersionUIDAttr, VolatileAttr,
- TransientAttr, CloneableAttr, RemoteAttr, JavaCloneableClass,
- RemoteInterfaceClass, RemoteExceptionClass, hasJavaMainMethod
- }
+ import definitions._
val phaseName = "jvm"
@@ -68,10 +62,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
def isJavaEntryPoint(clasz: IClass) = {
val sym = clasz.symbol
- def fail(msg: String) = {
+ def fail(msg: String, pos: Position = sym.pos) = {
clasz.cunit.warning(sym.pos,
- sym.name + " has a main method, but " + sym.fullName('.') + " will not be a runnable program.\n" +
- " " + msg + ", which means no static forwarder can be generated.\n"
+ sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
+ " Reason: " + msg
// TODO: make this next claim true, if possible
// by generating valid main methods as static in module classes
// not sure what the jvm allows here
@@ -79,19 +73,47 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
)
false
}
- sym.hasModuleFlag && hasJavaMainMethod(sym) && {
- // At this point we've seen a module with a main method, so if this
- // doesn't turn out to be a valid entry point, issue a warning.
- val companion = sym.linkedClassOfClass
- if (companion.isTrait)
- fail("Its companion is a trait")
- else if (hasJavaMainMethod(companion) && !(sym isSubClass companion))
- fail("Its companion contains its own main method")
- // this is only because forwarders aren't smart enough yet
- else if (companion.tpe.member(nme.main) != NoSymbol)
- fail("Its companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
- else
- true
+ def failNoForwarder(msg: String) = {
+ fail(msg + ", which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either
+ // succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ atPhase(currentRun.erasurePhase) {
+ val companion = sym.linkedClassOfClass
+ val companionMain = companion.tpe.member(nme.main)
+
+ if (hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeed, or issue some additional warnings for things which look like
+ // attempts to be java main methods.
+ else possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail("don't know what this is: " + tp, m.pos)
+ }
+ }
+ }
}
}
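The rewritten `isJavaEntryPoint` first collects `main` alternatives whose single parameter is an array, then reports a specific reason when a candidate is generic, mentions abstract types, or misses the exact `(Array[String])Unit` signature. A stripped-down sketch of that final cascade over a toy model of method types; the type and case-class names below are invented for illustration only.

object MainShapeSketch {
  sealed trait Tpe
  case object UnitTpe                    extends Tpe
  case object StringTpe                  extends Tpe
  case class  ArrayTpe(elem: Tpe)        extends Tpe
  case class  TypeParamRef(name: String) extends Tpe

  sealed trait Info
  case class MethodTpe(params: List[Tpe], result: Tpe)            extends Info
  case class PolyTpe(tparams: List[String], underlying: MethodTpe) extends Info

  // Mirrors the cascade above: generic mains and abstract-typed mains are
  // rejected with a specific reason; only (Array[String])Unit is accepted.
  def checkMain(info: Info): Either[String, Unit] = info match {
    case PolyTpe(_, _) =>
      Left("main methods cannot be generic")
    case MethodTpe(params, res) if (res :: params).exists(_.isInstanceOf[TypeParamRef]) =>
      Left("main methods cannot refer to type parameters or abstract types")
    case MethodTpe(List(ArrayTpe(StringTpe)), UnitTpe) =>
      Right(())
    case _ =>
      Left("main method must have exact signature (Array[String])Unit")
  }

  def main(args: Array[String]): Unit = {
    println(checkMain(MethodTpe(List(ArrayTpe(StringTpe)), UnitTpe)))   // Right(())
    println(checkMain(PolyTpe(List("T"), MethodTpe(Nil, UnitTpe))))     // Left(... generic)
    println(checkMain(MethodTpe(List(TypeParamRef("T")), UnitTpe)))     // Left(... type parameters)
  }
}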
@@ -183,10 +205,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
// Scala attributes
- val BeanInfoAttr = definitions.getClass("scala.beans.BeanInfo")
- val BeanInfoSkipAttr = definitions.getClass("scala.beans.BeanInfoSkip")
- val BeanDisplayNameAttr = definitions.getClass("scala.beans.BeanDisplayName")
- val BeanDescriptionAttr = definitions.getClass("scala.beans.BeanDescription")
+ val BeanInfoAttr = definitions.getRequiredClass("scala.beans.BeanInfo")
+ val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
+ val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
+ val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
+
+ final val ExcludedForwarderFlags = {
+ import Flags._
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | BridgeAndPrivateFlags )
+ }
// Additional interface parents based on annotations and other cues
def newParentForAttr(attr: Symbol): Option[Type] = attr match {
@@ -291,7 +318,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(jclass.getName()) =>
+ case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
val scalaAttr =
fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
versionPickle.bytes, versionPickle.writeIndex)
@@ -355,7 +382,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (isTopLevelModule(c.symbol)) {
if (c.symbol.companionClass == NoSymbol)
- generateMirrorClass(c.symbol, c.cunit.source.toString)
+ generateMirrorClass(c.symbol, c.cunit.source)
else
log("No mirror class for module with linked class: " +
c.symbol.fullName)
@@ -754,7 +781,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
null
else {
val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(outerName)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
else outerName
}
}
@@ -849,7 +876,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (outerField != NoSymbol) {
log("Adding fake local to represent outer 'this' for closure " + clasz)
val _this = new Local(
- method.symbol.newVariable(NoPosition, nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
+ method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
m.locals = m.locals ::: List(_this)
computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
@@ -925,8 +952,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
mopt match {
case Some(m) =>
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
if (isStaticModule(clasz.symbol)) {
@@ -939,9 +966,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
serialVUID foreach { value =>
import Flags._, definitions._
val fieldName = "serialVersionUID"
- val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
- .setFlag(STATIC | FINAL)
- .setInfo(longType)
+ val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo longType
clasz addField new IField(fieldSymbol)
lastBlock emit CONSTANT(Constant(value))
lastBlock emit STORE_FIELD(fieldSymbol, true)
@@ -1044,32 +1069,20 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val className = jclass.getName
val linkedClass = moduleClass.companionClass
val linkedModule = linkedClass.companionSymbol
+ lazy val conflictingNames: Set[Name] = {
+ linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
+ }
+ debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
- /** There was a bit of a gordian logic knot here regarding forwarders.
- * All we really have to do is exclude certain categories of symbols and
- * then all matching names.
- */
- def memberNames(sym: Symbol) = sym.info.members map (_.name.toString) toSet
- lazy val membersInCommon =
- memberNames(linkedModule) intersect memberNames(linkedClass)
-
- /** Should method `m` get a forwarder in the mirror class? */
- def shouldForward(m: Symbol): Boolean = (
- m.owner != ObjectClass
- && m.isMethod
- && m.isPublic
- && !m.hasFlag(Flags.CASE | Flags.DEFERRED | Flags.SPECIALIZED | Flags.LIFTED)
- && !m.isConstructor
- && !m.isStaticMember
- && !membersInCommon(m.name.toString)
- )
-
- for (m <- moduleClass.info.nonPrivateMembers) {
- if (shouldForward(m)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
+ debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
+ else if (conflictingNames(m.name))
+ log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
addForwarder(jclass, moduleClass, m)
}
- else debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
}
}
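The forwarder hunk replaces the hand-rolled `shouldForward` predicate with a flag mask (`ExcludedForwarderFlags`) plus the set of term names already defined on the linked class, so a static forwarder is emitted only for concrete, non-constructor members that are neither flag-excluded nor name-clashing. A compact sketch of the same filter over plain data; the member model and flag bits are invented for illustration.

object ForwarderFilterSketch {
  final val CASE    = 1 << 0
  final val LIFTED  = 1 << 1
  final val PRIVATE = 1 << 2
  final val ExcludedForwarderFlags = CASE | LIFTED | PRIVATE

  final case class Member(name: String, flags: Int,
                          isConstructor: Boolean = false, isDeferred: Boolean = false)

  def forwarders(moduleMembers: List[Member], conflictingNames: Set[String]): List[Member] =
    moduleMembers filter { m =>
      (m.flags & ExcludedForwarderFlags) == 0 &&
      !m.isConstructor &&
      !m.isDeferred &&
      !conflictingNames(m.name)
    }

  def main(args: Array[String]): Unit = {
    val members = List(
      Member("apply", 0),
      Member("copy$default$1", LIFTED),
      Member("toString", 0),                    // assume the linked class already defines it
      Member("<init>", 0, isConstructor = true)
    )
    println(forwarders(members, conflictingNames = Set("toString")).map(_.name))
    // List(apply)
  }
}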
@@ -1079,7 +1092,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* generated if there is no companion class: if there is, an attempt will
* instead be made to add the forwarder methods to the companion class.
*/
- def generateMirrorClass(clasz: Symbol, sourceFile: String) {
+ def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
import JAccessFlags._
val moduleName = javaName(clasz) // + "$"
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
@@ -1087,7 +1100,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
mirrorName,
JAVA_LANG_OBJECT.getName,
JClass.NO_INTERFACES,
- sourceFile)
+ "" + sourceFile)
log("Dumping mirror class for '%s'".format(mirrorClass.getName))
addForwarders(mirrorClass, clasz)
@@ -1199,7 +1212,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
hostSymbol.info ; methodOwner.info
def isInterfaceCall(sym: Symbol) = (
- sym.isInterface
+ sym.isInterface && methodOwner != ObjectClass
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
@@ -1901,7 +1914,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (sym.isInterface) ACC_INTERFACE else 0,
if (finalFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge || sym.hasFlag(Flags.MIXEDIN) && sym.isMethod) ACC_BRIDGE else 0,
+ if (sym.isBridge) ACC_BRIDGE else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0
)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
index acaf1f6cc2..93d3d19ac8 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -33,11 +33,11 @@ trait GenJVMUtil {
)
// Don't put this in per run caches.
- private val javaNameCache = new mutable.WeakHashMap[Symbol, String]() ++= List(
- NothingClass -> RuntimeNothingClass.fullName('/'),
- RuntimeNothingClass -> RuntimeNothingClass.fullName('/'),
- NullClass -> RuntimeNullClass.fullName('/'),
- RuntimeNullClass -> RuntimeNullClass.fullName('/')
+ private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
)
/** This trait may be used by tools who need access to
@@ -70,7 +70,6 @@ trait GenJVMUtil {
def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
/** Return the name of this symbol that can be used on the Java
* platform. It removes spaces from names.
*
@@ -86,11 +85,13 @@ trait GenJVMUtil {
*/
def javaName(sym: Symbol): String =
javaNameCache.getOrElseUpdate(sym, {
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- })
+ sym.name.newName(
+ if (sym.isClass || (sym.isModule && !sym.isMethod))
+ sym.javaBinaryName
+ else
+ sym.javaSimpleName
+ )
+ }).toString
def javaType(t: TypeKind): JType = (t: @unchecked) match {
case UNIT => JType.VOID
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index e208f5a8da..d2e54ff3f1 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -124,8 +124,8 @@ abstract class GenMSIL extends SubComponent {
// Scala attributes
// symtab.Definitions -> object (singleton..)
val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.getClass("scala.cloneable").tpe
- val TransientAtt = definitions.getClass("scala.transient").tpe
+ val CloneableAttr = definitions.CloneableAttr.tpe
+ val TransientAtt = definitions.TransientAttr.tpe
// remoting: the architectures are too different, no mapping (no portable code
// possible)
@@ -1898,8 +1898,8 @@ abstract class GenMSIL extends SubComponent {
val sc = iclass.lookupStaticCtor
if (sc.isDefined) {
val m = sc.get
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
// call object's private ctor from static ctor
lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 3e921cf472..e8abee7d06 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -94,12 +94,12 @@ abstract class ClosureElimination extends SubComponent {
import copyPropagation._
/* Some embryonic copy propagation. */
- def analyzeMethod(m: IMethod): Unit = try {if (m.code ne null) {
+ def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
log("Analyzing " + m)
cpp.init(m)
cpp.run
- for (bb <- linearizer.linearize(m)) {
+ m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
debuglog("Cpp info at entry to block " + bb + ": " + info)
@@ -201,28 +201,25 @@ abstract class ClosureElimination extends SubComponent {
/** Peephole optimization. */
abstract class PeepholeOpt {
- private var method: IMethod = null
+ private var method: IMethod = NoIMethod
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
- def apply(m: IMethod): Unit = if (m.code ne null) {
+ def apply(m: IMethod): Unit = if (m.hasCode) {
method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
liveness.run
- for (b <- m.code.blocks)
- transformBlock(b)
+ m foreachBlock transformBlock
}
def transformBlock(b: BasicBlock): Unit = if (b.size >= 2) {
- var newInstructions: List[Instruction] = Nil
-
- newInstructions = b.toList
-
+ var newInstructions: List[Instruction] = b.toList
var redo = false
+
do {
var h = newInstructions.head
var t = newInstructions.tail
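`PeepholeOpt.transformBlock` above keeps a `redo` flag and re-scans the block whenever a peephole rule fired on an adjacent instruction pair. A generic sketch of that fixed-point pairwise rewriting; the single rule here (a push immediately followed by a drop disappears) is purely illustrative.

object PeepholeSketch {
  sealed trait Instr
  case class Push(n: Int) extends Instr
  case object Drop        extends Instr
  case object Nop         extends Instr

  // One illustrative rule: a value pushed and immediately dropped can vanish.
  def peep(i1: Instr, i2: Instr): Option[List[Instr]] = (i1, i2) match {
    case (Push(_), Drop) => Some(Nil)
    case _               => None
  }

  // One left-to-right scan; reports whether any rewrite fired.
  def onePass(block: List[Instr]): (List[Instr], Boolean) = block match {
    case i1 :: i2 :: rest =>
      peep(i1, i2) match {
        case Some(repl) => (repl ::: rest, true)
        case None       =>
          val (tail, changed) = onePass(i2 :: rest)
          (i1 :: tail, changed)
      }
    case other => (other, false)
  }

  // Repeat scans until a pass makes no change, i.e. a fixed point is reached.
  def transform(block: List[Instr]): List[Instr] = {
    var instrs = block
    var redo   = true
    while (redo) {
      val (next, changed) = onePass(instrs)
      instrs = next
      redo   = changed
    }
    instrs
  }

  def main(args: Array[String]): Unit = {
    println(transform(List(Nop, Push(1), Drop, Push(2))))   // List(Nop, Push(2))
    println(transform(List(Push(1), Push(2), Drop, Drop)))  // List()
  }
}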
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 64df3b4636..5fc7329955 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -72,7 +72,7 @@ abstract class DeadCodeElimination extends SubComponent {
val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
- if (m.code ne null) {
+ if (m.hasCode) {
log("dead code elimination on " + m);
dropOf.clear()
m.code.blocks.clear()
@@ -90,12 +90,12 @@ abstract class DeadCodeElimination extends SubComponent {
}
/** collect reaching definitions and initial useful instructions for this method. */
- def collectRDef(m: IMethod): Unit = if (m.code ne null) {
+ def collectRDef(m: IMethod): Unit = if (m.hasCode) {
defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
rdef.init(m);
rdef.run;
- for (bb <- m.code.blocks.toList) {
+ m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
@@ -184,7 +184,7 @@ abstract class DeadCodeElimination extends SubComponent {
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
- for (bb <- m.code.blocks.toList) {
+ m foreachBlock { bb =>
// Console.println("** Sweeping block " + bb + " **")
val oldInstr = bb.toList
bb.open
@@ -223,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent {
private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
- for (bb <- m.code.blocks) {
+ m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
for ((i, idx) <- bb.toList.zipWithIndex) {
if (!useful(bb)(idx)) {
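`collectRDef` and `sweep` above act as a mark-and-sweep over instructions: a per-block `mutable.BitSet` records which instruction indices are useful, and the sweep keeps only those. The sketch below shows just that bookkeeping for a single block, with a deliberately simplified mark phase (the real pass traces usefulness transitively through reaching definitions).

object MarkSweepSketch {
  import scala.collection.mutable

  final case class Instr(name: String, sideEffecting: Boolean)

  def eliminate(block: List[Instr]): List[Instr] = {
    val useful = new mutable.BitSet(block.size)
    // Mark phase (simplified): every side-effecting instruction is useful.
    for ((i, idx) <- block.zipWithIndex if i.sideEffecting) useful += idx
    // Sweep phase: keep exactly the marked indices, preserving order.
    for ((i, idx) <- block.zipWithIndex if useful(idx)) yield i
  }

  def main(args: Array[String]): Unit = {
    val block = List(
      Instr("load x", sideEffecting = false),
      Instr("call println", sideEffecting = true),
      Instr("load y", sideEffecting = false)
    )
    println(eliminate(block).map(_.name))   // List(call println)
  }
}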
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index 1d971beae4..ec137203bf 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -79,7 +79,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
/* Type Flow Analysis */
private val tfa: analysis.MethodTFA = new analysis.MethodTFA()
private var tfaCache: Map[Int, tfa.lattice.Elem] = Map.empty
- private var analyzedMethod: IMethod = null
+ private var analyzedMethod: IMethod = NoIMethod
/* Blocks that need to be analyzed */
private var todoBlocks: List[BasicBlock] = Nil
@@ -110,7 +110,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
* inlined blocks, so worst case scenario we double the size of the code
*/
private def applyMethod(method: IMethod): Unit = {
- if (method.code ne null) {
+ if (method.hasCode) {
// create the list of starting blocks
todoBlocks = global.icodes.linearizer.linearize(method)
@@ -127,7 +127,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
todoBlocks = Nil
// Type flow analysis cleanup
- analyzedMethod = null
+ analyzedMethod = NoIMethod
tfaCache = Map.empty
//TODO: Need a way to clear tfa structures
}
@@ -151,7 +151,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
* - we change the THROW exception to the new Clear stack + JUMP code
*/
for {
- (instr @ THROW(clazz), index) <- bblock.zipWithIndex
+ (instr @ THROW(clazz), index) <- bblock.iterator.zipWithIndex
// Decide if any handler fits this exception
// If not, then nothing to do, we cannot determine statically which handler will catch the exception
(handler, caughtException) <- findExceptionHandler(toTypeKind(clazz.tpe), bblock.exceptionSuccessors)
@@ -181,7 +181,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
if (!canReplaceHandler) {
currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
- " block:\n" + bblock.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
+ " block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
"before instruction index " + index)
}
else {
@@ -203,7 +203,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
// Here we could create a single local for all exceptions of a certain type. TODO: try that.
val localName = currentClass.cunit.freshTermName("exception$")
val localType = exceptionType
- val localSymbol = bblock.method.symbol.newValue(NoPosition, localName).setInfo(localType.toType)
+ val localSymbol = bblock.method.symbol.newValue(localName).setInfo(localType.toType)
val local = new Local(localSymbol, localType, false)
bblock.method.addLocal(local)
@@ -261,13 +261,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
private def getTypesAtBlockEntry(bblock: BasicBlock): tfa.lattice.Elem = {
// lazily perform tfa, because it's expensive
// cache results by block label, as rewriting the code messes up the block's hashCode
- if (analyzedMethod eq null) {
+ if (analyzedMethod eq NoIMethod) {
analyzedMethod = bblock.method
tfa.init(bblock.method)
tfa.run
log(" performed tfa on method: " + bblock.method)
- for (block <- bblock.method.code.blocks.sortBy(_.label))
+ for (block <- bblock.method.blocks.sortBy(_.label))
tfaCache += block.label -> tfa.in(block)
}
@@ -360,7 +360,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
val copy = handler.code.newBlock
- copy.emitOnly(handler drop dropCount: _*)
+ copy.emitOnly(handler.iterator drop dropCount toSeq: _*)
// extend the handlers of the handler to the copy
for (parentHandler <- handler.method.exh ; if parentHandler covers handler) {
@@ -382,7 +382,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
case _ =>
currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
- handler.mkString("\n"))
+ handler.iterator.mkString("\n"))
None
}
}
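The THROW-rewriting loop above relies on `findExceptionHandler` to pick the first enclosing handler that is statically guaranteed to catch the thrown exception's type. The sketch below illustrates that "first definitely-catching handler" rule using JVM classes as the type hierarchy; the Handler shape is invented for illustration.

object HandlerMatchSketch {
  final case class Handler(caught: Class[_ <: Throwable], label: String)

  // First handler whose caught class is a supertype of the thrown class;
  // None means no handler is statically guaranteed to catch it.
  def findExceptionHandler(thrown: Class[_ <: Throwable],
                           handlers: List[Handler]): Option[Handler] =
    handlers find (h => h.caught.isAssignableFrom(thrown))

  def main(args: Array[String]): Unit = {
    val handlers = List(
      Handler(classOf[IllegalArgumentException], "h1"),
      Handler(classOf[RuntimeException],         "h2"),
      Handler(classOf[Throwable],                "h3")
    )
    println(findExceptionHandler(classOf[NumberFormatException], handlers).map(_.label)) // Some(h1)
    println(findExceptionHandler(classOf[java.io.IOException],   handlers).map(_.label)) // Some(h3)
  }
}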
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index d56a0740d9..66f802f74f 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -9,6 +9,7 @@ package backend.opt
import scala.collection.mutable
import scala.tools.nsc.symtab._
+import scala.tools.nsc.util.{ NoSourceFile }
/**
* @author Iulian Dragos
@@ -61,6 +62,15 @@ abstract class Inliners extends SubComponent {
override def apply(c: IClass) {
inliner analyzeClass c
}
+
+ override def run() {
+ try {
+ super.run()
+ } finally {
+ inliner.NonPublicRefs.usesNonPublics.clear()
+ inliner.recentTFAs.clear
+ }
+ }
}
def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
@@ -78,17 +88,10 @@ abstract class Inliners extends SubComponent {
val Private, Protected, Public = Value
/** Cache whether a method calls private members. */
- val usesNonPublics: mutable.Map[IMethod, Value] = perRunCaches.newMap()
+ val usesNonPublics = mutable.Map.empty[IMethod, Value]
}
import NonPublicRefs._
- /* fresh name counter */
- val fresh = perRunCaches.newMap[String, Int]() withDefaultValue 0
- def freshName(s: String) = {
- fresh(s) += 1
- s + fresh(s)
- }
-
private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
@@ -96,27 +99,60 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
+ val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
+ private def getRecentTFA(incm: IMethod): (Boolean, analysis.MethodTFA) = {
+
+ def containsRETURN(blocks: List[BasicBlock]) = blocks exists { bb => bb.lastInstruction.isInstanceOf[RETURN] }
+
+ val opt = recentTFAs.get(incm.symbol)
+ if(opt.isDefined) {
+ // FYI val cachedBBs = opt.get._2.in.keySet
+ // FYI assert(incm.blocks.toSet == cachedBBs)
+ // incm.code.touched plays no role here
+ return opt.get
+ }
+
+ val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
+ var a: analysis.MethodTFA = null
+ if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+
+ if(hasInline(incm.symbol)) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
+
+ (hasRETURN, a)
+ }
+
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
debuglog("Analyzing " + cls)
this.currentIClazz = cls
- cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ val ms = cls.methods filterNot { _.symbol.isConstructor }
+ ms foreach { im =>
+ if(hasInline(im.symbol)) {
+ log("Not inlining into " + im.symbol.originalName.decode + " because it is marked @inline.")
+ } else if(im.hasCode && !im.symbol.isBridge) {
+ analyzeMethod(im)
+ }
+ }
}
- val tfa = new analysis.MethodTFA()
+ val tfa = new analysis.MTFAGrowable()
tfa.stat = global.opt.printStats
-
- // how many times have we already inlined this method here?
- private val inlinedMethodCount = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
+ val staleOut = new mutable.ListBuffer[BasicBlock]
+ val splicedBlocks = mutable.Set.empty[BasicBlock]
+ val staleIn = mutable.Set.empty[BasicBlock]
def analyzeMethod(m: IMethod): Unit = {
- var sizeBeforeInlining = if (m.code ne null) m.code.blockCount else 0
- var instrBeforeInlining = if (m.code ne null) m.code.instructionCount else 0
+ var sizeBeforeInlining = m.code.blockCount
+ var instrBeforeInlining = m.code.instructionCount
var retry = false
var count = 0
- fresh.clear()
- inlinedMethodCount.clear()
+
+ // fresh name counter
+ val fresh = mutable.HashMap.empty[String, Int] withDefaultValue 0
+ // how many times have we already inlined this method here?
+ val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
+
val caller = new IMethodInfo(m)
var info: tfa.lattice.Elem = null
@@ -135,17 +171,20 @@ abstract class Inliners extends SubComponent {
warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
}
- if (shouldLoadImplFor(concreteMethod, receiver)) {
+ def isAvailable = icodes available concreteMethod.enclClass
+
+ if (!isAvailable && shouldLoadImplFor(concreteMethod, receiver)) {
// Until r22824 this line was:
// icodes.icode(concreteMethod.enclClass, true)
//
- // Changing it to the below was the proximate cause for SI-3882:
+ // Changing it to
+ // icodes.load(concreteMethod.enclClass)
+ // was the proximate cause for SI-3882:
// error: Illegal index: 0 overlaps List((variable par1,LONG))
// error: Illegal index: 0 overlaps List((variable par1,LONG))
icodes.load(concreteMethod.enclClass)
}
- def isAvailable = icodes available concreteMethod.enclClass
def isCandidate = (
isClosureClass(receiver)
|| concreteMethod.isEffectivelyFinal
@@ -168,7 +207,7 @@ abstract class Inliners extends SubComponent {
lookupIMethod(concreteMethod, receiver) match {
case Some(callee) =>
val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc)
+ val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
if (pair isStampedForInlining info.stack) {
retry = true
@@ -185,6 +224,7 @@ abstract class Inliners extends SubComponent {
* might have changed after the inlining.
*/
usesNonPublics -= m
+ recentTFAs -= m.symbol
}
else {
if (settings.debug.value)
@@ -207,34 +247,35 @@ abstract class Inliners extends SubComponent {
import scala.util.control.Breaks._
do {
retry = false
- if (caller.inline) {
- log("Not inlining into " + caller.sym.originalName.decode + " because it is marked @inline.")
- }
- else if (caller.hasCode) {
- log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
- tfa init m
- tfa.run
- caller.linearized foreach { bb =>
- info = tfa in bb
-
- breakable {
- for (i <- bb) {
- i match {
- // Dynamic == normal invocations
- // Static(true) == calls to private members
- case CALL_METHOD(msym, Dynamic | Static(true)) if !msym.isConstructor =>
- if (analyzeInc(msym, i, bb))
- break
- case _ => ()
- }
- info = tfa.interpret(info, i)
+ log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
+ tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
+ tfa.run
+ staleOut.clear()
+ splicedBlocks.clear()
+ staleIn.clear()
+
+ caller.m.linearizedBlocks() foreach { bb =>
+ info = tfa in bb
+
+ breakable {
+ for (i <- bb) {
+ i match {
+ // Dynamic == normal invocations
+ // Static(true) == calls to private members
+ case CALL_METHOD(msym, Dynamic | Static(true)) if !msym.isConstructor =>
+ if (analyzeInc(msym, i, bb)) {
+ break
+ }
+ case _ => ()
}
+ info = tfa.interpret(info, i)
}
}
- if (tfa.stat)
- log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
}
+
+ if (tfa.stat)
+ log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
}
while (retry && count < MAX_INLINE_RETRY)
@@ -303,26 +344,23 @@ abstract class Inliners extends SubComponent {
def inline = hasInline(sym)
def noinline = hasNoInline(sym)
- def numInlined = inlinedMethodCount(sym)
def isBridge = sym.isBridge
def isInClosure = isClosureClass(owner)
- def isHigherOrder = isHigherOrderMethod(sym)
+ val isHigherOrder = isHigherOrderMethod(sym)
def isMonadic = isMonadicMethod(sym)
def handlers = m.exh
- def blocks = if (m.code eq null) sys.error("blocks = null + " + m) else m.code.blocks
+ def blocks = m.blocks
def locals = m.locals
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
- def instructions = blocks.flatten
- def linearized = linearizer linearize m
+ def instructions = m.code.instructions
+ // def linearized = linearizer linearize m
def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
- def hasCode = m.code != null
- def hasSourceFile = m.sourceFile != null
def hasHandlers = handlers.nonEmpty
def hasNonFinalizerHandler = handlers exists {
case _: Finalizer => true
@@ -337,21 +375,29 @@ abstract class Inliners extends SubComponent {
def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
}
- class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo) {
+ class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: collection.Map[Symbol, Int]) {
def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
+ private def freshName(s: String): TermName = {
+ fresh(s) += 1
+ newTermName(s + fresh(s))
+ }
+
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
def doInline(block: BasicBlock, instr: Instruction) {
+
+ staleOut += block
+
val targetPos = instr.pos
log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
def blockEmit(i: Instruction) = block.emit(i, targetPos)
def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(targetPos, freshName(baseName)), kind, false)
+ new Local(caller.sym.newVariable(freshName(baseName), targetPos), kind, false)
- val a = new analysis.MethodTFA(inc.m)
+ val (hasRETURN, a) = getRecentTFA(inc.m)
/* The exception handlers that are active at the current block. */
val activeHandlers = caller.handlers filter (_ covered block)
@@ -380,7 +426,7 @@ abstract class Inliners extends SubComponent {
case x => newLocal("$retVal", x)
}
- val inlinedLocals = perRunCaches.newMap[Local, Local]()
+ val inlinedLocals = mutable.HashMap.empty[Local, Local]
/** Add a new block in the current context. */
def newBlock() = {
@@ -401,7 +447,7 @@ abstract class Inliners extends SubComponent {
/** alpha-rename `l` in caller's context. */
def dupLocal(l: Local): Local = {
- val sym = caller.sym.newVariable(l.sym.pos, freshName(l.sym.name.toString))
+ val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
// sym.setInfo(l.sym.tpe)
val dupped = new Local(sym, l.kind, false)
inlinedLocals(l) = dupped
@@ -457,14 +503,11 @@ abstract class Inliners extends SubComponent {
if (retVal ne null)
caller addLocal retVal
- inc.blocks foreach { b =>
+ inc.m foreachBlock { b =>
inlinedBlock += (b -> newBlock())
inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
}
- // analyse callee
- a.run
-
// re-emit the instructions before the call
block.open
block.clear
@@ -475,12 +518,13 @@ abstract class Inliners extends SubComponent {
blockEmit(STORE_LOCAL(inlinedThis))
// jump to the start block of the callee
- blockEmit(JUMP(inlinedBlock(inc.m.code.startBlock)))
+ blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
block.close
// duplicate the other blocks in the callee
- linearizer linearize inc.m foreach { bb =>
- var info = a in bb
+ val calleeLin = inc.m.linearizedBlocks()
+ calleeLin foreach { bb =>
+ var info = if(hasRETURN) (a in bb) else null
def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
@@ -496,7 +540,7 @@ abstract class Inliners extends SubComponent {
case _ => ()
}
emitInlined(map(i))
- info = a.interpret(info, i)
+ info = if(hasRETURN) a.interpret(info, i) else null
}
inlinedBlock(bb).close
}
@@ -504,6 +548,9 @@ abstract class Inliners extends SubComponent {
afterBlock emit instrAfter
afterBlock.close
+ staleIn += afterBlock
+ splicedBlocks ++= (calleeLin map inlinedBlock)
+
// add exception handlers of the callee
caller addHandlers (inc.handlers map translateExh)
assert(pending.isEmpty, "Pending NEW elements: " + pending)
@@ -511,23 +558,23 @@ abstract class Inliners extends SubComponent {
}
def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.hasCode && shouldInline && isSafeToInline(stack)
+ !sameSymbols && inc.m.hasCode && shouldInline && isSafeToInline(stack)
def logFailure(stack: TypeStack) = log(
"""|inline failed for %s:
| pair.sameSymbols: %s
| inc.numInlined < 2: %s
- | inc.hasCode: %s
+ | inc.m.hasCode: %s
| isSafeToInline: %s
| shouldInline: %s
""".stripMargin.format(
- inc.m, sameSymbols, inc.numInlined < 2,
- inc.hasCode, isSafeToInline(stack), shouldInline
+ inc.m, sameSymbols, inlinedMethodCount(inc.sym) < 2,
+ inc.m.hasCode, isSafeToInline(stack), shouldInline
)
)
def failureReason(stack: TypeStack) =
- if (!inc.hasCode) "bytecode was unavailable"
+ if (!inc.m.hasCode) "bytecode was unavailable"
else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
else "of a bug (run with -Ylog:inline -Ydebug for more information)"
@@ -550,14 +597,14 @@ abstract class Inliners extends SubComponent {
*/
def isSafeToInline(stack: TypeStack): Boolean = {
def makePublic(f: Symbol): Boolean =
- inc.hasSourceFile && (f.isSynthetic || f.isParamAccessor) && {
+ (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
debuglog("Making not-private symbol out of synthetic: " + f)
f setNotFlag Flags.PRIVATE
true
}
- if (!inc.hasCode || inc.isRecursive)
+ if (!inc.m.hasCode || inc.isRecursive)
return false
val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
@@ -610,7 +657,7 @@ abstract class Inliners extends SubComponent {
* - it's good to inline closure functions.
* - it's bad (useless) to inline inside bridge methods
*/
- private def neverInline = caller.isBridge || !inc.hasCode || inc.noinline
+ private def neverInline = caller.isBridge || !inc.m.hasCode || inc.noinline
private def alwaysInline = inc.inline
def shouldInline: Boolean = !neverInline && (alwaysInline || {
@@ -618,28 +665,22 @@ abstract class Inliners extends SubComponent {
var score = 0
- // better not inline inside closures, but hope that the closure itself
- // is repeatedly inlined
- if (caller.isInClosure) score -= 2
+ // better not inline inside closures, but hope that the closure itself is repeatedly inlined
+ if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall)
- score += 1
+ if (inc.isSmall) score += 1;
+ if (inc.isLarge) score -= 1;
if (caller.isSmall && isLargeSum) {
score -= 1
debuglog("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
}
- if (inc.isLarge)
- score -= 1
- if (inc.isMonadic)
- score += 3
- else if (inc.isHigherOrder)
- score += 1
- if (inc.isInClosure)
- score += 2
- if (inc.numInlined > 2)
- score -= 2
+ if (inc.isMonadic) score += 3
+ else if (inc.isHigherOrder) score += 1
+
+ if (inc.isInClosure) score += 2;
+ if (inlinedMethodCount(inc.sym) > 2) score -= 2;
log("shouldInline(" + inc.m + ") score: " + score)
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 4c7263ef69..089ef9cf35 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,8 +18,8 @@ abstract class Changes {
abstract class Change
- private lazy val annotationsChecked =
- List(definitions.getClass("scala.specialized")) // Any others that should be checked?
+ private lazy val annotationsChecked =
+ List(definitions.SpecializedClass) // Any others that should be checked?
private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD |
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 5a510803ed..9a025b0d14 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -96,7 +96,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
- reporter.info(NoPosition, "No documentation generated with unsucessful compiler run", false)
+ reporter.info(null, "No documentation generated with unsuccessful compiler run", false)
case _: ClassNotFoundException =>
()
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index c21507ef45..4f05678d85 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -80,9 +80,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected.png",
"selected2-right.png",
"selected2.png",
- "unselected.png",
-
- "rootdoc.txt"
+ "unselected.png"
)
/** Generates the Scaladoc site for a model into the site root.
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index ea36eb03c7..efa524503c 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -196,11 +196,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
/** The start of a scaladoc code block */
protected val CodeBlockStart =
- new Regex("""(.*)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
+ new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
/** The end of a scaladoc code block */
protected val CodeBlockEnd =
- new Regex("""(.*)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
+ new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
/** A key used for a tag map. The key is built from the name of the tag and
* from the linked symbol if the tag has one.
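The CodeBlockStart/CodeBlockEnd change above only turns the leading `(.*)` into a reluctant `(.*?)`, so the first `{{{` (or `<pre>`) marker on a line is recognised rather than the last. A quick sketch of the difference, with the regexes reduced to the `{{{` alternative only:

object CodeBlockRegexSketch {
  val Greedy    = """(.*)(\{\{\{)(.*)""".r
  val Reluctant = """(.*?)(\{\{\{)(.*)""".r

  def main(args: Array[String]): Unit = {
    val line = "see {{{ first }}} and {{{ second"
    val Greedy(gBefore, _, gAfter)    = line
    val Reluctant(rBefore, _, rAfter) = line
    println(s"greedy    before=[$gBefore] after=[$gAfter]")   // splits at the *last* {{{
    println(s"reluctant before=[$rBefore] after=[$rAfter]")   // splits at the *first* {{{
  }
}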
@@ -250,7 +250,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
* @param remaining The lines that must still recursively be parsed.
* @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
def parse0 (
- docBody: String,
+ docBody: StringBuilder,
tags: Map[TagKey, List[String]],
lastTagKey: Option[TagKey],
remaining: List[String],
@@ -258,9 +258,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
): Comment = remaining match {
case CodeBlockStart(before, marker, after) :: ls if (!inCodeBlock) =>
- if (before.trim != "")
- parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, false)
- else if (after.trim != "")
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
+ else if (!before.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
+ else if (!after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
else lastTagKey match {
case Some(key) =>
@@ -271,24 +273,26 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
case None =>
- parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, true)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
}
case CodeBlockEnd(before, marker, after) :: ls =>
- if (before.trim != "")
- parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, true)
- else if (after.trim != "")
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
+ else if (!before.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
+ else if (!after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
else lastTagKey match {
case Some(key) =>
val value =
((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + "}}}") :: bs
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
case None => oops("lastTagKey set when no tag exists for key")
}
parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
case None =>
- parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, false)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
}
case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
@@ -311,8 +315,9 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
case line :: ls =>
- val newBody = if (docBody == "") line else docBody + endOfLine + line
- parse0(newBody, tags, lastTagKey, ls, inCodeBlock)
+ if (docBody.length > 0) docBody append endOfLine
+ docBody append line
+ parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
case Nil =>
@@ -350,7 +355,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
val com = createComment (
- body0 = Some(parseWiki(docBody, pos)),
+ body0 = Some(parseWiki(docBody.toString, pos)),
authors0 = allTags(SimpleTagKey("author")),
see0 = allTags(SimpleTagKey("see")),
result0 = oneTag(SimpleTagKey("return")),
@@ -374,7 +379,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
- parse0("", Map.empty, None, clean(comment), false)
+ parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
}
@@ -385,7 +390,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
* - Removed all end-of-line whitespace.
* - Only `endOfLine` is used to mark line endings. */
def parseWiki(string: String, pos: Position): Body = {
- new WikiParser(string.toArray, pos).document()
+ new WikiParser(string, pos).document()
}
/** TODO
@@ -393,7 +398,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
* @author Ingo Maier
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
- protected final class WikiParser(val buffer: Array[Char], pos: Position) extends CharReader(buffer) { wiki =>
+ protected final class WikiParser(val buffer: String, pos: Position) extends CharReader(buffer) { wiki =>
var summaryParsed = false
@@ -411,7 +416,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
def block(): Block = {
if (checkSkipInitWhitespace("{{{"))
code()
- else if (checkSkipInitWhitespace("="))
+ else if (checkSkipInitWhitespace('='))
title()
else if (checkSkipInitWhitespace("----"))
hrule()
@@ -493,7 +498,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
def title(): Block = {
jumpWhitespace()
val inLevel = repeatJump("=")
- val text = inline(check(Array.fill(inLevel)('=')))
+ val text = inline(check("=" * inLevel))
val outLevel = repeatJump("=", inLevel)
if (inLevel != outLevel)
reportError(pos, "unbalanced or unclosed heading")
@@ -734,11 +739,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
val pc = char
nextChar() // read EOL
val ok = {
- checkSkipInitWhitespace(Array(endOfLine)) ||
- checkSkipInitWhitespace(Array('=')) ||
- checkSkipInitWhitespace(Array('{', '{', '{')) ||
+ checkSkipInitWhitespace(endOfLine) ||
+ checkSkipInitWhitespace('=') ||
+ checkSkipInitWhitespace("{{{") ||
checkList ||
- checkSkipInitWhitespace(Array('\u003D'))
+ checkSkipInitWhitespace('\u003D')
}
offset = poff
char = pc
@@ -751,7 +756,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
}
}
- protected sealed class CharReader(buffer: Array[Char]) { reader =>
+ protected sealed class CharReader(buffer: String) { reader =>
var char: Char = _
var offset: Int = 0
@@ -760,36 +765,37 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
if (offset >= buffer.length)
char = endOfText
else {
- char = buffer(offset)
+ char = buffer charAt offset
offset += 1
}
}
- implicit def strintToChars(s: String): Array[Char] = s.toArray
-
- def store(body: => Unit): String = {
- val pre = offset
- body
- val post = offset
- buffer.toArray.slice(pre, post).toString
+ final def check(chars: String): Boolean = {
+ val poff = offset
+ val pc = char
+ val ok = jump(chars)
+ offset = poff
+ char = pc
+ ok
}
- final def check(chars: Array[Char]): Boolean = {
+ def checkSkipInitWhitespace(c: Char): Boolean = {
val poff = offset
val pc = char
- val ok = jump(chars)
+ jumpWhitespace()
+ val ok = jump(c)
offset = poff
char = pc
ok
}
- def checkSkipInitWhitespace(chars: Array[Char]): Boolean = {
+ def checkSkipInitWhitespace(chars: String): Boolean = {
val poff = offset
val pc = char
jumpWhitespace()
val (ok0, chars0) =
- if (chars.head == ' ')
- (offset > poff, chars.tail)
+ if (chars.charAt(0) == ' ')
+ (offset > poff, chars substring 1)
else
(true, chars)
val ok = ok0 && jump(chars0)
@@ -825,16 +831,16 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
/** jumps all the characters in chars, consuming them in the process.
* @return true only if the correct characters have been jumped */
- final def jump(chars: Array[Char]): Boolean = {
+ final def jump(chars: String): Boolean = {
var index = 0
- while (index < chars.length && char == chars(index) && char != endOfText) {
+ while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
nextChar()
index += 1
}
index == chars.length
}
- final def checkedJump(chars: Array[Char]): Boolean = {
+ final def checkedJump(chars: String): Boolean = {
val poff = offset
val pc = char
val ok = jump(chars)
@@ -845,7 +851,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
ok
}
- final def repeatJump(chars: Array[Char], max: Int): Int = {
+ final def repeatJump(chars: String, max: Int): Int = {
var count = 0
var more = true
while (more && count < max) {
@@ -857,7 +863,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
count
}
- final def repeatJump(chars: Array[Char]): Int = {
+ final def repeatJump(chars: String): Int = {
var count = 0
var more = true
while (more) {
@@ -878,10 +884,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
count
}
- final def jumpUntil(chars: Array[Char]): Int = {
+ final def jumpUntil(chars: String): Int = {
assert(chars.length > 0)
var count = 0
- val c = chars(0)
+ val c = chars.charAt(0)
while (!check(chars) && char != endOfText) {
nextChar()
while (char != c && char != endOfText) {
@@ -922,10 +928,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
count
}
- final def readUntil(chars: Array[Char]): Int = {
+ final def readUntil(chars: String): Int = {
assert(chars.length > 0)
var count = 0
- val c = chars(0)
+ val c = chars.charAt(0)
while (!check(chars) && char != endOfText) {
readBuilder += char
nextChar()
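
The CommentFactory changes above replace the Array[Char]-backed reader with a String-backed one and accumulate the comment body in a StringBuilder instead of rebuilding a String on every recursive parse0 call. Below is a minimal, self-contained sketch of the save/try/restore lookahead that check and checkSkipInitWhitespace rely on; MiniReader and its members are illustrative names under stated assumptions, not the compiler's own classes.

    object ReaderSketch {
      class MiniReader(buffer: String) {
        val endOfText = '\u0003'
        var offset = 0
        var char: Char = if (buffer.isEmpty) endOfText else buffer.charAt(0)

        def nextChar() {
          offset += 1
          char = if (offset >= buffer.length) endOfText else buffer.charAt(offset)
        }
        // consume the characters of `chars` if and only if they all match
        def jump(chars: String): Boolean = {
          var index = 0
          while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
            nextChar()
            index += 1
          }
          index == chars.length
        }
        // look ahead without consuming: save the cursor, attempt the jump, restore
        def check(chars: String): Boolean = {
          val savedOffset = offset
          val savedChar   = char
          val ok = jump(chars)
          offset = savedOffset
          char   = savedChar
          ok
        }
      }

      def main(args: Array[String]) {
        val r = new MiniReader("{{{ code }}}")
        println(r.check("{{{"))   // true, cursor unchanged
        println(r.jump("{{{"))    // true, cursor now past the marker
      }
    }
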
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index f900859f46..477cec8c8e 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -818,7 +818,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decode.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
val symtpe = pre.memberType(sym) onTypeError ErrorType
matching(sym, symtpe, this(sym.name)) match {
case Some(m) =>
@@ -1060,6 +1060,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+ // OnTypeError should still catch TypeError because of cyclic references,
+ // but DivergentImplicit shouldn't leak anymore here
class OnTypeError[T](op: => T) {
def onTypeError(alt: => T) = try {
op
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index 2b6e793c5c..b7a9c7329c 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -101,7 +101,7 @@ trait Picklers { self: Global =>
if (sym1.isOverloaded) {
val index = sym1.alternatives.indexOf(sym)
assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
- buf += index.toString
+ buf += newTermName(index.toString)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 81d4faa36e..1d78cc6e1c 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -37,7 +37,7 @@ object REPL {
reporter = new ConsoleReporter(settings)
val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else {
try {
object compiler extends Global(command.settings, reporter) {
@@ -48,7 +48,7 @@ object REPL {
return
}
if (command.shouldStopWithInfo) {
- reporter.info(null, command.getInfoMessage(compiler), true)
+ reporter.echo(command.getInfoMessage(compiler))
} else {
run(compiler)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index 14cbdf2f3b..f251fd83fb 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -166,10 +166,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
changesOf(oldSym) = (changes ++ changesErasure).distinct
case _ =>
// a new top level definition
- changesOf(sym) =
- sym.info.parents.filter(_.typeSymbol.isSealed).map(
- p => changeChangeSet(p.typeSymbol,
- sym+" extends a sealed "+p.typeSymbol))
+ changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
+ changeChangeSet(p, sym+" extends a sealed "+p))
}
}
// Create a change for the top level classes that were removed
@@ -242,7 +240,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
for ((oldSym, changes) <- changesOf; change <- changes) {
def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.info.parents.exists(_.typeSymbol.fullName == oldSym.fullName)
+ val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
// if (settings.buildmanagerdebug.value)
// compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
change match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
index 3bc4e1cbe1..3a605975f4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -20,6 +20,7 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
with ScalaClassLoader
{
// private val defined = mutable.Map[String, Class[_]]()
+
override protected def trace =
sys.props contains "scala.debug.classloader"
@@ -43,6 +44,22 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
}
}
+ protected def dirNameToPath(name: String): String =
+ name.replace('.', '/')
+
+ protected def findAbstractDir(name: String): AbstractFile = {
+ var file: AbstractFile = root
+ val pathParts = dirNameToPath(name) split '/'
+
+ for (dirPart <- pathParts) {
+ file = file.lookupName(dirPart, true)
+ if (file == null)
+ return null
+ }
+
+ return file
+ }
+
override def getResourceAsStream(name: String) = findAbstractFile(name) match {
case null => super.getResourceAsStream(name)
case file => file.input
@@ -74,4 +91,24 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
// case null => super.getResource(name)
// case file => new URL(...)
// }
+
+ private val packages = mutable.Map[String, Package]()
+
+ override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
+ throw new UnsupportedOperationException()
+ }
+
+ override def getPackage(name: String): Package = {
+ findAbstractDir(name) match {
+ case null => super.getPackage(name)
+ case file => packages.getOrElseUpdate(name, {
+ val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
+ ctor.setAccessible(true)
+ ctor.newInstance(name, null, null, null, null, null, null, null, this)
+ })
+ }
+ }
+
+ override def getPackages(): Array[Package] =
+ root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray
}
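
The getPackage override above reports virtual directories as real packages by constructing java.lang.Package instances reflectively, since no public factory exists outside ClassLoader.definePackage. The sketch below isolates that reflective construction under the JDKs of this era (it would be rejected by the module system on later JDKs); definePackageReflectively and its arguments are illustrative, and only the constructor signature is the one taken from the code above.

    import java.net.URL

    object PackageSketch {
      def definePackageReflectively(packageName: String, loader: ClassLoader): Package = {
        // java.lang.Package has no public constructor; reach for the private one
        val ctor = classOf[Package].getDeclaredConstructor(
          classOf[String], classOf[String], classOf[String], classOf[String],
          classOf[String], classOf[String], classOf[String],
          classOf[URL], classOf[ClassLoader])
        ctor.setAccessible(true)
        ctor.newInstance(packageName, null, null, null, null, null, null, null, loader)
      }

      def main(args: Array[String]) {
        val pkg = definePackageReflectively("repl.virtual", getClass.getClassLoader)
        println(pkg.getName)  // repl.virtual
      }
    }
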
diff --git a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
index 2c556656ca..d889cadf47 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
@@ -12,6 +12,7 @@ trait Dossiers {
import intp._
import intp.global._
+ import definitions._
trait Dossier {
def symbol: Symbol
@@ -34,7 +35,7 @@ trait Dossiers {
class TermDossier(val symbol: TermSymbol, val staticType: Type, val value: AnyRef) extends Dossier {
def runtimeClass: JClass = value.getClass
- def runtimeSymbol: Symbol = safeClass(runtimeClass.getName) getOrElse NoSymbol
+ def runtimeSymbol: Symbol = getClassIfDefined(runtimeClass.getName)
def runtimeType: Type = runtimeSymbol.tpe
def runtimeTypeString = TypeStrings.fromClazz(runtimeClass)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index 115cef7f00..39a1a406ba 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -15,6 +15,7 @@ trait ExprTyper {
import repl._
import replTokens.{ Tokenizer }
import global.{ reporter => _, Import => _, _ }
+ import definitions._
import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
import naming.freshInternalVarName
@@ -61,7 +62,7 @@ trait ExprTyper {
else Some(trees)
}
}
- def tokens(line: String) = beSilentDuring(codeParser.tokens(line))
+ def tokens(line: String) = beQuietDuring(codeParser.tokens(line))
// TODO: integrate these into a CodeHandler[Type].
@@ -70,30 +71,29 @@ trait ExprTyper {
// 2) A path loadable via getModule.
// 3) Try interpreting it as an expression.
private var typeOfExpressionDepth = 0
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
+ def typeOfExpression(expr: String, silent: Boolean = true): Type = {
repltrace("typeOfExpression(" + expr + ")")
if (typeOfExpressionDepth > 2) {
repldbg("Terminating typeOfExpression recursion for expression: " + expr)
- return None
+ return NoType
}
- def asQualifiedImport = {
+ def asQualifiedImport: Type = {
val name = expr.takeWhile(_ != '.')
- importedTermNamed(name) flatMap { sym =>
- typeOfExpression(sym.fullName + expr.drop(name.length), true)
- }
+ typeOfExpression(importedTermNamed(name).fullName + expr.drop(name.length), true)
}
- def asModule = safeModule(expr) map (_.tpe)
- def asExpr = {
+ def asModule: Type = getModuleIfDefined(expr).tpe
+ def asExpr: Type = {
val lhs = freshInternalVarName()
val line = "lazy val " + lhs + " =\n" + expr
interpret(line, true) match {
case IR.Success => typeOfExpression(lhs, true)
- case _ => None
+ case _ => NoType
}
}
- def evaluate() = {
+
+ def evaluate(): Type = {
typeOfExpressionDepth += 1
try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
finally typeOfExpressionDepth -= 1
@@ -107,26 +107,27 @@ trait ExprTyper {
if (!silent)
evaluate()
- None
+ NoType
}
}
// Since people will be giving us ":t def foo = 5" even though that is not an
// expression, we have a means of typing declarations too.
- private def typeOfDeclaration(code: String): Option[Type] = {
+ private def typeOfDeclaration(code: String): Type = {
repltrace("typeOfDeclaration(" + code + ")")
val obname = freshInternalVarName()
interpret("object " + obname + " {\n" + code + "\n}\n", true) match {
case IR.Success =>
val sym = symbolOfTerm(obname)
- if (sym == NoSymbol) None else {
+ if (sym == NoSymbol) NoType else {
// TODO: bitmap$n is not marked synthetic.
val decls = sym.tpe.decls.toList filterNot (x => x.isConstructor || x.isPrivate || (x.name.toString contains "$"))
repltrace("decls: " + decls)
- decls.lastOption map (decl => typeCleanser(sym, decl.name))
+ if (decls.isEmpty) NoType
+ else typeCleanser(sym, decls.last.name)
}
case _ =>
- None
+ NoType
}
}
// def compileAndTypeExpr(expr: String): Option[Typer] = {
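
This file is part of a wider change in the diff that drops Option[Type] in favour of NoType as a sentinel, with a small enrichment (orElse/andAlso, added to IMain further down) so lookups still chain the way the old flatMap/getOrElse code did. Below is a standalone sketch of that pattern that assumes nothing from the compiler; MiniType, NoMiniType and the three lookup functions are illustrative stand-ins for Type, NoType and typeOfTerm/asModule/asExpr.

    object SentinelSketch {
      sealed trait MiniType
      case object NoMiniType extends MiniType
      case class Named(name: String) extends MiniType

      // enrichment playing the role of IMain's ReplTypeOps
      class TypeOps(tp: MiniType) {
        def orElse(other: => MiniType): MiniType        = if (tp ne NoMiniType) tp else other
        def andAlso(fn: MiniType => MiniType): MiniType = if (tp eq NoMiniType) tp else fn(tp)
      }
      implicit def installTypeOps(tp: MiniType): TypeOps = new TypeOps(tp)

      // three lookups tried in order, mirroring typeOfTerm orElse asModule orElse asExpr
      def asTerm(id: String): MiniType   = if (id == "x") Named("Int") else NoMiniType
      def asModule(id: String): MiniType = if (id == "Predef") Named("Predef.type") else NoMiniType
      def asExpr(id: String): MiniType   = NoMiniType

      def typeOf(id: String): MiniType = asTerm(id) orElse asModule(id) orElse asExpr(id)

      def main(args: Array[String]) {
        println(typeOf("Predef"))  // Named(Predef.type)
        println(typeOf("zzz"))     // NoMiniType
      }
    }
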
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 391d5ab8ee..7c71438b98 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -11,6 +11,7 @@ import java.io.{ BufferedReader, FileReader }
import java.util.concurrent.locks.ReentrantLock
import scala.sys.process.Process
import session._
+import scala.util.Properties.{ jdkHome, javaVersion }
import scala.tools.util.{ Signallable, Javap }
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
@@ -51,7 +52,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp.reporter.printMessage(msg)
def isAsync = !settings.Yreplsync.value
- lazy val power = Power(this)
+ lazy val power = new Power(intp, new StdReplVals(this))
+ lazy val NoType = intp.global.NoType
// TODO
// object opt extends AestheticSettings
@@ -108,7 +110,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
class ILoopInterpreter extends IMain(settings, out) {
outer =>
- private class ThreadStoppingLineManager extends Line.Manager(parentClassLoader) {
+ private class ThreadStoppingLineManager(classLoader: ClassLoader) extends Line.Manager(classLoader) {
override def onRunaway(line: Line[_]): Unit = {
val template = """
|// She's gone rogue, captain! Have to take her out!
@@ -124,8 +126,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
override lazy val formatting = new Formatting {
def prompt = ILoop.this.prompt
}
- override protected def createLineManager(): Line.Manager =
- new ThreadStoppingLineManager
+ override protected def createLineManager(classLoader: ClassLoader): Line.Manager =
+ new ThreadStoppingLineManager(classLoader)
override protected def parentClassLoader =
settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
@@ -253,6 +255,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Power user commands */
lazy val powerCommands: List[LoopCommand] = List(
nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
+ nullary("vals", "gives information about the power mode repl vals", valsCommand),
cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
|:wrap
@@ -283,6 +286,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
history.asStrings takeRight 30 foreach echo
in.redrawLine()
}
+ private def valsCommand(): Result = power.valsDescription
private val typeTransforms = List(
"scala.collection.immutable." -> "immutable.",
@@ -375,14 +379,29 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
+ private def findToolsJar() = {
+ val jdkPath = Directory(jdkHome)
+ val jar = jdkPath / "lib" / "tools.jar" toFile;
+
+ if (jar isFile)
+ Some(jar)
+ else if (jdkPath.isDirectory)
+ jdkPath.deepFiles find (_.name == "tools.jar")
+ else None
+ }
private def addToolsJarToLoader() = {
- val javaHome = Directory(sys.env("JAVA_HOME"))
- val tools = javaHome / "lib" / "tools.jar"
- if (tools.isFile) {
- echo("Found tools.jar, adding for use by javap.")
- ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ val cl = findToolsJar match {
+ case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ case _ => intp.classLoader
+ }
+ if (Javap.isAvailable(cl)) {
+ repldbg(":javap available.")
+ cl
+ }
+ else {
+ repldbg(":javap unavailable: no tools.jar at " + jdkHome)
+ intp.classLoader
}
- else intp.classLoader
}
protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
@@ -418,9 +437,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// Still todo: modules.
private def typeCommand(line: String): Result = {
if (line.trim == "") ":type <expression>"
- else intp.typeOfExpression(line, false) match {
- case Some(tp) => intp.afterTyper(tp.toString)
- case _ => "" // the error message was already printed
+ else {
+ val tp = intp.typeOfExpression(line, false)
+ if (tp == NoType) "" // the error message was already printed
+ else intp.afterTyper(tp.toString)
}
}
private def warningsCommand(): Result = {
@@ -429,14 +449,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def javapCommand(line: String): Result = {
if (javap == null)
- return ":javap unavailable on this platform."
- if (line == "")
- return ":javap [-lcsvp] [path1 path2 ...]"
-
- javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
- else res.show()
- }
+ ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ else if (javaVersion startsWith "1.7")
+ ":javap not yet working with java 1.7"
+ else if (line == "")
+ ":javap [-lcsvp] [path1 path2 ...]"
+ else
+ javap(words(line)) foreach { res =>
+ if (res.isError) return "Failed: " + res.value
+ else res.show()
+ }
}
private def keybindingsCommand(): Result = {
if (in.keyBindings.isEmpty) "Key bindings unavailable."
@@ -465,13 +487,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
case wrapper :: Nil =>
intp.typeOfExpression(wrapper) match {
- case Some(PolyType(List(targ), MethodType(List(arg), restpe))) =>
+ case PolyType(List(targ), MethodType(List(arg), restpe)) =>
intp setExecutionWrapper intp.pathToTerm(wrapper)
"Set wrapper to '" + wrapper + "'"
- case Some(x) =>
- failMsg + "\nFound: " + x
- case _ =>
- failMsg + "\nFound: <unknown>"
+ case tp =>
+ failMsg + (
+ if (tp == g.NoType) "\nFound: <unknown>"
+ else "\nFound: <unknown>"
+ )
}
case _ => failMsg
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 8e4ff8aa37..de408f083f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -188,7 +188,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
import global._
import definitions.{
ScalaPackage, JavaLangPackage, PredefModule, RootClass,
- getClassIfDefined, getModuleIfDefined
+ getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass
}
private implicit def privateTreeOps(t: Tree): List[Tree] = {
@@ -197,6 +197,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}).toList
}
+ implicit def installReplTypeOps(tp: Type): ReplTypeOps = new ReplTypeOps(tp)
+ class ReplTypeOps(tp: Type) {
+ def orElse(other: => Type): Type = if (tp ne NoType) tp else other
+ def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+ }
+
// TODO: If we try to make naming a lazy val, we run into big time
// scalac unhappiness with what look like cycles. It has not been easy to
// reduce, but name resolution clearly takes different paths.
@@ -204,12 +210,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val global: imain.global.type = imain.global
} with Naming {
// make sure we don't overwrite their unwisely named res3 etc.
- override def freshUserVarName(): String = {
- val name = super.freshUserVarName()
- if (definedNameMap contains name) freshUserVarName()
+ def freshUserTermName(): TermName = {
+ val name = newTermName(freshUserVarName())
+ if (definedNameMap contains name) freshUserTermName()
else name
}
- def isInternalVarName(name: Name): Boolean = isInternalVarName("" + name)
+ def isUserTermName(name: Name) = isUserVarName("" + name)
+ def isInternalTermName(name: Name) = isInternalVarName("" + name)
}
import naming._
@@ -262,7 +269,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Create a line manager. Overridable. */
protected def noLineManager = ReplPropsKludge.noThreadCreation(settings)
- protected def createLineManager(): Line.Manager = new Line.Manager(_classLoader)
+ protected def createLineManager(classLoader: ClassLoader): Line.Manager = new Line.Manager(classLoader)
/** Instantiate a compiler. Overridable. */
protected def newCompiler(settings: Settings, reporter: Reporter) = {
@@ -297,7 +304,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
final def ensureClassLoader() {
if (_classLoader == null) {
_classLoader = makeClassLoader()
- _lineManager = if (noLineManager) null else createLineManager()
+ _lineManager = if (noLineManager) null else createLineManager(_classLoader)
}
}
def classLoader: AbstractFileClassLoader = {
@@ -359,7 +366,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private def mostRecentlyHandledTree: Option[Tree] = {
prevRequests.reverse foreach { req =>
req.handlers.reverse foreach {
- case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name) => return Some(x.member)
+ case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
case _ => ()
}
}
@@ -501,7 +508,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
trees.last match {
case _:Assign => // we don't want to include assignments
case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
- val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
+ val varName = if (synthetic) freshInternalVarName() else ("" + freshUserTermName())
val rewrittenLine = (
// In theory this would come out the same without the 1-specific test, but
// it's a cushion against any more sneaky parse-tree position vs. code mismatches:
@@ -643,7 +650,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def directBind(name: String, boundType: String, value: Any): IR.Result = {
val result = bind(name, boundType, value)
if (result == IR.Success)
- directlyBoundNames += name
+ directlyBoundNames += newTermName(name)
result
}
def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
@@ -651,7 +658,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def rebind(p: NamedParam): IR.Result = {
val name = p.name
- val oldType = typeOfTerm(name) getOrElse { return IR.Error }
+ val oldType = typeOfTerm(name) orElse { return IR.Error }
val newType = p.tpe
val tempName = freshInternalVarName()
@@ -666,7 +673,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
+ def bindValue(x: Any): IR.Result = bindValue("" + freshUserTermName(), x)
def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
@@ -789,7 +796,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* following accessPath into the outer one.
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = definitions.getModule(readPath) // the outermost wrapper
+ val readRoot = getRequiredModule(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot) { (sym, name) =>
if (name == "") sym else
lineAfterTyper(sym.info member newTermName(name))
@@ -824,7 +831,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
}
private def compileAndSaveRun(label: String, code: String) = {
- showCodeIfDebugging(code)
+ showCodeIfDebugging(packaged(code))
val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
lastRun = run
success
@@ -899,11 +906,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (!handlers.last.definesValue) ""
else handlers.last.definesTerm match {
case Some(vname) if typeOf contains vname =>
- """
- |lazy val %s = {
- | %s
- | %s
- |}""".stripMargin.format(lineRep.resultName, lineRep.printName, fullPath(vname))
+ "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
case _ => ""
}
// first line evaluates object to make sure constructor is run
@@ -949,7 +952,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
typesOfDefinedTerms
// compile the result-extraction object
- beSilentDuring {
+ beQuietDuring {
savingSettings(_.nowarn.value = true) {
lineRep compile ResultObjectSourceCode(handlers)
}
@@ -1039,16 +1042,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def requestHistoryForName(name: Name): List[Request] =
prevRequests.toList.reverse filter (_.definedNames contains name)
- def safeClass(name: String): Option[Symbol] = {
- try Some(definitions.getClass(newTypeName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
- def safeModule(name: String): Option[Symbol] = {
- try Some(definitions.getModule(newTermName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
def definitionForName(name: Name): Option[MemberHandler] =
requestForName(name) flatMap { req =>
req.handlers find (_.definedNames contains name)
@@ -1060,34 +1053,32 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def classOfTerm(id: String): Option[JClass] =
valueOfTerm(id) map (_.getClass)
- def typeOfTerm(id: String): Option[Type] = newTermName(id) match {
- case nme.ROOTPKG => Some(definitions.RootClass.tpe)
- case name => requestForName(name) flatMap (_.compilerTypeOf get name)
+ def typeOfTerm(id: String): Type = newTermName(id) match {
+ case nme.ROOTPKG => definitions.RootClass.tpe
+ case name => requestForName(name) flatMap (_.compilerTypeOf get name) getOrElse NoType
}
def symbolOfTerm(id: String): Symbol =
requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
- for {
- clazz <- classOfTerm(id)
- tpe <- runtimeTypeOfTerm(id)
- nonAnon <- clazz.supers find (!_.isScalaAnonymous)
- } yield {
- (nonAnon, tpe)
+ classOfTerm(id) flatMap { clazz =>
+ clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
+ (nonAnon, runtimeTypeOfTerm(id))
+ }
}
}
- def runtimeTypeOfTerm(id: String): Option[Type] = {
- for {
- tpe <- typeOfTerm(id)
- clazz <- classOfTerm(id)
- staticSym = tpe.typeSymbol
- runtimeSym <- safeClass(clazz.getName)
- if runtimeSym != staticSym
- if runtimeSym isSubClass staticSym
+ def runtimeTypeOfTerm(id: String): Type = {
+ typeOfTerm(id) andAlso { tpe =>
+ val clazz = classOfTerm(id) getOrElse { return NoType }
+ val staticSym = tpe.typeSymbol
+ val runtimeSym = getClassIfDefined(clazz.getName)
+
+ if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+ runtimeSym.info
+ else NoType
}
- yield runtimeSym.info
}
object replTokens extends {
@@ -1099,16 +1090,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
} with ExprTyper { }
def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
+ def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- }
+
def prettyPrint(code: String) =
replTokens.prettyPrint(exprTyper tokens code)
protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
- def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalVarName
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
def definedTypes = onlyTypes(allDefinedNames)
def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
@@ -1130,6 +1121,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val termname = newTypeName(name)
findName(termname) getOrElse getModuleIfDefined(termname)
}
+ def types[T: ClassManifest] : Symbol = types(classManifest[T].erasure.getName)
+ def terms[T: ClassManifest] : Symbol = terms(classManifest[T].erasure.getName)
+ def apply[T: ClassManifest] : Symbol = apply(classManifest[T].erasure.getName)
/** the previous requests this interpreter has processed */
private lazy val prevRequests = mutable.ListBuffer[Request]()
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index 10e3796404..d34ca8bbca 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -34,8 +34,9 @@ trait Imports {
def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
- def importedTerms = onlyTerms(importHandlers flatMap (_.importedNames))
- def importedTypes = onlyTypes(importHandlers flatMap (_.importedNames))
+ def allImportedNames = importHandlers flatMap (_.importedNames)
+ def importedTerms = onlyTerms(allImportedNames)
+ def importedTypes = onlyTypes(allImportedNames)
/** Types which have been wildcard imported, such as:
* val x = "abc" ; import x._ // type java.lang.String
@@ -49,10 +50,7 @@ trait Imports {
* into the compiler scopes.
*/
def sessionWildcards: List[Type] = {
- importHandlers flatMap {
- case x if x.importsWildcard => x.targetType
- case _ => None
- } distinct
+ importHandlers filter (_.importsWildcard) map (_.targetType) distinct
}
def wildcardTypes = languageWildcards ++ sessionWildcards
@@ -63,14 +61,15 @@ trait Imports {
def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedTermNamed(name: String) = importedTermSymbols find (_.name.toString == name)
+ def importedTermNamed(name: String): Symbol =
+ importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
*/
def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
- val session = importHandlers filter (_.targetType.isDefined) map { mh =>
- (mh.targetType.get.typeSymbol, mh.importedSymbols)
+ val session = importHandlers filter (_.targetType != NoType) map { mh =>
+ (mh.targetType.typeSymbol, mh.importedSymbols)
}
lang ++ session
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index 9c5299b633..d96e8b07fc 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -16,7 +16,7 @@ import collection.mutable.ListBuffer
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
- import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage, getClassIfDefined, getModuleIfDefined }
type ExecResult = Any
import intp.{ debugging, afterTyper }
@@ -24,14 +24,13 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
private var verbosity: Int = 0
def resetVerbosity() = verbosity = 0
- def getType(name: String, isModule: Boolean) = {
- val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
- try Some(f(name).tpe)
- catch { case _: MissingRequirementError => None }
- }
-
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
+ def getSymbol(name: String, isModule: Boolean) = (
+ if (isModule) getModuleIfDefined(name)
+ else getClassIfDefined(name)
+ )
+ def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
+ def typeOf(name: String) = getType(name, false)
+ def moduleOf(name: String) = getType(name, true)
trait CompilerCompletion {
def tp: Type
@@ -46,9 +45,9 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
private def anyMembers = AnyClass.tpe.nonPrivateMembers
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
- def tos(sym: Symbol) = sym.name.decode.toString
- def memberNamed(s: String) = members find (x => tos(x) == s)
- def hasMethod(s: String) = methods exists (x => tos(x) == s)
+ def tos(sym: Symbol): String = sym.decodedName
+ def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
+ def hasMethod(s: String) = memberNamed(s).isMethod
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
@@ -62,6 +61,13 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def packageNames = packages map tos
def aliasNames = aliases map tos
}
+
+ object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+ override def memberNamed(s: String) = NoSymbol
+ override def members = Nil
+ override def follow(s: String) = None
+ override def alternativesFor(id: String) = Nil
+ }
object TypeMemberCompletion {
def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
@@ -90,7 +96,8 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
}
def apply(tp: Type): TypeMemberCompletion = {
- if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+ if (tp eq NoType) NoTypeCompletion
+ else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
else new TypeMemberCompletion(tp)
}
def imported(tp: Type) = new ImportCompletion(tp)
@@ -118,7 +125,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
debugging(tp + " completions ==> ")(filtered(memberNames))
override def follow(s: String): Option[CompletionAware] =
- debugging(tp + " -> '" + s + "' ==> ")(memberNamed(s) map (x => TypeMemberCompletion(x.tpe)))
+ debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
override def alternativesFor(id: String): List[String] =
debugging(id + " alternatives ==> ") {
@@ -155,28 +162,29 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
object ids extends CompletionAware {
override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
// now we use the compiler for everything.
- override def follow(id: String) = {
- if (completions(0) contains id) {
- intp typeOfExpression id map { tpe =>
- def default = TypeMemberCompletion(tpe)
-
- // only rebinding vals in power mode for now.
- if (!isReplPower) default
- else intp runtimeClassAndTypeOfTerm id match {
- case Some((clazz, runtimeType)) =>
- val sym = intp.symbolOfTerm(id)
- if (sym.isStable) {
- val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
- TypeMemberCompletion(tpe, runtimeType, param)
- }
- else default
- case _ =>
- default
+ override def follow(id: String): Option[CompletionAware] = {
+ if (!completions(0).contains(id))
+ return None
+
+ val tpe = intp typeOfExpression id
+ if (tpe == NoType)
+ return None
+
+ def default = Some(TypeMemberCompletion(tpe))
+
+ // only rebinding vals in power mode for now.
+ if (!isReplPower) default
+ else intp runtimeClassAndTypeOfTerm id match {
+ case Some((clazz, runtimeType)) =>
+ val sym = intp.symbolOfTerm(id)
+ if (sym.isStable) {
+ val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+ Some(TypeMemberCompletion(tpe, runtimeType, param))
}
- }
+ else default
+ case _ =>
+ default
}
- else
- None
}
override def toString = "<repl ids> (%s)".format(completions(0).size)
}
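
NoTypeCompletion above is a null object: follow() always produces a completion value, and callers that still need Option semantics wrap it in Some and filter the sentinel back out, as the rewritten follow does with filterNot (_ eq NoTypeCompletion). A self-contained sketch of that idiom follows; CompletionLike, NoCompletion and the sample members are illustrative, not the repl's API.

    object CompletionSketch {
      trait CompletionLike { def completions: List[String] }

      // null object standing in for "nothing to complete"
      object NoCompletion extends CompletionLike { def completions = Nil }

      def completionFor(tpe: String): CompletionLike =
        if (tpe == "NoType") NoCompletion
        else new CompletionLike { def completions = List(tpe + ".toString", tpe + ".hashCode") }

      // callers that still want an Option filter the sentinel back out
      def follow(tpe: String): Option[CompletionLike] =
        Some(completionFor(tpe)) filterNot (_ eq NoCompletion)

      def main(args: Array[String]) {
        println(follow("String") map (_.completions))  // Some(List(String.toString, String.hashCode))
        println(follow("NoType"))                      // None
      }
    }
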
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index b64f14e929..c742ab89c0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -169,7 +169,7 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
- def targetType = intp.typeOfExpression("" + expr)
+ def targetType: Type = intp.typeOfExpression("" + expr)
override def isLegalTopLevel = true
def createImportForName(name: Name): String = {
@@ -199,10 +199,10 @@ trait MemberHandlers {
def importedSymbols = individualSymbols ++ wildcardSymbols
lazy val individualSymbols: List[Symbol] =
- atPickler(targetType.toList flatMap (tp => individualNames map (tp nonPrivateMember _)))
+ atPickler(individualNames map (targetType nonPrivateMember _))
lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) atPickler(targetType.toList flatMap (_.nonPrivateMembers))
+ if (importsWildcard) atPickler(targetType.nonPrivateMembers)
else Nil
/** Complete list of names imported by a wildcard */
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
index 7377953263..8e215cf63b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
@@ -84,7 +84,7 @@ trait Naming {
var x = 0
() => { x += 1 ; x }
}
- def freshUserVarName() = userVar()
+ def freshUserVarName() = userVar()
def freshInternalVarName() = internalVar()
def resetAllCreators() {
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index ac7c2b1ecc..835fbb5638 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -15,54 +15,12 @@ import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
-trait SharesGlobal {
- type GlobalType <: Global
- val global: GlobalType
-
- // This business gets really old:
- //
- // found : power.intp.global.Symbol
- // required: global.Symbol
- //
- // Have tried many ways to cast it aside, this is the current winner.
- // Todo: figure out a way to abstract over all the type members.
- type AnySymbol = Global#Symbol
- type AnyType = Global#Type
- type AnyName = Global#Name
- type AnyTree = Global#Tree
-
- type Symbol = global.Symbol
- type Type = global.Type
- type Name = global.Name
- type Tree = global.Tree
-
- implicit def upDependentSymbol(x: AnySymbol): Symbol = x.asInstanceOf[Symbol]
- implicit def upDependentType(x: AnyType): Type = x.asInstanceOf[Type]
- implicit def upDependentName(x: AnyName): Name = x.asInstanceOf[Name]
- implicit def upDependentTree(x: AnyTree): Tree = x.asInstanceOf[Tree]
-}
-
-object Power {
- def apply(intp: IMain): Power = apply(null, intp)
- def apply(repl: ILoop): Power = apply(repl, repl.intp)
- def apply(repl: ILoop, intp: IMain): Power =
- new Power(repl, intp) {
- type GlobalType = intp.global.type
- final val global: intp.global.type = intp.global
- }
-}
-
/** A class for methods to be injected into the intp in power mode.
*/
-abstract class Power(
- val repl: ILoop,
- val intp: IMain
-) extends SharesGlobal {
- import intp.{
- beQuietDuring, typeOfExpression, interpret, parse
- }
- import global._
- import definitions.{ getClassIfDefined, getModuleIfDefined }
+class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) {
+ import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
+ import intp.global._
+ import definitions.{ manifestToType, manifestToSymbol, getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -107,10 +65,7 @@ abstract class Power(
}
}
- class PackageSlurper(pkgName: String) extends SymSlurper {
- val pkgSymbol = getModuleIfDefined(pkgName)
- val modClass = pkgSymbol.moduleClass
-
+ class PackageSlurper(packageClass: Symbol) extends SymSlurper {
/** Looking for dwindling returns */
def droppedEnough() = unseenHistory.size >= 4 && {
unseenHistory takeRight 4 sliding 2 forall { it =>
@@ -121,20 +76,27 @@ abstract class Power(
def isRecur(sym: Symbol) = true
def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc")
- def isKeep(sym: Symbol) = sym.hasTransOwner(modClass)
+ def isKeep(sym: Symbol) = sym.hasTransOwner(packageClass)
def isFinished() = droppedEnough()
- def slurp() = apply(modClass)
+ def slurp() = {
+ if (packageClass.isPackageClass)
+ apply(packageClass)
+ else {
+ repldbg("Not a package class! " + packageClass)
+ Set()
+ }
+ }
}
private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp())
private def customInit = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
def banner = customBanner getOrElse """
- |** Power User mode enabled - BEEP BOOP SPIZ **
+ |** Power User mode enabled - BEEP WHIR GYVE **
|** :phase has been set to 'typer'. **
|** scala.tools.nsc._ has been imported **
- |** global._ and definitions._ also imported **
- |** Try :help, vals.<tab>, power.<tab> **
+ |** global._, definitions._ also imported **
+ |** Try :help, :vals, power.<tab> **
""".stripMargin.trim
private def initImports = List(
@@ -142,8 +104,9 @@ abstract class Power(
"scala.collection.JavaConverters._",
"intp.global.{ error => _, _ }",
"definitions.{ getClass => _, _ }",
- "power.Implicits._",
- "power.rutil._"
+ "power.rutil._",
+ "replImplicits._",
+ "treedsl.CODE._"
)
def init = customInit match {
@@ -155,88 +118,103 @@ abstract class Power(
*/
def unleash(): Unit = beQuietDuring {
// First we create the ReplVals instance and bind it to $r
- intp.bind("$r", new ReplVals(repl))
+ intp.bind("$r", replVals)
// Then we import everything from $r.
intp interpret ("import " + intp.pathToTerm("$r") + "._")
// And whatever else there is to do.
init.lines foreach (intp interpret _)
}
+ def valsDescription: String = {
+ def to_str(m: Symbol) = "%12s %s".format(
+ m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
+
+ ( rutil.info[ReplValsImpl].membersDeclared
+ filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
+ sortBy (_.decodedName)
+ map to_str
+ mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
+ )
+ }
trait LowPriorityInternalInfo {
implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
}
object InternalInfo extends LowPriorityInternalInfo { }
+
+ /** Addresses the problem of accidentally calling a method on Type
+ * when you're holding a Symbol and having the Symbol silently converted
+ * to the type of Symbol rather than the type of the thing the symbol
+ * represents: only one method, "?", is installed implicitly, and the
+ * rest of the conveniences live on the wrapper it returns.
+ */
+ trait LowPriorityInternalInfoWrapper {
+ implicit def apply[T: Manifest] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
+ }
+ object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
+
+ }
+ class InternalInfoWrapper[T: Manifest](value: Option[T] = None) {
+ def ? : InternalInfo[T] = new InternalInfo[T](value)
+ }
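+
+ // A standalone sketch of the single-method wrapper described above: the implicit
+ // conversion exposes only `?`, and the full inspection API lives behind it, so a
+ // look-alike method call cannot silently route through the conversion. Inspector,
+ // InspectorWrapper and shortClass below are illustrative, not the power-mode classes.
+ //
+ //   object WrapperSketch {
+ //     class Inspector[T](value: T) {
+ //       def runtimeClass = value.asInstanceOf[AnyRef].getClass
+ //       def shortClass   = (runtimeClass.getName split "[$.]").last
+ //     }
+ //     class InspectorWrapper[T](value: T) {
+ //       def ? : Inspector[T] = new Inspector(value)
+ //     }
+ //     implicit def inspect[T](x: T): InspectorWrapper[T] = new InspectorWrapper(x)
+ //
+ //     def main(args: Array[String]) {
+ //       println("abc".?.shortClass)   // String; `?` is the only member the conversion exposes
+ //     }
+ //   }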
/** Todos...
* translate manifest type arguments into applied types
* customizable symbol filter (had to hardcode no-spec to reduce noise)
*/
class InternalInfo[T: Manifest](value: Option[T] = None) {
- // Decided it was unwise to have implicit conversions via commonly
- // used type/symbol methods, because it's too easy to e.g. call
- // "x.tpe" where x is a Type, and rather than failing you get the
- // Type representing Types#Type (or Manifest, or whatever.)
- private def tpe = tpe_
- private def symbol = symbol_
- private def name = name_
-
- // Would love to have stuff like existential types working,
- // but very unfortunately those manifests just stuff the relevant
- // information into the toString method. Boo.
- private def manifestToType(m: Manifest[_]): Type = m match {
- case x: AnyValManifest[_] =>
- getClassIfDefined("scala." + x).tpe
- case _ =>
- val name = m.erasure.getName
- if (name endsWith nme.MODULE_SUFFIX_STRING)
- getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING).tpe
- else {
- val sym = getClassIfDefined(name)
- val args = m.typeArguments
-
- if (args.isEmpty) sym.tpe
- else typeRef(NoPrefix, sym, args map manifestToType)
- }
- }
-
- def symbol_ : Symbol = getClassIfDefined(erasure.getName)
- def tpe_ : Type = manifestToType(man)
- def name_ : Name = symbol.name
- def companion = symbol.companionSymbol
- def info = symbol.info
- def module = symbol.moduleClass
- def owner = symbol.owner
- def owners = symbol.ownerChain drop 1
- def defn = symbol.defString
-
- def declares = members filter (_.owner == symbol)
- def inherits = members filterNot (_.owner == symbol)
- def types = members filter (_.name.isTypeName)
- def methods = members filter (_.isMethod)
- def overrides = declares filter (_.isOverride)
- def inPackage = owners find (x => x.isPackageClass || x.isPackage) getOrElse definitions.RootPackage
-
- def man = manifest[T]
- def erasure = man.erasure
- def members = tpe.members filterNot (_.name.toString contains "$mc")
- def allMembers = tpe.members
- def bts = info.baseTypeSeq.toList
- def btsmap = bts map (x => (x, x.decls.toList)) toMap
- def pkgName = Option(erasure.getPackage) map (_.getName)
- def pkg = pkgName map getModuleIfDefined getOrElse NoSymbol
- def pkgmates = pkg.tpe.members
- def pkgslurp = pkgName match {
- case Some(name) => new PackageSlurper(name) slurp()
- case _ => Set()
- }
- def ? = this
-
- def whoHas(name: String) = bts filter (_.decls exists (_.name.toString == name))
- def <:<[U: Manifest](other: U) = tpe <:< InternalInfo[U].tpe
- def lub[U: Manifest](other: U) = global.lub(List(tpe, InternalInfo[U].tpe))
- def glb[U: Manifest](other: U) = global.glb(List(tpe, InternalInfo[U].tpe))
+ private def newInfo[U: Manifest](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
+ private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
+ private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
+
+ /** Standard noise reduction filter. */
+ def excludeMember(s: Symbol) = (
+ isSpecialized(s)
+ || isImplClass(s)
+ || s.isAnonOrRefinementClass
+ || s.isAnonymousFunction
+ )
+ def symbol = manifestToSymbol(fullManifest)
+ def tpe = manifestToType(fullManifest)
+ def name = symbol.name
+ def companion = symbol.companionSymbol
+ def info = symbol.info
+ def moduleClass = symbol.moduleClass
+ def owner = symbol.owner
+ def owners = symbol.ownerChain drop 1
+ def signature = symbol.defString
+
+ def decls = info.decls
+ def declsOverride = membersDeclared filter (_.isOverride)
+ def declsOriginal = membersDeclared filterNot (_.isOverride)
+
+ def members = membersUnabridged filterNot excludeMember
+ def membersUnabridged = tpe.members
+ def membersDeclared = members filterNot excludeMember
+ def membersInherited = members filterNot (membersDeclared contains _)
+ def memberTypes = members filter (_.name.isTypeName)
+ def memberMethods = members filter (_.isMethod)
+
+ def pkg = symbol.enclosingPackage
+ def pkgName = pkg.fullName
+ def pkgClass = symbol.enclosingPackageClass
+ def pkgMembers = pkg.info.members filterNot excludeMember
+ def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
+ def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
+
+ def fullManifest = manifest[T]
+ def erasure = fullManifest.erasure
+ def shortClass = erasure.getName split "[$.]" last
+
+ def baseClasses = tpe.baseClasses
+ def baseClassDecls = baseClasses map (x => (x, x.info.decls.toList.sortBy(_.name.toString))) toMap
+ def ancestors = baseClasses drop 1
+ def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
+ def baseTypes = tpe.baseTypeSeq.toList
+
+ def <:<[U: Manifest](other: U) = tpe <:< newInfo(other).tpe
+ def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
+ def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
- def shortClass = erasure.getName split "[$.]" last
override def toString = value match {
case Some(x) => "%s (%s)".format(x, shortClass)
case _ => erasure.getName
@@ -336,11 +314,17 @@ abstract class Power(
def slurp(): String = io.Streamable.slurp(url)
def pp() { intp prettyPrint slurp() }
}
-
- protected trait Implicits1 {
+ class RichSymbolList(syms: List[Symbol]) {
+ def sigs = syms map (_.defString)
+ def infos = syms map (_.info)
+ }
+
+ trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
+
+ implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
class RichSymbol(sym: Symbol) {
@@ -357,7 +341,7 @@ abstract class Power(
implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
implicit lazy val powerTypeOrdering: Ordering[Type] = Ordering[Symbol] on (_.typeSymbol)
- implicit def replInternalInfo[T: Manifest](x: T): InternalInfo[T] = new InternalInfo[T](Some(x))
+ implicit def replInternalInfo[T: Manifest](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
new MultiPrettifierClass[T](xs.toSeq)
@@ -366,8 +350,10 @@ abstract class Power(
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
+
+ implicit def liftToTermName(s: String): TermName = newTermName(s)
+ implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
- object Implicits extends Implicits2 { }
trait ReplUtilities {
def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
@@ -396,17 +382,13 @@ abstract class Power(
}
lazy val rutil: ReplUtilities = new ReplUtilities { }
-
- lazy val phased: Phased = new Phased with SharesGlobal {
- type GlobalType = Power.this.global.type
- final val global: Power.this.global.type = Power.this.global
- }
+ lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def context(code: String) = analyzer.rootContext(unit(code))
def source(code: String) = new BatchSourceFile("<console>", code)
def unit(code: String) = new CompilationUnit(source(code))
def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String): Type = intp.typeOfExpression(id) getOrElse NoType
+ def typeOf(id: String) = intp.typeOfExpression(id)
override def toString = """
|** Power mode status **
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
index dac20ad348..130af990ad 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -14,7 +14,10 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
if (intp.isInitializeComplete) {
- if (intp.totalSilence) ()
+ if (intp.totalSilence) {
+ if (isReplTrace)
+ super.printMessage("[silent] " + msg)
+ }
else super.printMessage(msg)
}
else Console.println("[init] " + msg)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 2f2489b242..6e5dec4205 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -6,15 +6,68 @@
package scala.tools.nsc
package interpreter
-final class ReplVals(r: ILoop) {
- lazy val repl = r
- lazy val intp = r.intp
- lazy val power = r.power
- lazy val reader = r.in
- lazy val vals = this
- lazy val global = intp.global
- lazy val isettings = intp.isettings
- lazy val completion = reader.completion
- lazy val history = reader.history
- lazy val phased = power.phased
+/** A class which the repl utilizes to expose predefined objects.
+ * The base implementation is empty; the standard repl implementation
+ * is StdReplVals.
+ */
+abstract class ReplVals { }
+
+class StdReplVals(final val r: ILoop) extends ReplVals {
+ final lazy val repl = r
+ final lazy val intp = r.intp
+ final lazy val power = r.power
+ final lazy val reader = r.in
+ final lazy val vals = this
+ final lazy val global: intp.global.type = intp.global
+ final lazy val isettings = intp.isettings
+ final lazy val completion = reader.completion
+ final lazy val history = reader.history
+ final lazy val phased = power.phased
+ final lazy val analyzer = global.analyzer
+
+ final lazy val treedsl = new { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+ final lazy val typer = analyzer.newTyper(
+ analyzer.rootContext(
+ power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
+ )
+ )
+
+ final lazy val replImplicits = new power.Implicits2 {
+ import intp.global._
+
+ private val manifestFn = ReplVals.mkManifestToType[intp.global.type](global)
+ implicit def mkManifestToType(sym: Symbol) = manifestFn(sym)
+ }
+
+ def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
+}
+
+object ReplVals {
+ /** Latest attempt to work around the challenge of foo.global.Type
+ * not being seen as the same type as bar.global.Type even though
+ * the globals are the same. Dependent method types to the rescue.
+ */
+ def mkManifestToType[T <: Global](global: T) = {
+ import global._
+ import definitions._
+
+ /** We can't use definitions.manifestToType directly because we're passing
+ * it to map and the compiler refuses to perform eta expansion on a method
+ * with a dependent return type. (Can this be relaxed?) To get around this
+ * I have this forwarder which widens the type and then cast the result back
+ * to the dependent type.
+ */
+ def manifestToType(m: OptManifest[_]): Global#Type =
+ definitions.manifestToType(m)
+
+ class AppliedTypeFromManifests(sym: Symbol) {
+ def apply[M](implicit m1: Manifest[M]): Type =
+ appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x).asInstanceOf[Type]))
+
+ def apply[M1, M2](implicit m1: Manifest[M1], m2: Manifest[M2]): Type =
+ appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type]))
+ }
+
+ (sym: Symbol) => new AppliedTypeFromManifests(sym)
+ }
}
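
The comment in mkManifestToType above is about path-dependent types: each Global instance owns its own Type, so results have to be typed against the caller's instance. A self-contained sketch of the dependent-method-type idea (Universe and typeOf are made-up names; on 2.9-era compilers this style needed -Ydependent-method-types):

    class Universe {
      class Type
      def mkType: Type = new Type
    }

    object DependentDemo {
      // The result type mentions the parameter `u`, so each caller gets
      // a u.Type for its own universe rather than a bare Universe#Type.
      def typeOf(u: Universe): u.Type = u.mkType

      def main(args: Array[String]): Unit = {
        val u1 = new Universe
        val u2 = new Universe
        val t1: u1.Type = typeOf(u1)     // fine
        // val t2: u2.Type = typeOf(u1)  // does not compile: u1.Type is not u2.Type
        println(t1)
      }
    }
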
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 494eb4d50b..b51cf1228c 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -211,7 +211,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
var start = 0
while (true) {
val index = path.indexOf(separator, start)
- assert(index < 0 || start < index)
+ assert(index < 0 || start < index, ((path, directory, start, index)))
val name = path.substring(start, if (index < 0) length else index)
file = getFile(file, name, if (index < 0) directory else true)
if ((file eq null) || index < 0) return file
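
Predef.assert takes its message as a by-name Any, so the tuple added above is only built when the assertion actually fails. A small standalone illustration:

    object AssertDemo {
      def checkSeparator(path: String, start: Int, index: Int): Unit =
        // the diagnostic tuple costs nothing on the happy path
        assert(index < 0 || start < index, ((path, start, index)))
    }
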
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
index 69db23923d..d970d0e9c9 100644
--- a/src/compiler/scala/tools/nsc/io/MsilFile.scala
+++ b/src/compiler/scala/tools/nsc/io/MsilFile.scala
@@ -13,4 +13,6 @@ import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
* uniformly, as AbstractFiles.
*/
class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
-}
\ No newline at end of file
+}
+
+object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/NoAbstractFile.scala b/src/compiler/scala/tools/nsc/io/NoAbstractFile.scala
new file mode 100644
index 0000000000..36cf42d7ec
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/NoAbstractFile.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import java.io.InputStream
+
+/** A distinguished object so you can avoid both null
+ * and Option.
+ */
+object NoAbstractFile extends AbstractFile {
+ def absolute: AbstractFile = this
+ def container: AbstractFile = this
+ def create(): Unit = ???
+ def delete(): Unit = ???
+ def file: JFile = null
+ def input: InputStream = null
+ def isDirectory: Boolean = false
+ def iterator: Iterator[AbstractFile] = Iterator.empty
+ def lastModified: Long = 0L
+ def lookupName(name: String, directory: Boolean): AbstractFile = null
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = null
+ def name: String = ""
+ def output: java.io.OutputStream = null
+ def path: String = ""
+ override def toByteArray = Array[Byte]()
+}
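
NoAbstractFile above is a null object: a real instance standing for "no file", so callers can compare with eq instead of threading null or Option everywhere. A standalone sketch of the pattern (VFile and NoVFile are made-up names):

    import java.io.InputStream

    abstract class VFile {
      def name: String
      def input: InputStream
    }

    // the distinguished "absent" instance
    object NoVFile extends VFile {
      def name = ""
      def input = null
    }

    object VFileDemo {
      def describe(f: VFile): String =
        if (f eq NoVFile) "<no file>" else f.name

      def main(args: Array[String]): Unit =
        println(describe(NoVFile))   // prints <no file>
    }
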
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index 9efff089ba..a1b8e5e4d5 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -48,7 +48,7 @@ object Path {
implicit def jfile2path(jfile: JFile): Path = apply(jfile)
// java 7 style, we don't use it yet
- // object AccessMode extends Enumeration("AccessMode") {
+ // object AccessMode extends Enumeration {
// val EXECUTE, READ, WRITE = Value
// }
// def checkAccess(modes: AccessMode*): Boolean = {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 742e9e03ca..0c94e40d68 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -126,11 +126,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (treeInfo.firstConstructor(stats) == EmptyTree) makeConstructor(List()) :: stats
else stats)
- def makeParam(name: String, tpt: Tree) =
- ValDef(Modifiers(Flags.JAVA | Flags.PARAM), newTermName(name), tpt, EmptyTree)
+ def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
+ makeParam(nme.syntheticParamName(count), tpt)
+ def makeParam(name: String, tpt: Tree): ValDef =
+ makeParam(newTypeName(name), tpt)
+ def makeParam(name: TermName, tpt: Tree): ValDef =
+ ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
def makeConstructor(formals: List[Tree]) = {
- val vparams = formals.zipWithIndex map { case (p, i) => makeParam("x$" + (i + 1), p) }
+ val vparams = mapWithIndex(formals)((p, i) => makeSyntheticParam(i + 1, p))
DefDef(Modifiers(Flags.JAVA), nme.CONSTRUCTOR, List(), List(vparams), TypeTree(), blankExpr)
}
@@ -547,7 +551,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(newTermName("runtime")), tpnme.AnnotationDefaultATTR), List(List()))
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), List(List()))
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
@@ -794,9 +798,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
accept(INTERFACE)
val pos = in.currentPos
val name = identForType()
- val parents = List(scalaDot(newTypeName("Annotation")),
- Select(javaLangDot(newTermName("annotation")), newTypeName("Annotation")),
- scalaDot(newTypeName("ClassfileAnnotation")))
+ val parents = List(scalaDot(tpnme.Annotation),
+ Select(javaLangDot(nme.annotation), tpnme.Annotation),
+ scalaDot(tpnme.ClassfileAnnotation))
val (statics, body) = typeBody(AT, name)
def getValueMethodType(tree: Tree) = tree match {
case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
@@ -838,18 +842,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
val predefs = List(
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("values"), List(),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.values, List(),
List(List()),
arrayOf(enumType),
blankExpr),
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("valueOf"), List(),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
List(List(makeParam("x", TypeTree(StringClass.tpe)))),
enumType,
blankExpr))
accept(RBRACE)
val superclazz =
- AppliedTypeTree(javaLangDot(newTypeName("Enum")), List(enumType))
+ AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
ClassDef(mods, name, List(),
makeTemplate(superclazz :: interfaces, body))
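
makeConstructor above switches from zipWithIndex-then-map to a mapWithIndex helper. A plain-Scala sketch of such a helper (an assumed analogue of the compiler's Collections utility, not its actual code):

    object MapWithIndexDemo {
      // like xs.zipWithIndex map f.tupled, without building the list of pairs
      def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
        @annotation.tailrec
        def loop(rest: List[A], i: Int, acc: List[B]): List[B] = rest match {
          case Nil     => acc.reverse
          case x :: tl => loop(tl, i + 1, f(x, i) :: acc)
        }
        loop(xs, 0, Nil)
      }

      def main(args: Array[String]): Unit = {
        // synthetic parameter names in the x$1, x$2, ... style
        val params = mapWithIndex(List("Int", "String"))((tp, i) => "x$" + (i + 1) + ": " + tp)
        println(params)   // List(x$1: Int, x$2: String)
      }
    }
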
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 61864f0c8a..d81f05cd51 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -252,7 +252,8 @@ trait Matrix extends MatrixAdditions {
{
val n = if (name == null) cunit.freshTermName("temp") else name
// careful: pos has special meaning
- recordSyntheticSym(owner.newVariable(pos, n) setInfo tpe setFlag (SYNTHETIC.toLong /: flags)(_|_))
+ val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
+ recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
}
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 11d829eadb..9d4c9b4411 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -54,7 +54,7 @@ trait ParallelMatching extends ast.TreeDSL
}
def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(owner.pos, labelName)
+ val labelSym = owner.newLabel(labelName, owner.pos)
val labelInfo = MethodType(params, restpe)
LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 420fba911b..18409cfffe 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -36,6 +36,9 @@ trait Patterns extends ast.TreeDSL {
// case _ => NoSymbol
// }
+ private lazy val dummyMethod =
+ NoSymbol.newTermSymbol(newTermName("matching$dummy"))
+
// Fresh patterns
def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
@@ -191,9 +194,9 @@ trait Patterns extends ast.TreeDSL {
// As yet I can't testify this is doing any good relative to using
// tpt.tpe, but it doesn't seem to hurt either.
private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = typeRef(NoPrefix, ConsClass, List(packedType))
- private lazy val listRef = typeRef(NoPrefix, ListClass, List(packedType))
- private lazy val seqRef = typeRef(NoPrefix, SeqClass, List(packedType))
+ private lazy val consRef = appliedType(ConsClass.typeConstructor, List(packedType))
+ private lazy val listRef = appliedType(ListClass.typeConstructor, List(packedType))
+ private lazy val seqRef = appliedType(SeqClass.typeConstructor, List(packedType))
private def thisSeqRef = {
val tc = (tree.tpe baseType SeqClass).typeConstructor
@@ -205,7 +208,6 @@ trait Patterns extends ast.TreeDSL {
private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
case _ =>
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "matching$dummy")
val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
Apply(TypeTree(consType), List(hd, tl)) setType consRef
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 36227c1052..da913a1601 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -28,7 +28,7 @@ trait Plugins {
val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
- // Lach plugin must only be instantiated once. A common pattern
+ // Each plugin must only be instantiated once. A common pattern
// is to register annotation checkers during object construction, so
// creating multiple plugin instances will leave behind stale checkers.
classes map (Plugin.instantiate(_, this))
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 967b582f11..c76a04c6ba 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -47,20 +47,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
/** Prints the message with the given position indication. */
def printMessage(posIn: Position, msg: String) {
- val pos = if (posIn eq null) NoPosition
- else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
- else posIn
- pos match {
- case FakePos(fmsg) =>
- printMessage(fmsg+" "+msg)
- case NoPosition =>
- printMessage(msg)
- case _ =>
- val buf = new StringBuilder(msg)
- val file = pos.source.file
- printMessage((if (shortname) file.name else file.path)+":"+pos.line+": "+msg)
- printSourceLine(pos)
- }
+ printMessage(Position.formatMessage(posIn, msg, shortname))
}
def print(pos: Position, msg: String, severity: Severity) {
printMessage(pos, clabel(severity) + msg)
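
The per-reporter formatting removed above is delegated to a single helper; a standalone sketch of what such a formatter does (Pos and formatMessage here are illustrative, not the compiler's Position API):

    object FormatDemo {
      // a stand-in for a source position: an optional file path plus a line number
      final case class Pos(file: Option[String], line: Int)

      def formatMessage(pos: Pos, msg: String, shortname: Boolean): String =
        pos.file match {
          case Some(path) =>
            val shown = if (shortname) path.substring(path.lastIndexOf('/') + 1) else path
            shown + ":" + pos.line + ": " + msg
          case None => msg
        }

      def main(args: Array[String]): Unit =
        println(formatMessage(Pos(Some("src/Foo.scala"), 12), "not found: value x", shortname = true))
    }
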
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 12306606e4..f19a285d7c 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -47,14 +47,23 @@ abstract class Reporter {
finally incompleteHandler = saved
}
- var cancelled = false
- def hasErrors = ERROR.count > 0 || cancelled
- def hasWarnings = WARNING.count > 0
+ var cancelled = false
+ def hasErrors = ERROR.count > 0 || cancelled
+ def hasWarnings = WARNING.count > 0
- def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
- def warning(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, WARNING, false)) }
- def error(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, ERROR, false)) }
- def incompleteInputError(pos: Position, msg: String ) {
+ /** For sending a message which should not be labeled as a warning/error,
+ * but also shouldn't require -verbose to be visible.
+ */
+ def echo(msg: String): Unit = info(NoPosition, msg, true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+
+ /** Informational messages, suppressed unless -verbose or force=true. */
+ def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
+
+ /** Warnings and errors. */
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
}
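
The new echo methods above are defined as info with force = true: visible without -verbose, yet never counted as warnings or errors. A self-contained analogue (MiniReporter is an illustrative name):

    abstract class MiniReporter(verbose: Boolean) {
      protected def display(msg: String): Unit

      // informational: suppressed unless verbose or forced
      def info(msg: String, force: Boolean): Unit =
        if (force || verbose) display(msg)

      // always visible, but neither a warning nor an error
      def echo(msg: String): Unit = info(msg, force = true)
    }

    object EchoDemo extends MiniReporter(verbose = false) {
      protected def display(msg: String): Unit = println(msg)

      def main(args: Array[String]): Unit = {
        info("only with -verbose", force = false)  // suppressed
        echo("always shown")                       // printed
      }
    }
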
diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
index 800af55861..f55d0684c8 100644
--- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
@@ -13,8 +13,6 @@ import scala.tools.util.AbstractTimer
* timings.
*/
class ReporterTimer(reporter: Reporter) extends AbstractTimer {
-
def issue(msg: String, duration: Long) =
reporter.info(null, "[" + msg + " in " + duration + "ms]", false)
-
}
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index b468e7c0af..f99d1399c0 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -95,8 +95,7 @@ class MutableSettings(val errorFn: String => Unit)
*/
def copy(): Settings = {
val s = new Settings()
- val xs = userSetSettings flatMap (_.unparse)
- s.processArguments(xs.toList, true)
+ s.processArguments(recreateArgs, true)
s
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 7fcfb6fc6d..e949cb3eb2 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -67,6 +67,8 @@ trait ScalaSettings extends AbsScalaSettings
val future = BooleanSetting ("-Xfuture", "Turn on future language features.")
val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
+ val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+ val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
@@ -94,6 +96,7 @@ trait ScalaSettings extends AbsScalaSettings
val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions.") .
withPostSetHook(set => List(YmethodInfer, overrideObjects) foreach (_.value = set.value))
// YdepMethTpes, YvirtClasses,
+ val Xmacros = BooleanSetting ("-Xmacros", "Enable macros.")
/** Compatibility stubs for options whose value name did
* not previously match the option name.
@@ -137,6 +140,7 @@ trait ScalaSettings extends AbsScalaSettings
val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs.")
val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
+ val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val skip = PhasesSetting ("-Yskip", "Skip")
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
@@ -162,7 +166,8 @@ trait ScalaSettings extends AbsScalaSettings
val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yreifycopypaste =
BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification actions.")
+ val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification.")
+ val Ymacrodebug = BooleanSetting ("-Ymacro-debug", "Trace macro-related activities: generation of synthetics, expansion, exceptions.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") .
withPostSetHook(_ => interpreter.replProps.debug setValue true)
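
A tiny program whose compilation exercises the new -Xlog-implicit-conversions flag added above; the compiler should report the inserted conversion (the exact message wording is not guaranteed here):

    // compile with: scalac -Xlog-implicit-conversions ImplicitDemo.scala
    object ImplicitDemo {
      implicit def intToString(i: Int): String = i.toString
      val s: String = 42   // the intToString conversion is inserted here
    }
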
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 340e1d1d08..942ec1fa86 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -39,7 +39,7 @@ abstract class SymbolLoaders {
*/
def enterClass(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
val owner = realOwner(root)
- val clazz = owner.newClass(NoPosition, newTypeName(name))
+ val clazz = owner.newClass(newTypeName(name))
clazz setInfo completer
enterIfNew(owner, clazz, completer)
}
@@ -49,7 +49,7 @@ abstract class SymbolLoaders {
*/
def enterModule(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
val owner = realOwner(root)
- val module = owner.newModule(NoPosition, newTermName(name))
+ val module = owner.newModule(newTermName(name))
module setInfo completer
module.moduleClass setInfo moduleClassLoader
enterIfNew(owner, module, completer)
@@ -199,7 +199,7 @@ abstract class SymbolLoaders {
return
}
}
- val pkg = root.newPackage(NoPosition, newTermName(name))
+ val pkg = root.newPackage(newTermName(name))
pkg.moduleClass.setInfo(completer)
pkg.setInfo(pkg.moduleClass.tpe)
root.info.decls.enter(pkg)
@@ -207,7 +207,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
- root.setInfo(new PackageClassInfoType(new Scope(), root))
+ root.setInfo(new PackageClassInfoType(newScope, root))
val sourcepaths = classpath.sourcepaths
for (classRep <- classpath.classes if platform.doLoad(classRep)) {
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index 9fbf649525..a47bfda8c1 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package symtab
import ast.{Trees, TreePrinters, DocComments}
-
import util._
-abstract class SymbolTable extends reflect.internal.SymbolTable
+abstract class SymbolTable extends reflect.internal.SymbolTable
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index e67ce90cfa..a8083d7a2d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -238,9 +238,9 @@ abstract class ClassfileParser {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
//assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name.subName(0, name.length - 1), true)
+ f = forceMangledName(name dropRight 1, true)
if (f == NoSymbol)
- f = definitions.getModule(name.subName(0, name.length - 1))
+ f = definitions.getModule(name dropRight 1)
} else {
val origName = nme.originalName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
@@ -255,8 +255,11 @@ abstract class ClassfileParser {
f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
- f = if (tpe.isInstanceOf[MethodType]) owner.newMethod(owner.pos, name).setInfo(tpe)
- else owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE)
+ f = tpe match {
+ case MethodType(_, _) => owner.newMethod(name, owner.pos)
+ case _ => owner.newVariable(name, owner.pos)
+ }
+ f setInfo tpe
log("created fake member " + f.fullName)
}
// println("\townerTpe.decls: " + ownerTpe.decls)
@@ -283,7 +286,7 @@ abstract class ClassfileParser {
if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
val name = getName(in.getChar(start + 1).toInt)
// create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(ownerTpe.typeSymbol.pos, name)
+ val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
var tpe = getType(dummySym, in.getChar(start + 3).toInt)
// fix the return type, which is blindly set to the class currently parsed
@@ -365,6 +368,13 @@ abstract class ClassfileParser {
case arr: Type => Constant(arr)
}
}
+
+ private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
+ val decodedLength = ByteCodecs.decode(bytes)
+ val arr = new Array[Byte](decodedLength)
+ System.arraycopy(bytes, 0, arr, 0, decodedLength)
+ arr
+ }
def getBytes(index: Int): Array[Byte] = {
if (index <= 0 || len <= index) errorBadIndex(index)
@@ -372,18 +382,17 @@ abstract class ClassfileParser {
if (value eq null) {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
+ val len = in.getChar(start + 1)
val bytes = new Array[Byte](len)
- Array.copy(in.buf, start + 3, bytes, 0, len)
- val decodedLength = ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ System.arraycopy(in.buf, start + 3, bytes, 0, len)
+ value = getSubArray(bytes)
values(index) = value
}
value
}
def getBytes(indices: List[Int]): Array[Byte] = {
- assert(!indices.isEmpty)
+ assert(!indices.isEmpty, indices)
var value = values(indices.head).asInstanceOf[Array[Byte]]
if (value eq null) {
val bytesBuffer = ArrayBuffer.empty[Byte]
@@ -394,9 +403,7 @@ abstract class ClassfileParser {
val len = in.getChar(start + 1)
bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
}
- val bytes = bytesBuffer.toArray
- val decodedLength = ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ value = getSubArray(bytesBuffer.toArray)
values(indices.head) = value
}
value
@@ -435,38 +442,36 @@ abstract class ClassfileParser {
/** Return the class symbol of the given name. */
def classNameToSymbol(name: Name): Symbol = {
- def loadClassSymbol(name: Name) = {
- val s = name.toString
- val file = global.classPath findSourceFile s getOrElse {
- MissingRequirementError.notFound("class " + s)
+ def loadClassSymbol(name: Name): Symbol = {
+ val file = global.classPath findSourceFile ("" +name) getOrElse {
+ warning("Class " + name + " not found - continuing with a stub.")
+ return NoSymbol.newClass(name.toTypeName)
}
- val completer = new global.loaders.ClassfileLoader(file)
+ val completer = new global.loaders.ClassfileLoader(file)
var owner: Symbol = definitions.RootClass
- var sym: Symbol = NoSymbol
- var ss: String = null
- var start = 0
- var end = s indexOf '.'
+ var sym: Symbol = NoSymbol
+ var ss: Name = null
+ var start = 0
+ var end = name indexOf '.'
+
while (end > 0) {
- ss = s.substring(start, end)
+ ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
- sym = owner.newPackage(NoPosition, ss) setInfo completer
+ sym = owner.newPackage(ss) setInfo completer
sym.moduleClass setInfo completer
owner.info.decls enter sym
}
owner = sym.moduleClass
start = end + 1
- end = s.indexOf('.', start)
+ end = name.indexOf('.', start)
}
- ss = s substring start
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newClass(NoPosition, newTypeName(ss)) setInfo completer
- owner.info.decls enter sym
- if (settings.debug.value && settings.verbose.value)
- println("loaded "+sym+" from file "+file)
+ ss = name.subName(0, start)
+ owner.info.decls lookup ss orElse {
+ sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
+ debuglog("loaded "+sym+" from file "+file)
+ sym
}
- sym
}
def lookupClass(name: Name) = try {
@@ -495,7 +500,7 @@ abstract class ClassfileParser {
def parseClass() {
val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
- var sflags = toScalaFlags(jflags, isClass = true)
+ var sflags = toScalaClassFlags(jflags)
var nameIdx = in.nextChar
externalName = pool.getClassName(nameIdx)
val c = if (externalName.toString.indexOf('$') < 0) pool.getClassSymbol(nameIdx) else clazz
@@ -531,8 +536,8 @@ abstract class ClassfileParser {
addEnclosingTParams(clazz)
parseInnerClasses() // also sets the isScala / isScalaRaw / hasMeta flags, see r15956
// get the class file parser to reuse scopes.
- instanceDefs = new Scope
- staticDefs = new Scope
+ instanceDefs = newScope
+ staticDefs = newScope
val classInfo = ClassInfoType(parseParents, instanceDefs, clazz)
val staticInfo = ClassInfoType(List(), staticDefs, statics)
@@ -600,13 +605,13 @@ abstract class ClassfileParser {
def parseField() {
val jflags = in.nextChar
- var sflags = toScalaFlags(jflags, isField = true)
- if ((sflags & PRIVATE) != 0L && !global.settings.XO.value) {
+ var sflags = toScalaFieldFlags(jflags)
+ if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(NoPosition, name).setFlag(sflags)
+ val sym = getOwner(jflags).newValue(name, NoPosition, sflags)
val isEnum = (jflags & JAVA_ACC_ENUM) != 0
sym setInfo {
@@ -631,8 +636,8 @@ abstract class ClassfileParser {
def parseMethod() {
val jflags = in.nextChar.toInt
- var sflags = toScalaFlags(jflags)
- if (isPrivate(jflags) && !global.settings.XO.value) {
+ var sflags = toScalaMethodFlags(jflags)
+ if (isPrivate(jflags) && !global.settings.optimise.value) {
val name = pool.getName(in.nextChar)
if (name == nme.CONSTRUCTOR)
sawPrivateConstructor = true
@@ -640,11 +645,11 @@ abstract class ClassfileParser {
} else {
if ((jflags & JAVA_ACC_BRIDGE) != 0)
sflags |= BRIDGE
- if ((sflags & PRIVATE) != 0L && global.settings.XO.value) {
+ if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) {
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(NoPosition, name).setFlag(sflags)
+ val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
var info = pool.getType(sym, (in.nextChar))
if (name == nme.CONSTRUCTOR)
info match {
@@ -674,7 +679,7 @@ abstract class ClassfileParser {
var index = 0
val end = sig.length
def accept(ch: Char) {
- assert(sig(index) == ch)
+ assert(sig(index) == ch, (sig(index), ch))
index += 1
}
def subName(isDelimiter: Char => Boolean): Name = {
@@ -682,8 +687,6 @@ abstract class ClassfileParser {
while (!isDelimiter(sig(index))) { index += 1 }
sig.subName(start, index)
}
- def existentialType(tparams: List[Symbol], tp: Type): Type =
- if (tparams.isEmpty) tp else ExistentialType(tparams, tp)
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig(index); index += 1
tag match {
@@ -716,10 +719,15 @@ abstract class ClassfileParser {
index += 1
val bounds = variance match {
case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
- case '-' => TypeBounds.lower(sig2type(tparams, skiptvs))
+ case '-' =>
+ val tp = sig2type(tparams, skiptvs)
+ // sig2type seems to return AnyClass regardless of the situation:
+ // we don't want Any as a LOWER bound.
+ if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
- val newtparam = sym.newExistential(sym.pos, newTypeName("?"+i)) setInfo bounds
+ val newtparam = sym.newExistential(newTypeName("?"+i), sym.pos) setInfo bounds
existentials += newtparam
xs += newtparam.tpe //@M should probably be .tpeHK
i += 1
@@ -728,21 +736,21 @@ abstract class ClassfileParser {
}
}
accept('>')
- assert(xs.length > 0)
- existentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
+ assert(xs.length > 0, tp)
+ newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
} else if (classSym.isMonomorphicType) {
tp
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = typeRef(pre, classSym, eparams.map(_.tpe))
- val res = existentialType(eparams, t)
+ val res = newExistentialType(eparams, t)
if (settings.debug.value && settings.verbose.value)
println("raw type " + classSym + " -> " + res)
res
}
case tp =>
- assert(sig(index) != '<')
+ assert(sig(index) != '<', tp)
tp
}
@@ -768,7 +776,7 @@ abstract class ClassfileParser {
appliedType(definitions.ArrayClass.tpe, List(elemtp))
case '(' =>
// we need a method symbol. given in line 486 by calling getType(methodSym, ..)
- assert(sym ne null)
+ assert(sym ne null, sig)
val paramtypes = new ListBuffer[Type]()
while (sig(index) != ')') {
paramtypes += objToAny(sig2type(tparams, skiptvs))
@@ -801,12 +809,12 @@ abstract class ClassfileParser {
var tparams = classTParams
val newTParams = new ListBuffer[Symbol]()
if (sig(index) == '<') {
- assert(sym != null)
+ assert(sym != null, sig)
index += 1
val start = index
while (sig(index) != '>') {
val tpname = subName(':'.==).toTypeName
- val s = sym.newTypeParameter(NoPosition, tpname)
+ val s = sym.newTypeParameter(tpname)
tparams = tparams + (tpname -> s)
sig2typeBounds(tparams, true)
newTParams += s
@@ -966,27 +974,22 @@ abstract class ClassfileParser {
def parseScalaSigBytes: Option[ScalaSigBytes] = {
val tag = in.nextByte.toChar
- assert(tag == STRING_TAG)
+ assert(tag == STRING_TAG, tag)
Some(ScalaSigBytes(pool getBytes in.nextChar))
}
- def parseScalaLongSigBytes: Option[ScalaSigBytes] = try {
+ def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
val tag = in.nextByte.toChar
- assert(tag == ARRAY_TAG)
+ assert(tag == ARRAY_TAG, tag)
val stringCount = in.nextChar
val entries =
for (i <- 0 until stringCount) yield {
val stag = in.nextByte.toChar
- assert(stag == STRING_TAG)
+ assert(stag == STRING_TAG, stag)
in.nextChar.toInt
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
- catch {
- case e: Throwable =>
- e.printStackTrace
- throw e
- }
/** Parse and return a single annotation. If it is malformed,
* return None.
@@ -1071,27 +1074,27 @@ abstract class ClassfileParser {
}
def enterClassAndModule(entry: InnerClassEntry, completer: global.loaders.SymbolLoader, jflags: Int) {
- val name = entry.originalName
- var sflags = toScalaFlags(jflags, isClass = true)
+ val name = entry.originalName
+ var sflags = toScalaClassFlags(jflags)
+ val owner = getOwner(jflags)
+ val scope = getScope(jflags)
+ val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
+ val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
- val innerClass = getOwner(jflags).newClass(NoPosition, name.toTypeName).setInfo(completer).setFlag(sflags)
- val innerModule = getOwner(jflags).newModule(NoPosition, name.toTermName).setInfo(completer).setFlag(sflags)
innerModule.moduleClass setInfo global.loaders.moduleClassLoader
-
- getScope(jflags) enter innerClass
- getScope(jflags) enter innerModule
+ scope enter innerClass
+ scope enter innerModule
val decls = innerClass.enclosingPackage.info.decls
- val e = decls.lookupEntry(className(entry.externalName))
- if (e ne null) {
- //println("removing " + e)
- decls.unlink(e)
- }
- val e1 = decls.lookupEntry(className(entry.externalName).toTypeName)
- if (e1 ne null) {
- //println("removing " + e1)
- decls.unlink(e1)
+ def unlinkIfPresent(name: Name) = {
+ val e = decls lookupEntry name
+ if (e ne null)
+ decls unlink e
}
+
+ val cName = className(entry.externalName)
+ unlinkIfPresent(cName.toTermName)
+ unlinkIfPresent(cName.toTypeName)
}
for (entry <- innerClasses.values) {
@@ -1205,7 +1208,12 @@ abstract class ClassfileParser {
atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
else
getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members)
+
+ assert(s ne NoSymbol,
+ "" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " +
+ " while parsing " + ((in.file, busy)) +
+ sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members
+ )
s
case None =>
@@ -1261,13 +1269,13 @@ abstract class ClassfileParser {
if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
// See ticket #1687 for an example of when topLevelClass is NoSymbol: it
// apparently occurs when processing v45.3 bytecode.
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
// protected in java means package protected. #3946
if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
}
@inline private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
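
The variance handling in sig2type above avoids recording Any as a lower bound when translating Java wildcards: only Any itself satisfies T >: Any, so an empty bound is the more faithful reading. A small illustration:

    object LowerBoundDemo {
      // T >: Any constrains T to be Any, so the bound carries no useful information
      def superOfAny[T >: Any](x: T): T = x

      def main(args: Array[String]): Unit = {
        val a = superOfAny("hello")   // T is inferred as Any
        println(a)
      }
    }
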
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index a203b8a78b..7d42dabc08 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -25,10 +25,7 @@ abstract class ICodeReader extends ClassfileParser {
var instanceCode: IClass = null // the ICode class for the current symbol
var staticCode: IClass = null // the ICode class static members
- var method: IMethod = _ // the current IMethod
-
- val nothingName = newTermName(SCALA_NOTHING)
- val nullName = newTermName(SCALA_NULL)
+ var method: IMethod = NoIMethod // the current IMethod
var isScalaModule = false
/** Read back bytecode for the given class symbol. It returns
@@ -63,8 +60,7 @@ abstract class ICodeReader extends ClassfileParser {
this.staticCode = new IClass(staticModule)
val jflags = in.nextChar
val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
- var sflags = toScalaFlags(jflags, true)
- if ((sflags & DEFERRED) != 0L) sflags = sflags & ~DEFERRED | ABSTRACT
+ val sflags = toScalaClassFlags(jflags) // what, this is never used??
val c = pool getClassSymbol in.nextChar
parseInnerClasses()
@@ -89,29 +85,24 @@ abstract class ICodeReader extends ClassfileParser {
val jflags = in.nextChar
val name = pool getName in.nextChar
val owner = getOwner(jflags)
- val dummySym = owner.newMethod(owner.pos, name) setFlag javaToScalaFlags(jflags)
+ val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
try {
- val ch = in.nextChar
- var tpe = pool.getType(dummySym, ch)
+ val ch = in.nextChar
+ val tpe = pool.getType(dummySym, ch)
if ("<clinit>" == name.toString)
(jflags, NoSymbol)
else {
val owner = getOwner(jflags)
- var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe));
+ var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(old => old.tpe =:= tpe);
+ sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
log("Could not find symbol for " + name + ": " + tpe)
log(owner.info.member(name).tpe + " : " + tpe)
- if (name.toString == "toMap")
- tpe = pool.getType(dummySym, ch)
- if (field)
- sym = owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE | javaToScalaFlags(jflags))
- else
- sym = dummySym.setInfo(tpe)
- owner.info.decls.enter(sym)
+ sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym setInfoAndEnter tpe
log("added " + sym + ": " + sym.tpe)
}
(jflags, sym)
@@ -122,19 +113,6 @@ abstract class ICodeReader extends ClassfileParser {
}
}
- private def javaToScalaFlags(flags: Int): Long = {
- import ch.epfl.lamp.fjbg.JAccessFlags._
-
- var res = 0L
- if ((flags & ACC_PRIVATE) != 0) res |= Flags.PRIVATE
- if ((flags & ACC_PROTECTED) != 0) res |= Flags.PROTECTED
- if ((flags & ACC_FINAL) != 0) res |= Flags.FINAL
- if ((flags & ACC_ABSTRACT) != 0) res |= Flags.DEFERRED
- if ((flags & ACC_SYNTHETIC) != 0) res |= Flags.SYNTHETIC
-
- res
- }
-
/** Checks if `tp1` is the same type as `tp2`, modulo implicit methods.
* We don't care about the distinction between implicit and explicit
* methods as this point, and we can't get back the information from
@@ -182,9 +160,9 @@ abstract class ICodeReader extends ClassfileParser {
}
override def classNameToSymbol(name: Name) = {
- val sym = if (name == nothingName)
+ val sym = if (name == fulltpnme.RuntimeNothing)
definitions.NothingClass
- else if (name == nullName)
+ else if (name == fulltpnme.RuntimeNull)
definitions.NullClass
else if (nme.isImplClassName(name)) {
val iface = definitions.getClass(nme.interfaceName(name))
@@ -194,7 +172,7 @@ abstract class ICodeReader extends ClassfileParser {
}
else if (nme.isModuleName(name)) {
val strippedName = nme.stripModuleSuffix(name)
- val sym = forceMangledName(strippedName.decode, true)
+ val sym = forceMangledName(newTermName(strippedName.decode), true)
if (sym == NoSymbol) definitions.getModule(strippedName)
else sym
@@ -456,7 +434,7 @@ abstract class ICodeReader extends ClassfileParser {
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
size += padding
in.bp += padding
- assert((pc + size % 4) != 0)
+ assert((pc + size % 4) != 0, pc)
/* var byte1 = in.nextByte; size += 1;
while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
@@ -476,7 +454,7 @@ abstract class ICodeReader extends ClassfileParser {
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
size += padding
in.bp += padding
- assert((pc + size % 4) != 0)
+ assert((pc + size % 4) != 0, pc)
val default = pc + in.nextInt; size += 4
val npairs = in.nextInt; size += 4
var tags: List[List[Int]] = Nil
@@ -629,7 +607,7 @@ abstract class ICodeReader extends ClassfileParser {
skipAttributes()
code.toBasicBlock
- assert(method.code ne null)
+ assert(method.hasCode, method)
// reverse parameters, as they were prepended during code generation
method.params = method.params.reverse
@@ -692,7 +670,7 @@ abstract class ICodeReader extends ClassfileParser {
mutable.Map(jmpTargets.toSeq map (_ -> code.newBlock): _*)
val blocks = makeBasicBlocks
- var otherBlock: BasicBlock = null
+ var otherBlock: BasicBlock = NoBasicBlock
var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
@@ -991,7 +969,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(NoPosition, "loc" + idx).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -1008,9 +986,9 @@ abstract class ICodeReader extends ClassfileParser {
/** add a method param with the given index. */
def enterParam(idx: Int, kind: TypeKind) = {
- val sym = method.symbol.newVariable(NoPosition, "par" + idx).setInfo(kind.toType)
+ val sym = method.symbol.newVariable(newTermName("par" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, true)
- assert(!locals.isDefinedAt(idx))
+ assert(!locals.isDefinedAt(idx), locals(idx))
locals += (idx -> List((l, kind)))
l
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
index 728593abe7..eb8e7a14a5 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
@@ -49,7 +49,7 @@ abstract class MetaParser{
val sym = locals.lookup(newTypeName(str))
if (sym != NoSymbol) sym.tpe
else {
- val tp = definitions.getClass(str).tpe;
+ val tp = definitions.getRequiredClass(str).tpe;
if (token != "[") tp
else {
val args = new ListBuffer[Type];
@@ -68,7 +68,7 @@ abstract class MetaParser{
else if (token == "-") { nextToken(); Flags.CONTRAVARIANT }
else 0;
assert(token startsWith "?", token)
- val sym = owner.newTypeParameter(NoPosition, newTypeName(token)).setFlag(vflag)
+ val sym = owner.newTypeParameter(newTypeName(token)).setFlag(vflag)
nextToken()
val lo =
if (token == ">") { nextToken(); parseType() }
@@ -108,7 +108,7 @@ abstract class MetaParser{
}
protected def parseClass() {
- locals = new Scope
+ locals = newScope
def parse(): Type = {
nextToken()
if (token == "[") {
@@ -130,7 +130,7 @@ abstract class MetaParser{
protected def parseMethod() {
val globals = locals
- locals = if (locals eq null) new Scope else new Scope(locals)
+ locals = if (locals eq null) newScope else newNestedScope(locals)
def parse(): Type = {
nextToken();
if (token == "[") PolyType(parseTypeParams(), parse())
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index e7d08ef849..25ae6f33d2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -59,7 +59,7 @@ abstract class Pickler extends SubComponent {
}
}
// If there are any erroneous types in the tree, then we will crash
- // when we pickle it: so let's report an erorr instead. We know next
+ // when we pickle it: so let's report an error instead. We know next
// to nothing about what happened, but our supposition is a lot better
// than "bad type: <error>" in terms of explanatory power.
for (t <- unit.body ; if t.isErroneous) {
@@ -145,11 +145,8 @@ abstract class Pickler extends SubComponent {
val (locals, globals) = sym.children partition (_.isLocalClass)
val children =
if (locals.isEmpty) globals
- else {
- val localChildDummy = sym.newClass(sym.pos, tpnme.LOCAL_CHILD)
- localChildDummy.setInfo(ClassInfoType(List(sym.tpe), EmptyScope, localChildDummy))
- globals + localChildDummy
- }
+ else globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, List(sym.tpe), EmptyScope, pos = sym.pos)
+
putChildren(sym, children.toList sortBy (_.sealedSortName))
}
for (annot <- sym.annotations filter (ann => ann.isStatic && !ann.isErroneous) reverse)
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 95ef799720..e11a5a4ad9 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -34,6 +34,8 @@ abstract class TypeParser {
protected var busy: Boolean = false // lock to detect recursive reads
+ private implicit def stringToTermName(s: String): TermName = newTermName(s)
+
private object unpickler extends UnPickler {
val global: TypeParser.this.global.type = TypeParser.this.global
}
@@ -153,8 +155,8 @@ abstract class TypeParser {
val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append "Boxed")
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append "MgdPtr")
+ clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
+ clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
/* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
before metadata for members are parsed) so that clazzMgdPtr can be found by getClRType. */
@@ -163,7 +165,7 @@ abstract class TypeParser {
clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
/* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
because there's no metadata-level representation for a "boxed valuetype" */
- val instanceDefsMgdPtr = new Scope
+ val instanceDefsMgdPtr = newScope
val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
clazzMgdPtr.setFlag(flags)
clazzMgdPtr.setInfo(classInfoMgdPtr)
@@ -173,7 +175,7 @@ abstract class TypeParser {
// first pass
for (tvarCILDef <- typ.getSortedTVars() ) {
val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO)
- val tpsym = clazz.newTypeParameter(NoPosition, tpname)
+ val tpsym = clazz.newTypeParameter(tpname)
classTParams.put(tvarCILDef.Number, tpsym)
newTParams += tpsym
// TODO wouldn't the following also be needed later, i.e. during getCLRType
@@ -194,8 +196,8 @@ abstract class TypeParser {
}
}
/* END CLR generics (snippet 2) */
- instanceDefs = new Scope
- staticDefs = new Scope
+ instanceDefs = newScope
+ staticDefs = newScope
val classInfoAsInMetadata = {
val ifaces: Array[MSILType] = typ.getInterfaces()
@@ -210,7 +212,7 @@ abstract class TypeParser {
}
// methods, properties, events, fields are entered in a moment
if (canBeTakenAddressOf) {
- val instanceDefsBoxed = new Scope
+ val instanceDefsBoxed = newScope
ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
} else
ClassInfoType(parents.toList, instanceDefs, clazz)
@@ -257,8 +259,8 @@ abstract class TypeParser {
|| ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
{
val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
- val nclazz = statics.newClass(NoPosition, ntype.Name.toTypeName)
- val nmodule = statics.newModule(NoPosition, ntype.Name)
+ val nclazz = statics.newClass(ntype.Name.toTypeName)
+ val nmodule = statics.newModule(ntype.Name)
nclazz.setInfo(loader)
nmodule.setInfo(loader)
staticDefs.enter(nclazz)
@@ -447,7 +449,7 @@ abstract class TypeParser {
// first pass
for (mvarCILDef <- method.getSortedMVars() ) {
val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO)
- val mtpsym = methodSym.newTypeParameter(NoPosition, mtpname)
+ val mtpsym = methodSym.newTypeParameter(mtpname)
methodTParams.put(mvarCILDef.Number, mtpsym)
newMethodTParams += mtpsym
// TODO wouldn't the following also be needed later, i.e. during getCLRType
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 4e8e678dc8..b4ec8a23ce 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -77,31 +77,56 @@ abstract class AddInterfaces extends InfoTransform {
def implClassPhase = currentRun.erasurePhase.next
/** Return the implementation class of a trait; create a new one if one does not yet exist */
- def implClass(iface: Symbol): Symbol = implClassMap.getOrElse(iface, {
- atPhase(implClassPhase) {
- val implName = nme.implClassName(iface.name)
- var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol
- if (impl != NoSymbol && settings.XO.value) {
- log("unlinking impl class " + impl)
- iface.owner.info.decls.unlink(impl)
- impl = NoSymbol
- }
- if (impl == NoSymbol) {
- impl = iface.cloneSymbolImpl(iface.owner)
- impl.name = implName
- impl.sourceFile = iface.sourceFile
- if (iface.owner.isClass)
- iface.owner.info.decls enter impl
+ def implClass(iface: Symbol): Symbol = {
+ iface.info
+
+ implClassMap.getOrElse(iface, {
+ atPhase(implClassPhase) {
+ log("%s.implClass == %s".format(iface, iface.implClass))
+ val implName = nme.implClassName(iface.name)
+ var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol
+
+ // !!! Why does forcing the impl's info here lead to a crash?
+ // See test case pos/trait-force-info.scala for a minimization.
+ // It crashes like this:
+ //
+ // [log lazyvals] trait ContextTrees.implClass == class ContextTrees$class
+ // error: java.lang.AssertionError: assertion failed: (scala.tools.nsc.typechecker.Contexts$NoContext$,scala.tools.nsc.typechecker.Contexts,NoContext$,trait Contexts in package typechecker) / while parsing (/scala/trunk/build/pack/lib/scala-compiler.jar(scala/tools/nsc/interactive/ContextTrees$class.class),Some(class ContextTrees$class))trait Contexts.NoContext$ linkedModule: <none>List()
+
+ val originalImpl = impl
+ if (impl != NoSymbol) {
+ // Unlink a pre-existing symbol only if the implementation class is
+ // visible on the compilation classpath. In general this is true under
+ // -optimise and not otherwise, but the classpath can use arbitrary
+ // logic so the classpath must be queried.
+ if (classPath.context.isValidName(implName + ".class")) {
+ log("unlinking impl class " + impl)
+ iface.owner.info.decls.unlink(impl)
+ impl = NoSymbol
+ }
+ else log("not unlinking existing " + impl + " as the impl class is not visible on the classpath.")
+ }
+ if (impl == NoSymbol) {
+ impl = iface.cloneSymbolImpl(iface.owner)
+ impl.name = implName
+ impl.sourceFile = iface.sourceFile
+ if (iface.owner.isClass)
+ iface.owner.info.decls enter impl
+ }
+ if (currentRun.compiles(iface)) currentRun.symSource(impl) = iface.sourceFile
+ impl setPos iface.pos
+ impl.flags = iface.flags & ~(INTERFACE | lateINTERFACE) | IMPLCLASS
+ impl setInfo new LazyImplClassType(iface)
+ implClassMap(iface) = impl
+ debuglog(
+ "generating impl class " + impl.debugLocationString + " in " + iface.owner + (
+ if (originalImpl == NoSymbol) "" else " (cloned from " + originalImpl.debugLocationString + ")"
+ )
+ )
+ impl
}
- if (currentRun.compiles(iface)) currentRun.symSource(impl) = iface.sourceFile
- impl setPos iface.pos
- impl.flags = iface.flags & ~(INTERFACE | lateINTERFACE) | IMPLCLASS
- impl setInfo new LazyImplClassType(iface)
- implClassMap(iface) = impl
- debuglog("generating impl class " + impl + " in " + iface.owner)//debug
- impl
- }
- })
+ })
+ }
/** A lazy type to set the info of an implementation class
* The parents of an implementation class for trait iface are:
@@ -119,10 +144,13 @@ abstract class AddInterfaces extends InfoTransform {
* given the decls ifaceDecls of its interface.
*/
private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = {
- val decls = new Scope
+ val decls = newScope
if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol)
- decls enter (implClass.newMethod(implClass.pos, nme.MIXIN_CONSTRUCTOR)
- setInfo MethodType(List(), UnitClass.tpe))
+ decls enter (
+ implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
+ setInfo MethodType(Nil, UnitClass.tpe)
+ )
+
for (sym <- ifaceDecls.iterator) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
@@ -148,15 +176,15 @@ abstract class AddInterfaces extends InfoTransform {
*/
def mixinToImplClass(tp: Type): Type = erasure(sym,
tp match { //@MATN: no normalize needed (comes after erasure)
- case TypeRef(pre, sym, args) if sym.needsImplClass =>
- typeRef(pre, implClass(sym), args)
+ case TypeRef(pre, sym, _) if sym.needsImplClass =>
+ typeRef(pre, implClass(sym), Nil)
case _ =>
tp
}
)
def implType(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, _) =>
- assert(phase == implClassPhase)
+ assert(phase == implClassPhase, tp)
ClassInfoType(
ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe,
implDecls(sym, decls),
@@ -247,8 +275,8 @@ abstract class AddInterfaces extends InfoTransform {
addMixinConstructorDef(clazz, templ.body map implMemberDef))
.setSymbol(clazz.newLocalDummy(templ.pos))
}
- new ChangeOwnerTraverser(templ.symbol.owner, clazz)(
- new ChangeOwnerTraverser(templ.symbol, templ1.symbol)(templ1))
+ templ1.changeOwner(templ.symbol.owner -> clazz, templ.symbol -> templ1.symbol)
+ templ1
}
def implClassDefs(trees: List[Tree]): List[Tree] = {
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 98345dd01e..50e6139e65 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -22,9 +22,35 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
+ private def clearStatics() {
+ newStaticMembers.clear()
+ newStaticInits.clear()
+ symbolsStoredAsStatic.clear()
+ }
+ private def savingStatics[T](body: => T): T = {
+ val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
+ val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
+ val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
+ val result = body
+
+ clearStatics()
+ newStaticMembers ++= savedNewStaticMembers
+ newStaticInits ++= savedNewStaticInits
+ symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
+
+ result
+ }
+ private def transformTemplate(tree: Tree) = {
+ val Template(parents, self, body) = tree
+ clearStatics()
+ val newBody = transformTrees(body)
+ val templ = treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
+ try addStaticInits(templ) // postprocess to include static ctors
+ finally clearStatics()
+ }
private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
/** Kludge to provide a safe fix for #4560:
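
The savingStatics helper above snapshots the three mutable collections, runs the body, and then restores them. A standalone sketch of the same save-and-restore idiom around a single buffer (names are illustrative):

    import scala.collection.mutable

    object SavingDemo {
      private val pending = mutable.Buffer.empty[String]

      // run body, then put the previous contents of `pending` back
      def savingPending[T](body: => T): T = {
        val saved = pending.clone()
        try body
        finally {
          pending.clear()
          pending ++= saved
        }
      }

      def main(args: Array[String]): Unit = {
        pending += "outer"
        savingPending { pending += "inner"; println(pending) }  // ArrayBuffer(outer, inner)
        println(pending)                                        // ArrayBuffer(outer)
      }
    }
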
@@ -60,7 +86,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
private def typedWithPos(pos: Position)(tree: Tree) =
- localTyper typed { atPos(pos)(tree) }
+ localTyper.typedPos(pos)(tree)
/** A value class is defined to be only Java-compatible values: unit is
* not part of it, as opposed to isValueClass in definitions. scala.Int is
@@ -71,7 +97,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** The boxed type if it's a primitive; identity otherwise.
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
-
+
override def transform(tree: Tree): Tree = tree match {
/* Transforms dynamic calls (i.e. calls to methods that are undefined
@@ -106,6 +132,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* refinement, where the refinement defines a parameter based on a
* type variable. */
case ad@ApplyDynamic(qual0, params) =>
+ if (settings.logReflectiveCalls.value)
+ unit.echo(ad.pos, "method invocation uses reflection")
+
val typedPos = typedWithPos(ad.pos) _
assert(ad.symbol.isPublic)
@@ -113,16 +142,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### CREATING THE METHOD CACHE ### */
- def addStaticVariableToClass(forName: String, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
- val varSym = (
- currentClass.newVariable(ad.pos, mkTerm(forName))
- setFlag (PRIVATE | STATIC | SYNTHETIC)
- setInfo (forType)
+ def addStaticVariableToClass(forName: TermName, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
+ val flags = PRIVATE | STATIC | SYNTHETIC | (
+ if (isFinal) FINAL else 0
)
- if (isFinal) varSym setFlag FINAL
- else varSym.addAnnotation(VolatileAttr)
+
+ val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+ if (!isFinal)
+ varSym.addAnnotation(VolatileAttr)
- currentClass.info.decls enter varSym
val varDef = typedPos( VAL(varSym) === forInit )
newStaticMembers append transform(varDef)
@@ -134,13 +162,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def addStaticMethodToClass(forName: String, forArgsTypes: List[Type], forResultType: Type)
(forBody: Pair[Symbol, List[Symbol]] => Tree): Symbol = {
- val methSym = currentClass.newMethod(ad.pos, mkTerm(forName))
- .setFlag(STATIC | SYNTHETIC)
- methSym.setInfo(MethodType(methSym.newSyntheticValueParams(forArgsTypes), forResultType))
- currentClass.info.decls enter methSym
+ val methSym = currentClass.newMethod(mkTerm(forName), ad.pos, STATIC | SYNTHETIC)
+ val params = methSym.newSyntheticValueParams(forArgsTypes)
+ methSym setInfoAndEnter MethodType(params, forResultType)
- val methDef = typedPos( DefDef(methSym, { forBody(Pair(methSym, methSym.paramss(0))) }) )
+ val methDef = typedPos( DefDef(methSym, forBody(methSym -> params)) )
newStaticMembers append transform(methDef)
methSym
@@ -165,9 +192,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
case Pair(reflMethodSym, List(forReceiverSym)) =>
(REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))
}
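Editor's note: the names moving into nme (reflParamsCacheName, reflMethodName, ...) label the pieces of the generated reflection cache. A hedged plain-Scala approximation of what that generated code does at runtime, not the emitted code itself:

    import java.lang.ref.SoftReference
    import java.lang.reflect.Method

    // Look up the Method once per receiver class and keep it behind a
    // SoftReference so the JVM may reclaim it under memory pressure.
    final class ReflMethodCache(name: String, paramTypes: Array[Class[_]]) {
      @volatile private var cache: SoftReference[(Class[_], Method)] = null

      def methodFor(receiver: Class[_]): Method = {
        val cached = if (cache eq null) null else cache.get
        cached match {
          case (cls, m) if cls eq receiver => m        // cache hit for this class
          case _ =>
            val m = receiver.getMethod(name, paramTypes: _*)
            cache = new SoftReference((receiver, m))   // (re)fill the cache
            m
        }
      }
    }
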
@@ -194,18 +221,18 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
val reflMethodCacheSym: Symbol =
- addStaticVariableToClass("reflMethod$Cache", MethodClass.tpe, NULL, false)
+ addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
val reflClassCacheSym: Symbol =
- addStaticVariableToClass("reflClass$Cache", SoftReferenceClass.tpe, NULL, false)
+ addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
def isCacheEmpty(receiver: Symbol): Tree =
reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
case Pair(reflMethodSym, List(forReceiverSym)) =>
BLOCK(
IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
@@ -241,15 +268,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = addStaticVariableToClass("reflPoly$Cache", SoftReferenceClass.tpe, mkNewPolyCache, false)
+ val reflPolyCacheSym: Symbol = (
+ addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
+ )
def getPolyCache = gen.mkCast(fn(safeREF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe)
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe)
{ case Pair(reflMethodSym, List(forReceiverSym)) =>
- val methodSym = reflMethodSym.newVariable(ad.pos, mkTerm("method")) setInfo MethodClass.tpe
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
BLOCK(
IF (getPolyCache OBJ_EQ NULL) THEN (safeREF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
@@ -271,59 +300,14 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
- val testForNumber: Tree => Tree = {
- qual1 => (qual1 IS_OBJ BoxedNumberClass.tpe) OR (qual1 IS_OBJ BoxedCharacterClass.tpe)
- }
- val testForBoolean: Tree => Tree = {
- qual1 => (qual1 IS_OBJ BoxedBooleanClass.tpe)
- }
- val testForNumberOrBoolean: Tree => Tree = {
- qual1 => testForNumber(qual1) OR testForBoolean(qual1)
- }
-
- def postfixTest(name: Name): Option[(String, Tree => Tree)] = {
- var runtimeTest: Tree => Tree = testForNumber
- val newName = name match {
- case nme.UNARY_! => runtimeTest = testForBoolean ; "takeNot"
- case nme.UNARY_+ => "positive"
- case nme.UNARY_- => "negate"
- case nme.UNARY_~ => "complement"
- case nme.toByte => "toByte"
- case nme.toShort => "toShort"
- case nme.toChar => "toCharacter"
- case nme.toInt => "toInteger"
- case nme.toLong => "toLong"
- case nme.toFloat => "toFloat"
- case nme.toDouble => "toDouble"
- case _ => return None
- }
- Some((newName, runtimeTest))
- }
- def infixTest(name: Name): Option[(String, Tree => Tree)] = {
- val (newName, runtimeTest) = name match {
- case nme.OR => ("takeOr", testForNumberOrBoolean)
- case nme.XOR => ("takeXor", testForNumberOrBoolean)
- case nme.AND => ("takeAnd", testForNumberOrBoolean)
- case nme.EQ => ("testEqual", testForNumberOrBoolean)
- case nme.NE => ("testNotEqual", testForNumberOrBoolean)
- case nme.ADD => ("add", testForNumber)
- case nme.SUB => ("subtract", testForNumber)
- case nme.MUL => ("multiply", testForNumber)
- case nme.DIV => ("divide", testForNumber)
- case nme.MOD => ("takeModulo", testForNumber)
- case nme.LSL => ("shiftSignedLeft", testForNumber)
- case nme.LSR => ("shiftLogicalRight", testForNumber)
- case nme.ASR => ("shiftSignedRight", testForNumber)
- case nme.LT => ("testLessThan", testForNumber)
- case nme.LE => ("testLessOrEqualThan", testForNumber)
- case nme.GE => ("testGreaterOrEqualThan", testForNumber)
- case nme.GT => ("testGreaterThan", testForNumber)
- case nme.ZOR => ("takeConditionalOr", testForBoolean)
- case nme.ZAND => ("takeConditionalAnd", testForBoolean)
- case _ => return None
- }
- Some((newName, runtimeTest))
- }
+ def testForName(name: Name): Tree => Tree = t => (
+ if (nme.CommonOpNames(name))
+ gen.mkMethodCall(getMember(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean), t :: Nil)
+ else if (nme.BooleanOpNames(name))
+ t IS_OBJ BoxedBooleanClass.tpe
+ else
+ gen.mkMethodCall(getMember(BoxesRunTimeClass, nme.isBoxedNumber), t :: Nil)
+ )
/** The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
@@ -332,12 +316,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* (If the compiler can verify qual is safe to inline, it will not create the block.)
*/
def getPrimitiveReplacementForStructuralCall(name: Name): Option[(Symbol, Tree => Tree)] = {
- val opt = (
- if (params.isEmpty) postfixTest(name)
- else if (params.tail.isEmpty) infixTest(name)
- else None
+ val methodName = (
+ if (params.isEmpty) nme.primitivePostfixMethodName(name)
+ else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
+ else nme.NO_NAME
)
- opt map { case (name, fn) => (getMember(BoxesRunTimeClass, name), fn) }
+ if (methodName == nme.NO_NAME) None
+ else Some((getMember(BoxesRunTimeClass, methodName), testForName(name)))
}
/* ### BOXING PARAMS & UNBOXING RESULTS ### */
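Editor's note: the long name-by-name tables are replaced by nme.primitivePostfixMethodName / primitiveInfixMethodName lookups plus the testForName guard above. Roughly, the guard's runtime meaning is the following simplified standalone approximation (not BoxesRunTime itself):

    // Numeric ops accept boxed numbers and characters; boolean ops accept
    // boxed booleans; ops shared by both (==, |, &, ^) accept either.
    def isBoxedNumber(x: AnyRef): Boolean = x match {
      case _: java.lang.Number | _: java.lang.Character => true
      case _                                            => false
    }
    def isBoxedNumberOrBoolean(x: AnyRef): Boolean =
      isBoxedNumber(x) || x.isInstanceOf[java.lang.Boolean]
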
@@ -417,7 +402,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
// exception catching machinery
- val invokeExc = currentOwner.newValue(ad.pos, mkTerm("")) setInfo InvocationTargetExceptionClass.tpe
+ val invokeExc = currentOwner.newValue(mkTerm(""), ad.pos) setInfo InvocationTargetExceptionClass.tpe
def catchVar = Bind(invokeExc, Typed(Ident(nme.WILDCARD), TypeTree(InvocationTargetExceptionClass.tpe)))
def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
@@ -502,9 +487,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* expected to be an AnyRef. */
val t: Tree = ad.symbol.tpe match {
case MethodType(mparams, resType) =>
- assert(params.length == mparams.length)
+ assert(params.length == mparams.length, mparams)
+
typedPos {
- val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
qual = safeREF(sym)
BLOCK(
@@ -554,31 +540,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- var savedNewStaticMembers : mutable.Buffer[Tree] = null
- var savedNewStaticInits : mutable.Buffer[Tree] = null
- var savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = null
- if(forMSIL) {
- savedNewStaticMembers = newStaticMembers.clone
- savedNewStaticInits = newStaticInits.clone
- savedSymbolsStoredAsStatic = symbolsStoredAsStatic.clone
- }
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- val transformedTemplate: Template = {
- var newBody = transformTrees(body)
- treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
- }
- val res = addStaticInits(transformedTemplate) // postprocess to include static ctors
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- if(forMSIL) {
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
- }
- res
+ if (forMSIL) savingStatics( transformTemplate(tree) )
+ else transformTemplate(tree)
case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
val tpe = c.typeValue
@@ -600,7 +563,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case theTry @ Try(block, catches, finalizer)
if theTry.tpe.typeSymbol != definitions.UnitClass && theTry.tpe.typeSymbol != definitions.NothingClass =>
val tpe = theTry.tpe.widen
- val tempVar = currentOwner.newVariable(theTry.pos, mkTerm(nme.EXCEPTION_RESULT_PREFIX)).setInfo(tpe)
+ val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
val newBlock = assignBlock(block)
@@ -641,15 +604,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* And, finally, be advised - scala symbol literal and the Symbol class of the compiler
* have little in common.
*/
- case symapp @ Apply(Select(Select(a @ Ident(nme.scala_), b @ nme.Symbol), nme.apply),
- List(Literal(Constant(symname: String)))) =>
+ case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
// add the symbol name to a map if it's not there already
- val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
- val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
-
+ val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+ val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
// create a reference to a static field
- val ntree = typedWithPos(symapp.pos)(safeREF(staticFieldSym))
-
+ val ntree = typedWithPos(tree.pos)(safeREF(staticFieldSym))
super.transform(ntree)
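Editor's note: the net effect of the rewritten Symbol-literal case, sketched at the source level (the field name is illustrative; the compiler emits a mangled static field via getSymbolStaticField):

    object SymbolInterningSketch {
      // what getSymbolStaticField arranges: one field, initialized once
      private val symbol$abc: Symbol = Symbol("abc")

      // every occurrence of the literal 'abc in the class body becomes a
      // reference to this field instead of a fresh Symbol.apply call
      def use(): Symbol = symbol$abc
    }
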
// This transform replaces Array(Predef.wrapArray(Array(...)), <manifest>)
@@ -669,19 +629,20 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* If it doesn't exist, i.e. the symbol is encountered the first time,
* it creates a new static field definition and initialization and returns it.
*/
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol = {
symbolsStoredAsStatic.getOrElseUpdate(symname, {
val theTyper = typer.atOwner(tree, currentClass)
// create a symbol for the static field
- val stfieldSym = currentClass.newVariable(pos, mkTerm("symbol$"))
- .setFlag(PRIVATE | STATIC | SYNTHETIC | FINAL)
- .setInfo(symbolType)
+ val stfieldSym = (
+ currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
+ setInfo SymbolClass.tpe
+ )
currentClass.info.decls enter stfieldSym
// create field definition and initialization
- val stfieldDef = theTyper.typed { atPos(pos)(VAL(stfieldSym) === rhs) }
- val stfieldInit = theTyper.typed { atPos(pos)(safeREF(stfieldSym) === rhs) }
+ val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+ val stfieldInit = theTyper.typedPos(pos)(safeREF(stfieldSym) === rhs)
// add field definition to new defs
newStaticMembers append stfieldDef
@@ -689,6 +650,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
stfieldSym
})
+ }
/* finds the static ctor DefDef tree within the template if it exists. */
private def findStaticCtor(template: Template): Option[Tree] =
@@ -700,7 +662,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* changes the template for the class so that it contains a static constructor with symbol fields inits,
* augments an existing static ctor if one already existed.
*/
- private def addStaticInits(template: Template): Template =
+ private def addStaticInits(template: Template): Template = {
if (newStaticInits.isEmpty)
template
else {
@@ -722,11 +684,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
val rhs = Block(newStaticInits.toList, Literal(Constant()))
- val staticCtorTree = DefDef(staticCtorSym, rhs)
- localTyper.typed { atPos(template.pos)(staticCtorTree) }
+
+ localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
}
+ }
} // CleanUpTransformer
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 342c298e1d..d1c71faf1e 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -105,8 +105,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer.transform(
- new ChangeOwnerTraverser(oldowner, constr.symbol)(tree))
+ intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
// Should tree be moved in front of super constructor call?
def canBeMoved(tree: Tree) = tree match {
@@ -254,26 +253,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
for ((accSym, accBody) <- outerAccessors)
if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
- // Conflicting symbol list from parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val parentSymbols = Map((for {
- p <- impl.parents
- if p.symbol.isTrait
- sym <- p.symbol.info.nonPrivateMembers
- if sym.isGetter && !sym.isOuterField
- } yield sym.name -> p): _*)
-
// Initialize all parameters fields that must be kept.
- val paramInits =
- for (acc <- paramAccessors if mustbeKept(acc)) yield {
- if (parentSymbols contains acc.name)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s in '%s'".format(
- acc.name, acc.name, parentSymbols(acc.name)))
-
- copyParam(acc, parameter(acc))
- }
+ val paramInits = paramAccessors filter mustbeKept map { acc =>
+ // Check for conflicting symbol amongst parents: see bug #1960.
+ // It would be better to mangle the constructor parameter name since
+ // it can only be used internally, but I think we need more robust name
+ // mangling before we introduce more of it.
+ val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+ if (conflict ne NoSymbol)
+ unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+
+ copyParam(acc, parameter(acc))
+ }
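Editor's note: the user-level shape of the conflict the rewritten check reports (see bug #1960). The class below is commented out because it is exactly the case that should fail with an error along the lines of "parameter 'x' requires field but conflicts with ...":

    trait HasX { def x: Int = 1 }
    // class C(x: Int) extends HasX {
    //   def twice = x * 2   // using the parameter in a method forces a field for `x`,
    // }                     // which clashes with the inherited getter HasX.x
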
/** Return a single list of statements, merging the generic class constructor with the
* specialized stats. The original statements are retyped in the current class, and
@@ -299,11 +290,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
* be an error to pass it to array_update(.., .., Object).
*/
def rewriteArrayUpdate(tree: Tree): Tree = {
- val array_update = definitions.ScalaRunTimeModule.info.member("array_update")
val adapter = new Transformer {
override def transform(t: Tree): Tree = t match {
- case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == array_update =>
- localTyper.typed(Apply(gen.mkAttributedSelect(xs, definitions.Array_update), List(idx, v)))
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
case _ => super.transform(t)
}
}
@@ -402,11 +392,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
/** Create a getter or a setter and enter into `clazz` scope
*/
def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
- val m = (
- clazz.newMethod(sym.pos, name)
- setFlag (flags & ~LOCAL & ~PRIVATE)
- setPrivateWithin clazz
- )
+ val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
clazz.info.decls enter m
}
@@ -414,12 +400,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val getr = addAccessor(
sym, nme.getterName(sym.name), getterFlags(sym.flags))
getr setInfo MethodType(List(), sym.tpe)
- defBuf += localTyper.typed {
- //util.trace("adding getter def for "+getr) {
- atPos(sym.pos) {
- DefDef(getr, Select(This(clazz), sym))
- }//}
- }
+ defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
getr
}
@@ -463,28 +444,24 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def delayedInitClosure(stats: List[Tree]) =
localTyper.typed {
atPos(impl.pos) {
- val closureClass = clazz.newClass(impl.pos, nme.delayedInitArg.toTypeName)
- .setFlag(SYNTHETIC | FINAL)
+ val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
val closureParents = List(AbstractFunctionClass(0).tpe, ScalaObjectClass.tpe)
- closureClass.setInfo(new ClassInfoType(closureParents, new Scope, closureClass))
-
- val outerField = closureClass.newValue(impl.pos, nme.OUTER)
- .setFlag(PrivateLocal | PARAMACCESSOR)
- .setInfo(clazz.tpe)
-
- val applyMethod = closureClass.newMethod(impl.pos, nme.apply)
- .setFlag(FINAL)
- .setInfo(MethodType(List(), ObjectClass.tpe))
-
- closureClass.info.decls enter outerField
- closureClass.info.decls enter applyMethod
-
- val outerFieldDef = ValDef(outerField)
-
- val changeOwner = new ChangeOwnerTraverser(impl.symbol, applyMethod)
+ closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
+
+ val outerField = (
+ closureClass
+ newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
+ setInfoAndEnter clazz.tpe
+ )
+ val applyMethod = (
+ closureClass
+ newMethod(nme.apply, impl.pos, FINAL)
+ setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
+ )
+ val outerFieldDef = ValDef(outerField)
val closureClassTyper = localTyper.atOwner(closureClass)
- val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
+ val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
val constrStatTransformer = new Transformer {
override def transform(tree: Tree): Tree = tree match {
@@ -516,8 +493,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
else tree
case _ =>
- changeOwner.changeOwner(tree)
- tree
+ tree.changeOwner(impl.symbol -> applyMethod)
}
}
}
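Editor's note: for context, the closure being built above backs the DelayedInit contract. A hedged user-level sketch using the real scala.DelayedInit trait (class names are made up):

    class Logged extends DelayedInit {
      // constructor statements of subclasses arrive here as `body`,
      // packaged by the compiler into a closure-like object
      def delayedInit(body: => Unit): Unit = {
        println("before init")
        body
        println("after init")
      }
    }

    class App2 extends Logged {
      println("initializing")   // runs inside delayedInit, not directly
    }
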
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 1a421eb82f..5f84d765b9 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -44,22 +44,16 @@ abstract class Erasure extends AddInterfaces
// class object is that of java.lang.Integer, not Int.
//
// TODO: If T is final, return type could be Class[T]. Should it?
- def getClassReturnType(tp: Type): Type = {
- val sym = tp.typeSymbol
-
- if (phase.erasedTypes) ClassClass.tpe
- else if (isValueClass(sym)) ClassType(tp.widen)
- else {
- val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
- val upperBound = (
- if (isPhantomClass(sym)) AnyClass.tpe
+ def getClassReturnType(tpe: Type): Type = {
+ if (phase.erasedTypes) ClassClass.tpe else {
+ val tp = tpe.widen.normalize
+ val sym = tp.typeSymbol
+
+ if (isValueClass(sym)) ClassType(tp)
+ else boundedClassType(
+ if (isPhantomClass(sym)) ObjectClass.tpe
else if (sym.isLocalClass) intersectionDominator(tp.parents)
- else tp.widen
- )
-
- existentialAbstraction(
- eparams,
- ClassType(eparams.head setInfo TypeBounds.upper(upperBound) tpe)
+ else tp
)
}
}
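Editor's note: what the rewritten getClassReturnType aims at, seen from the surface language. The ascriptions below reflect the intended static types and are a hedged illustration rather than a spec:

    val s = "abc"
    val cs: Class[_ <: String] = s.getClass   // reference type: Class bounded by the static type
    val ci: Class[Int] = classOf[Int]         // value class: the exact Class[Int]
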
@@ -742,7 +736,7 @@ abstract class Erasure extends AddInterfaces
//println("computing bridges for " + owner)//DEBUG
assert(phase == currentRun.erasurePhase)
val site = owner.thisType
- val bridgesScope = new Scope
+ val bridgesScope = newScope
val bridgeTarget = new mutable.HashMap[Symbol, Symbol]
var bridges: List[Tree] = List()
val opc = atPhase(currentRun.explicitouterPhase) {
@@ -768,10 +762,8 @@ abstract class Erasure extends AddInterfaces
}
);
if (bridgeNeeded) {
- val bridge = other.cloneSymbolImpl(owner)
- .setPos(owner.pos)
- .setFlag(member.flags | BRIDGE)
- .resetFlag(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val bridge = other.cloneSymbolImpl(owner, newFlags) setPos owner.pos
// the parameter symbols need to have the new owner
bridge.setInfo(otpe.cloneInfo(bridge))
bridgeTarget(bridge) = member
@@ -799,7 +791,7 @@ abstract class Erasure extends AddInterfaces
// && (bridge.paramss.nonEmpty && bridge.paramss.head.nonEmpty && bridge.paramss.head.tail.isEmpty) // does the first argument list has exactly one argument -- for user-defined unapplies we can't be sure
&& !(atPhase(phase.next)(member.tpe <:< other.tpe))) { // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
- val typeTest = gen.mkIsInstanceOf(REF(bridge.paramss.head.head), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInapply is true since we're after uncurry
+ val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInApply is true since we're after uncurry
// println("unapp type test: "+ typeTest)
IF (typeTest) THEN bridgingCall ELSE REF(NoneModule)
} else bridgingCall
@@ -866,7 +858,7 @@ abstract class Erasure extends AddInterfaces
unboundedGenericArrayLevel(arg.tpe) > 0) =>
val level = unboundedGenericArrayLevel(arg.tpe)
def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
+ gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
global.typer.typedPos(tree.pos) {
if (level == 1) isArrayTest(qual)
@@ -887,19 +879,30 @@ abstract class Erasure extends AddInterfaces
fun.symbol != Object_isInstanceOf) =>
// leave all other type tests/type casts, remove all other type applications
preErase(fun)
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
+ case Apply(fn @ Select(qual, name), args) if fn.symbol.owner == ArrayClass =>
+ // Have to also catch calls to abstract types which are bounded by Array.
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
// convert calls to apply/update/length on generic arrays to
// calls of ScalaRunTime.array_xxx method calls
- global.typer.typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
- else
+ global.typer.typedPos(tree.pos)({
+ val arrayMethodName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ }
+ gen.mkRuntimeCall(arrayMethodName, qual :: args)
+ })
+ }
+ else {
// store exact array erasure in map to be retrieved later when we might
// need to do the cast in adaptMember
treeCopy.Apply(
tree,
SelectFromArray(qual, name, erasure(tree.symbol, qual.tpe)).copyAttrs(fn),
args)
-
+ }
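Editor's note: why apply/update/length on a generic array must go through array_xxx runtime calls, in a hedged standalone form modeled on the ScalaRunTime.array_apply idea (not the actual implementation):

    def arrayApply(xs: AnyRef, idx: Int): Any = xs match {
      case x: Array[AnyRef]  => x(idx)   // object arrays
      case x: Array[Int]     => x(idx)   // each primitive array kind needs its own case
      case x: Array[Double]  => x(idx)
      case x: Array[Boolean] => x(idx)
      case _ => throw new IllegalArgumentException("not a supported array: " + xs)
    }

    def firstOf[T](xs: Array[T]): Any = arrayApply(xs, 0)  // T unknown => runtime dispatch
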
case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) =>
if (fn.symbol == Any_## || fn.symbol == Object_##) {
// This is unattractive, but without it we crash here on ().## because after
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index fcc03a82d0..7f7f7e7b65 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -68,6 +68,8 @@ abstract class ExplicitOuter extends InfoTransform
result
}
+
+ private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
@@ -89,6 +91,21 @@ abstract class ExplicitOuter extends InfoTransform
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
else clazz.info.decls find (_.outerSource == clazz) getOrElse NoSymbol
}
+ def newOuterAccessor(clazz: Symbol) = {
+ val accFlags = SYNTHETIC | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val sym = clazz.newMethodSymbol(nme.OUTER, clazz.pos, accFlags)
+ val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+
+ sym expandName clazz
+ sym.referenced = clazz
+ sym setInfo MethodType(Nil, restpe)
+ }
+ def newOuterField(clazz: Symbol) = {
+ val accFlags = SYNTHETIC | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
+ val sym = clazz.newValue(nme.OUTER_LOCAL, clazz.pos, accFlags)
+
+ sym setInfo clazz.outerClass.thisType
+ }
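Editor's note: what the two factored-out helpers correspond to at the source level, roughly (names illustrative; the compiler's accessor gets an expanded, mangled name, and is DEFERRED in traits):

    class OuterSketch {
      class Inner {
        // newOuterField: a paramaccessor-like field holding the enclosing instance
        private[this] val outerRef: OuterSketch = OuterSketch.this
        // newOuterAccessor: a stable method exposing it
        def outerAccessor: OuterSketch = outerRef
      }
    }
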
/** <p>
* The type transformation method:
@@ -134,7 +151,7 @@ abstract class ExplicitOuter extends InfoTransform
}
if (sym.owner.isTrait) sym setNotFlag PROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
- val p = sym.newValueParameter(sym.pos, "arg" + nme.OUTER)
+ val p = sym.newValueParameter(innerClassConstructorParamName, sym.pos)
.setInfo(sym.owner.outerClass.thisType)
MethodType(p :: params, restpe)
} else if (restpe ne restpe1)
@@ -144,27 +161,20 @@ abstract class ExplicitOuter extends InfoTransform
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
- val outerAcc = clazz.newMethod(clazz.pos, nme.OUTER) // 3
+ val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
outerAcc expandName clazz
-
- val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
- decls1 enter (clazz.newOuterAccessor(clazz.pos) setInfo MethodType(Nil, restpe))
- if (hasOuterField(clazz)) { //2
- val access = if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED
- decls1 enter (
- clazz.newValue(clazz.pos, nme.OUTER_LOCAL)
- setFlag (SYNTHETIC | PARAMACCESSOR | access)
- setInfo clazz.outerClass.thisType
- )
- }
+
+ decls1 enter newOuterAccessor(clazz)
+ if (hasOuterField(clazz)) //2
+ decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
val mixinOuterAcc: Symbol = atPhase(phase.next)(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
if (decls1 eq decls) decls1 = decls.cloneScope
- val newAcc = mixinOuterAcc.cloneSymbol(clazz)
- newAcc resetFlag DEFERRED setInfo (clazz.thisType memberType mixinOuterAcc)
+ val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
+ newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
decls1 enter newAcc
}
}
@@ -370,15 +380,13 @@ abstract class ExplicitOuter extends InfoTransform
def makeGuardDef(vs: List[Symbol], guard: Tree) = {
val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(tree.pos, gdname) setFlag SYNTHETIC
- val fmls = vs map (_.tpe)
- val tpe = new MethodType(method newSyntheticValueParams fmls, BooleanClass.tpe)
- method setInfo tpe
-
- localTyper typed (DEF(method) === {
- new ChangeOwnerTraverser(currentOwner, method) traverse guard
- new TreeSymSubstituter(vs, method.paramss.head) transform (guard)
- })
+ val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
+ val params = method newSyntheticValueParams vs.map(_.tpe)
+ method setInfo new MethodType(params, BooleanClass.tpe)
+
+ localTyper typed {
+ DEF(method) === guard.changeOwner(currentOwner -> method).substTreeSyms(vs zip params: _*)
+ }
}
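Editor's note: the rewritten makeGuardDef still does the same job; at the user level the transformation amounts to something like this, where gd1 is a made-up name standing in for the fresh "gd" symbol:

    def gd1(x: Int): Boolean = x > 0          // guard lifted into a method over the bound vars

    def describe(a: Any): String = a match {
      case x: Int if gd1(x) => "positive int" // the guard call replaces the inline test
      case _                => "other"
    }
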
val nguard = new ListBuffer[Tree]
@@ -419,7 +427,7 @@ abstract class ExplicitOuter extends InfoTransform
}
val t = atPos(tree.pos) {
- val context = MatrixContext(currentRun.currentUnit, transform, localTyper, currentOwner, tree.tpe)
+ val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
/* if @switch annotation is present, verify the resulting tree is a Match */
@@ -473,7 +481,7 @@ abstract class ExplicitOuter extends InfoTransform
val vparamss1 =
if (isInner(clazz)) { // (4)
val outerParam =
- sym.newValueParameter(sym.pos, nme.OUTER) setInfo outerField(clazz).info
+ sym.newValueParameter(nme.OUTER, sym.pos) setInfo outerField(clazz).info
((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail
} else vparamss
super.transform(treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, rhs))
@@ -498,7 +506,7 @@ abstract class ExplicitOuter extends InfoTransform
val outerVal = atPos(tree.pos)(qual match {
// it's a call between constructors of same class
case _: This =>
- assert(outerParam != NoSymbol)
+ assert(outerParam != NoSymbol, tree)
outerValue
case _ =>
gen.mkAttributedQualifier(qual.tpe.prefix match {
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index b17fd7b9b0..89f1cc26e0 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -60,12 +60,12 @@ abstract class Flatten extends InfoTransform {
private val flattened = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if isFlattenablePrefix(pre) =>
- assert(args.isEmpty && sym.toplevelClass != NoSymbol, sym.ownerChain)
- typeRef(sym.toplevelClass.owner.thisType, sym, Nil)
+ assert(args.isEmpty && sym.enclosingTopLevelClass != NoSymbol, sym.ownerChain)
+ typeRef(sym.enclosingTopLevelClass.owner.thisType, sym, Nil)
case ClassInfoType(parents, decls, clazz) =>
var parents1 = parents
val decls1 = scopeTransform(clazz) {
- val decls1 = new Scope()
+ val decls1 = newScope
if (clazz.isPackageClass) {
atPhase(phase.next)(decls foreach (decls1 enter _))
} else {
@@ -119,7 +119,7 @@ abstract class Flatten extends InfoTransform {
val sym = tree.symbol
val tree1 = tree match {
case ClassDef(_, _, _, _) if sym.isNestedClass =>
- liftedDefs(sym.toplevelClass.owner) += tree
+ liftedDefs(sym.enclosingTopLevelClass.owner) += tree
EmptyTree
case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
atPhase(phase.next) {
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 2310eae9bb..4fc7b9f92f 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -9,7 +9,8 @@ package transform
import symtab._
import Flags._
import util.TreeSet
-import scala.collection.mutable.{ LinkedHashMap, ListBuffer }
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.LinkedHashMap
abstract class LambdaLift extends InfoTransform {
import global._
@@ -17,6 +18,19 @@ abstract class LambdaLift extends InfoTransform {
/** the following two members override abstract members in Transform */
val phaseName: String = "lambdalift"
+
+ /** Converts types of captured variables to *Ref types.
+ */
+ def boxIfCaptured(sym: Symbol, tpe: Type, erasedTypes: Boolean) =
+ if (sym.isCapturedVariable) {
+ val symClass = tpe.typeSymbol
+ def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
+ if (isValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
+ else if (erasedTypes) objectRefClass.tpe
+ else appliedType(objectRefClass.typeConstructor, List(tpe))
+ if (sym.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
+ else refType(refClass, ObjectRefClass)
+ } else tpe
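Editor's note: boxIfCaptured is the type-level half of capture; the term-level effect is the familiar *Ref boxing, sketched here with a self-contained stand-in for scala.runtime.IntRef:

    final class IntBox(var elem: Int)    // stand-in for scala.runtime.IntRef

    def counter(): () => Int = {
      val x = new IntBox(0)              // was: var x = 0 (a captured, mutated variable)
      () => { x.elem += 1; x.elem }      // the closure mutates the shared cell
    }
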
private val lifted = new TypeMap {
def apply(tp: Type): Type = tp match {
@@ -31,7 +45,8 @@ abstract class LambdaLift extends InfoTransform {
}
}
- def transformInfo(sym: Symbol, tp: Type): Type = lifted(tp)
+ def transformInfo(sym: Symbol, tp: Type): Type =
+ boxIfCaptured(sym, lifted(tp), erasedTypes = true)
protected def newTransformer(unit: CompilationUnit): Transformer =
new LambdaLifter(unit)
@@ -50,12 +65,20 @@ abstract class LambdaLift extends InfoTransform {
/** The set of symbols that need to be renamed. */
private val renamable = newSymSet
+ // (trait, name) -> owner
+ private val localTraits = mutable.HashMap[(Symbol, Name), Symbol]()
+ // (owner, name) -> implClass
+ private val localImplClasses = mutable.HashMap[(Symbol, Name), Symbol]()
+
/** A flag to indicate whether new free variables have been found */
private var changedFreeVars: Boolean = _
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
-
+
+ /** True if we are transforming under a ReferenceToBoxed node */
+ private var isBoxedRef = false
+
private type SymSet = TreeSet[Symbol]
private def newSymSet = new TreeSet[Symbol](_ isLess _)
@@ -116,22 +139,7 @@ abstract class LambdaLift extends InfoTransform {
}
changedFreeVars = true
debuglog("" + sym + " is free in " + enclosure);
- if (sym.isVariable && !sym.hasFlag(CAPTURED)) {
- // todo: We should merge this with the lifting done in liftCode.
- // We do have to lift twice: in liftCode, because Code[T] needs to see the lifted version
- // and here again because lazy bitmaps are introduced later and get lifted here.
- // But we should factor out the code and run it twice.
- sym setFlag CAPTURED
- val symClass = sym.tpe.typeSymbol
- atPhase(phase.next) {
- sym updateInfo (
- if (sym.hasAnnotation(VolatileAttr))
- if (isValueClass(symClass)) volatileRefClass(symClass).tpe else VolatileObjectRefClass.tpe
- else
- if (isValueClass(symClass)) refClass(symClass).tpe else ObjectRefClass.tpe
- )
- }
- }
+ if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
}
@@ -150,7 +158,26 @@ abstract class LambdaLift extends InfoTransform {
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
- if (sym.isLocal) renamable addEntry sym
+ if (sym.isLocal) {
+ // Don't rename implementation classes independently of their interfaces. If
+ // the interface is to be renamed, then we will rename the implementation
+ // class at that time. You'd think we could call ".implClass" on the trait
+ // rather than collecting them in another map, but that seems to fail for
+ // exactly the traits being renamed here (i.e. defined in methods.)
+ //
+ // !!! - it makes no sense to have methods like "implClass" and
+ // "companionClass" which fail for an arbitrary subset of nesting
+ // arrangements, and then have separate methods which attempt to compensate
+ // for that failure. There should be exactly one method for any given
+ // entity which always gives the right answer.
+ if (sym.isImplClass)
+ localImplClasses((sym.owner, nme.interfaceName(sym.name))) = sym
+ else {
+ renamable addEntry sym
+ if (sym.isTrait)
+ localTraits((sym, sym.name)) = sym.owner
+ }
+ }
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocal) {
renamable addEntry sym
@@ -194,8 +221,8 @@ abstract class LambdaLift extends InfoTransform {
for (caller <- called.keys ; callee <- called(caller) ; fvs <- free get callee ; fv <- fvs)
markFree(fv, caller)
} while (changedFreeVars)
-
- for (sym <- renamable) {
+
+ def renameSym(sym: Symbol) {
val originalName = sym.name
val base = sym.name + nme.NAME_JOIN_STRING + (
if (sym.isAnonymousFunction && sym.owner.isMethod)
@@ -209,19 +236,42 @@ abstract class LambdaLift extends InfoTransform {
debuglog("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name))
}
+ /** Rename a trait's interface and implementation class in coordinated fashion.
+ */
+ def renameTrait(traitSym: Symbol, implSym: Symbol) {
+ val originalImplName = implSym.name
+ renameSym(traitSym)
+ implSym.name = nme.implClassName(traitSym.name)
+
+ debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name))
+ }
+
+ for (sym <- renamable) {
+ // If we renamed a trait from Foo to Foo$1, we must rename the implementation
+ // class from Foo$class to Foo$1$class. (Without special consideration it would
+ // become Foo$class$1 instead.) Since the symbols are being renamed out from
+ // under us, and there's no reliable link between trait symbol and impl symbol,
+ // we have maps from ((trait, name)) -> owner and ((owner, name)) -> impl.
+ localTraits remove ((sym, sym.name)) match {
+ case None => renameSym(sym)
+ case Some(owner) =>
+ localImplClasses remove ((owner, sym.name)) match {
+ case Some(implSym) => renameTrait(sym, implSym)
+ case _ => renameSym(sym) // pure interface, no impl class
+ }
+ }
+ }
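Editor's note: the naming arithmetic behind the coordinated rename, in isolation (string-level only; the real code renames compiler symbols and uses nme.implClassName):

    def implClassName(traitName: String): String = traitName + "$class"

    val renamedTrait = "Foo" + "$1"                  // local trait Foo lifted to Foo$1
    val renamedImpl  = implClassName(renamedTrait)   // Foo$1$class -- not Foo$class$1
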
+
atPhase(phase.next) {
for ((owner, freeValues) <- free.toList) {
+ val newFlags = SYNTHETIC | ( if (owner.isClass) PARAMACCESSOR | PrivateLocal else PARAM )
debuglog("free var proxy: %s, %s".format(owner.fullLocationString, freeValues.toList.mkString(", ")))
-
- proxies(owner) =
- for (fv <- freeValues.toList) yield {
- val proxy = owner.newValue(owner.pos, fv.name)
- .setFlag(if (owner.isClass) PARAMACCESSOR | PrivateLocal else PARAM)
- .setFlag(SYNTHETIC)
- .setInfo(fv.info);
- if (owner.isClass) owner.info.decls enter proxy;
- proxy
- }
+ proxies(owner) =
+ for (fv <- freeValues.toList) yield {
+ val proxy = owner.newValue(fv.name, owner.pos, newFlags) setInfo fv.info
+ if (owner.isClass) owner.info.decls enter proxy
+ proxy
+ }
}
}
}
@@ -340,7 +390,7 @@ abstract class LambdaLift extends InfoTransform {
EmptyTree
}
- private def postTransform(tree: Tree): Tree = {
+ private def postTransform(tree: Tree, isBoxedRef: Boolean = false): Tree = {
val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
@@ -363,8 +413,19 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
+ /** Wrap expr argument in new *Ref(..) constructor, but make
+ * sure that Try expressions stay at toplevel.
+ */
+ def refConstr(expr: Tree): Tree = expr match {
+ case Try(block, catches, finalizer) =>
+ Try(refConstr(block), catches map refConstrCase, finalizer)
+ case _ =>
+ Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(expr))
+ }
+ def refConstrCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(constructorArg))
+ refConstr(constructorArg)
})
} else tree
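Editor's note: a hedged reading of the comment above refConstr: a try whose value feeds a Ref constructor would sit in argument position, which the backend handles awkwardly, so the wrapping is pushed into each branch and the Try stays outermost. In user terms, with a stand-in box type:

    final class AnyBox(var elem: Any)     // stand-in for scala.runtime.ObjectRef

    def computeSomething(): Any = 42      // hypothetical initializer

    // preferred shape: the try stays at top level, each branch builds the box
    val boxed: AnyBox =
      try new AnyBox(computeSomething())
      catch { case _: Exception => new AnyBox(null) }
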
case Return(Block(stats, value)) =>
@@ -388,7 +449,7 @@ abstract class LambdaLift extends InfoTransform {
atPos(tree.pos)(proxyRef(sym))
else tree
else tree
- if (sym.isCapturedVariable)
+ if (sym.isCapturedVariable && !isBoxedRef)
atPos(tree.pos) {
val tp = tree.tpe
val elemTree = typer typed Select(tree1 setType sym.tpe, nme.elem)
@@ -396,20 +457,26 @@ abstract class LambdaLift extends InfoTransform {
}
else tree1
case Block(stats, expr0) =>
- val (lzyVals, rest) = stats.partition {
- case stat@ValDef(_, _, _, _) if stat.symbol.isLazy => true
- case stat@ValDef(_, _, _, _) if stat.symbol.hasFlag(MODULEVAR) => true
- case _ => false
- }
- treeCopy.Block(tree, lzyVals:::rest, expr0)
+ val (lzyVals, rest) = stats partition {
+ case stat: ValDef => stat.symbol.isLazy || stat.symbol.isModuleVar
+ case _ => false
+ }
+ if (lzyVals.isEmpty) tree
+ else treeCopy.Block(tree, lzyVals ::: rest, expr0)
case _ =>
tree
}
}
+
+ private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
- override def transform(tree: Tree): Tree =
- postTransform(super.transform(tree) setType lifted(tree.tpe))
-
+ override def transform(tree: Tree): Tree = tree match {
+ case ReferenceToBoxed(idt) =>
+ postTransform(preTransform(idt), isBoxedRef = true)
+ case _ =>
+ postTransform(preTransform(tree))
+ }
+
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 5452087aa3..f8c5f5bfc6 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -246,7 +246,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(meth.pos, nme.newBitmapName(nme.BITMAP_NORMAL, n)).setInfo(IntClass.tpe)
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(IntClass.tpe)
atPhase(currentRun.typerPhase) {
sym addAnnotation VolatileAttr
}
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
deleted file mode 100644
index 9404f0f699..0000000000
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ /dev/null
@@ -1,587 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package transform
-
-import symtab._
-import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.FreshNameCreator
-import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
-
-/**
- * Translate expressions of the form reflect.Code.lift(exp)
- * to the reified "reflect trees" representation of exp.
- * Also: mutable variables that are accessed from a local function are wrapped in refs.
- *
- * @author Martin Odersky
- * @version 2.10
- */
-abstract class LiftCode extends Transform with TypingTransformers {
-
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{ typed, atOwner } // methods to type trees
-
- val symbols: global.type = global
-
- /** the following two members override abstract members in Transform */
- val phaseName: String = "liftcode"
-
- def newTransformer(unit: CompilationUnit): Transformer =
- new Codifier(unit)
-
- private lazy val MirrorMemberNames =
- ReflectRuntimeMirror.info.nonPrivateMembers filter (_.isTerm) map (_.toString) toSet
-
- // Would be nice if we could use something like this to check the names,
- // but it seems that info is unavailable when I need it.
- private def mirrorFactoryName(value: Any): Option[String] = value match {
- // Modest (inadequate) sanity check that there's a member by this name.
- case x: Product if MirrorMemberNames(x.productPrefix) =>
- Some(x.productPrefix)
- case _ =>
- Some(value.getClass.getName split """[$.]""" last) filter MirrorMemberNames
- }
- private def isMirrorMemberObject(value: Product) = value match {
- case NoType | NoPrefix | NoPosition | EmptyTree => true
- case _ => false
- }
-
- class Codifier(unit: CompilationUnit) extends TypingTransformer(unit) {
-
- val reifyDebug = settings.Yreifydebug.value
- val debugTrace = util.trace when reifyDebug
-
- val reifyCopypaste = settings.Yreifycopypaste.value
- def printCopypaste(tree: Tree) {
- import scala.reflect.api.Modifier
- import scala.reflect.api.Modifier._
-
- def copypasteModifier(mod: Modifier.Value): String = mod match {
- case mod @ (
- `protected` | `private` | `override` |
- `abstract` | `final` | `sealed` |
- `implicit` | `lazy` | `macro` |
- `case` | `trait`) => "`" + mod.toString + "`"
- case mod => mod.toString
- }
-
- for (line <- (tree.toString.split(Properties.lineSeparator) drop 2 dropRight 1)) {
- var s = line.trim
- s = s.replace("$mr.", "")
- s = s.replace(".apply", "")
- s = s.replace("scala.collection.immutable.", "")
- s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
- s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
- s = s.replace("immutable.this.Nil", "List()")
- s = s.replace("modifiersFromInternalFlags", "Modifiers")
- s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
- s = """Modifiers\((\d+)L, newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
- val buf = new StringBuffer()
-
- val flags = m.group(1).toLong
- var s_flags = Flags.modifiersOfFlags(flags) map copypasteModifier
- buf.append("Set(" + s_flags.mkString(", ") + ")")
-
- var privateWithin = m.group(2)
- buf.append(", " + "newTypeName(\"" + privateWithin + "\")")
-
- var annotations = m.group(3)
- buf.append(", " + "List(" + annotations + ")")
-
- var s = buf.toString
- if (s.endsWith(", List()")) s = s.substring(0, s.length - ", List()".length)
- if (s.endsWith(", newTypeName(\"\")")) s = s.substring(0, s.length - ", newTypeName(\"\")".length)
- if (s.endsWith("Set()")) s = s.substring(0, s.length - "Set()".length)
- "Modifiers(" + s + ")"
- })
- s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => {
- val flags = m.group(1).toLong
- val mods = Flags.modifiersOfFlags(flags) map copypasteModifier
- "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))"
- })
-
- println(s)
- }
- }
-
- /** Set of mutable local variables that are free in some inner method. */
- private val freeMutableVars: mutable.Set[Symbol] = new mutable.HashSet
- private val converted: mutable.Set[Symbol] = new mutable.HashSet // debug
-
- override def transformUnit(unit: CompilationUnit) {
- freeMutableVars.clear()
- freeLocalsTraverser(unit.body)
- atPhase(phase.next) {
- super.transformUnit(unit)
- }
- for (v <- freeMutableVars) //!!! remove
- assert(converted contains v, "unconverted: " + v + " in " + v.owner + " in unit " + unit)
- }
-
- override def transform(tree: Tree): Tree = {
- val sym = tree.symbol
- tree match {
- case Apply(_, List(tree)) if sym == Code_lift => // reify Code.lift[T](expr) instances
- val saved = printTypings
- try {
- printTypings = reifyDebug
- debugTrace("transformed = ") {
- val result = localTyper.typedPos(tree.pos)(codify(super.transform(tree)))
- if (reifyCopypaste) printCopypaste(result)
- result
- }
- } finally printTypings = saved
- case ValDef(mods, name, tpt, rhs) if (freeMutableVars(sym)) => // box mutable variables that are accessed from a local closure
- val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
- /* Creating a constructor argument if one isn't present. */
- val constructorArg = rhs match {
- case EmptyTree => gen.mkZero(atPhase(phase.prev)(sym.tpe))
- case _ => transform(rhs)
- }
- val rhs1 = typer.typedPos(rhs.pos) {
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(constructorArg))
- }
- sym resetFlag MUTABLE
- sym removeAnnotation VolatileAttr
- converted += sym // dereference boxed variables
- treeCopy.ValDef(tree, mods &~ MUTABLE, name, tpt1, rhs1)
- case Ident(name) if freeMutableVars(sym) =>
- localTyper.typedPos(tree.pos) {
- Select(tree setType sym.tpe, nme.elem)
- }
- case _ =>
- super.transform(tree)
- }
- }
-
- def codify(tree: Tree): Tree = debugTrace("codified " + tree + " -> ") {
- val targetType = definitions.CodeClass.primaryConstructor.info.paramTypes.head
- val reifier = new Reifier()
- val arg = gen.mkAsInstanceOf(reifier.reifyTopLevel(tree), targetType, wrapInApply = false)
- val treetpe =
- if (tree.tpe.typeSymbol.isAnonymousClass) tree.tpe.typeSymbol.classBound
- else tree.tpe
- New(TypeTree(appliedType(definitions.CodeClass.typeConstructor, List(treetpe.widen))),
- List(List(arg)))
- }
-
- /**
- * PP: There is apparently some degree of overlap between the CAPTURED
- * flag and the role being filled here. I think this is how this was able
- * to go for so long looking only at DefDef and Ident nodes, as bugs
- * would only emerge under more complicated conditions such as #3855.
- * I'll try to figure it all out, but if someone who already knows the
- * whole story wants to fill it in, that too would be great.
- *
- * XXX I found this had been cut and pasted between LiftCode and UnCurry,
- * and seems to be running in both.
- */
- private val freeLocalsTraverser = new Traverser {
- var currentMethod: Symbol = NoSymbol
- var maybeEscaping = false
-
- def withEscaping(body: => Unit) {
- val saved = maybeEscaping
- maybeEscaping = true
- try body
- finally maybeEscaping = saved
- }
-
- override def traverse(tree: Tree) = tree match {
- case DefDef(_, _, _, _, _, _) =>
- val lastMethod = currentMethod
- currentMethod = tree.symbol
- try super.traverse(tree)
- finally currentMethod = lastMethod
- /** A method call with a by-name parameter represents escape. */
- case Apply(fn, args) if fn.symbol.paramss.nonEmpty =>
- traverse(fn)
- for ((param, arg) <- treeInfo.zipMethodParamsAndArgs(tree)) {
- if (param.tpe != null && isByNameParamType(param.tpe))
- withEscaping(traverse(arg))
- else
- traverse(arg)
- }
-
- /** The rhs of a closure represents escape. */
- case Function(vparams, body) =>
- vparams foreach traverse
- withEscaping(traverse(body))
-
- /**
- * The appearance of an ident outside the method where it was defined or
- * anytime maybeEscaping is true implies escape.
- */
- case Ident(_) =>
- val sym = tree.symbol
- if (sym.isVariable && sym.owner.isMethod && (maybeEscaping || sym.owner != currentMethod)) {
- freeMutableVars += sym
- val symTpe = sym.tpe
- val symClass = symTpe.typeSymbol
- atPhase(phase.next) {
- def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
- if (isValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
- else appliedType(objectRefClass.typeConstructor, List(symTpe))
-
- sym updateInfo (
- if (sym.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
- else refType(refClass, ObjectRefClass))
- }
- }
- case _ =>
- super.traverse(tree)
- }
- }
- }
-
- /**
- * Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
- * For instance: Given
- *
- * var x = 1; Code(x + 1)
- *
- * The `x + 1` expression is reified to
- *
- * $mr.Apply($mr.Select($mr.Ident($mr.freeVar("x". <Int>, x), "+"), List($mr.Literal($mr.Constant(1))))))
- *
- * Or, the term name 'abc' is reified to:
- *
- * $mr.Apply($mr.Select($mr.Ident("newTermName")), List(Literal(Constant("abc")))))
- *
- * todo: Treat embedded Code blocks by merging them into containing block
- *
- */
- class Reifier() {
-
- final val mirrorFullName = "scala.reflect.mirror"
- final val mirrorShortName = "$mr"
- final val mirrorPrefix = mirrorShortName + "."
- final val scalaPrefix = "scala."
- final val localPrefix = "$local"
- final val memoizerName = "$memo"
-
- val reifyDebug = settings.Yreifydebug.value
-
- private val reifiableSyms = mutable.ArrayBuffer[Symbol]() // the symbols that are reified with the tree
- private val symIndex = mutable.HashMap[Symbol, Int]() // the index of a reifiable symbol in `reifiableSyms`
- private var boundSyms = Set[Symbol]() // set of all symbols that are bound in tree to be reified
-
- /**
- * Generate tree of the form
- *
- * { val $mr = scala.reflect.runtime.Mirror
- * $local1 = new TypeSymbol(owner1, NoPosition, name1)
- * ...
- * $localN = new TermSymbol(ownerN, NoPositiion, nameN)
- * $local1.setInfo(tpe1)
- * ...
- * $localN.setInfo(tpeN)
- * $localN.setAnnotations(annotsN)
- * rtree
- * }
- *
- * where
- *
- * - `$localI` are free type symbols in the environment, as well as local symbols
- * of refinement types.
- * - `tpeI` are the info's of `symI`
- * - `rtree` is code that generates `data` at runtime, maintaining all attributes.
- * - `data` is typically a tree or a type.
- */
- def reifyTopLevel(data: Any): Tree = {
- val rtree = reify(data)
- Block(mirrorAlias :: reifySymbolTableSetup, rtree)
- }
-
- private def isLocatable(sym: Symbol) =
- sym.isPackageClass || sym.owner.isClass || sym.isTypeParameter && sym.paramPos >= 0
-
- private def registerReifiableSymbol(sym: Symbol): Unit =
- if (!(symIndex contains sym)) {
- sym.owner.ownersIterator find (x => !isLocatable(x)) foreach registerReifiableSymbol
- symIndex(sym) = reifiableSyms.length
- reifiableSyms += sym
- }
-
- // helper methods
-
- private def localName(sym: Symbol) = localPrefix + symIndex(sym)
-
- private def call(fname: String, args: Tree*): Tree =
- Apply(termPath(fname), args.toList)
-
- private def mirrorSelect(name: String): Tree =
- termPath(mirrorPrefix + name)
-
- private def mirrorCall(name: String, args: Tree*): Tree =
- call(mirrorPrefix + name, args: _*)
-
- private def mirrorFactoryCall(value: Product, args: Tree*): Tree =
- mirrorCall(value.productPrefix, args: _*)
-
- private def scalaFactoryCall(name: String, args: Tree*): Tree =
- call(scalaPrefix + name + ".apply", args: _*)
-
- private def mkList(args: List[Tree]): Tree =
- scalaFactoryCall("collection.immutable.List", args: _*)
-
- private def reifyModifiers(m: Modifiers) =
- mirrorCall("modifiersFromInternalFlags", reify(m.flags), reify(m.privateWithin), reify(m.annotations))
-
- private def reifyAggregate(name: String, args: Any*) =
- scalaFactoryCall(name, (args map reify).toList: _*)
-
- /**
- * Reify a list
- */
- private def reifyList(xs: List[Any]): Tree =
- mkList(xs map reify)
-
- /** Reify a name */
- private def reifyName(name: Name) =
- mirrorCall(if (name.isTypeName) "newTypeName" else "newTermName", Literal(Constant(name.toString)))
-
- private def isFree(sym: Symbol) =
- !(symIndex contains sym)
-
- /**
- * Reify a reference to a symbol
- */
- private def reifySymRef(sym: Symbol): Tree = {
- symIndex get sym match {
- case Some(idx) =>
- Ident(localName(sym))
- case None =>
- if (sym == NoSymbol)
- mirrorSelect("NoSymbol")
- else if (sym.isModuleClass)
- Select(reifySymRef(sym.sourceModule), "moduleClass")
- else if (sym.isStatic && sym.isClass)
- mirrorCall("staticClass", reify(sym.fullName))
- else if (sym.isStatic && sym.isModule)
- mirrorCall("staticModule", reify(sym.fullName))
- else if (isLocatable(sym))
- if (sym.isTypeParameter)
- mirrorCall("selectParam", reify(sym.owner), reify(sym.paramPos))
- else {
- if (reifyDebug) println("locatable: " + sym + " " + sym.isPackageClass + " " + sym.owner + " " + sym.isTypeParameter)
- val rowner = reify(sym.owner)
- val rname = reify(sym.name.toString)
- if (sym.isType)
- mirrorCall("selectType", rowner, rname)
- else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) {
- val index = sym.owner.info.decl(sym.name).alternatives indexOf sym
- assert(index >= 0, sym)
- mirrorCall("selectOverloadedMethod", rowner, rname, reify(index))
- } else
- mirrorCall("selectTerm", rowner, rname)
- }
- else {
- if (sym.isTerm) {
- if (reifyDebug) println("Free: " + sym)
- mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), Ident(sym))
- } else {
- if (reifyDebug) println("Late local: " + sym)
- registerReifiableSymbol(sym)
- reifySymRef(sym)
- }
- }
- }
- }
-
- /**
- * Reify the creation of a symbol
- */
- private def reifySymbolDef(sym: Symbol): Tree = {
- if (reifyDebug) println("reify sym def " + sym)
- var rsym: Tree =
- Apply(
- Select(reify(sym.owner), "newNestedSymbol"),
- List(reify(sym.pos), reify(sym.name)))
- if (sym.flags != 0L)
- rsym = Apply(Select(rsym, "setInternalFlags"), List(Literal(Constant(sym.flags))))
- ValDef(NoMods, localName(sym), TypeTree(), rsym)
- }
-
- /**
- * Generate code to add type and annotation info to a reified symbol
- */
- private def fillInSymbol(sym: Symbol): Tree = {
- val rset = Apply(Select(reifySymRef(sym), "setTypeSig"), List(reifyType(sym.info)))
- if (sym.annotations.isEmpty) rset
- else Apply(Select(rset, "setAnnotations"), List(reify(sym.annotations)))
- }
-
- /** Reify a scope */
- private def reifyScope(scope: Scope): Tree = {
- scope foreach registerReifiableSymbol
- mirrorCall("newScopeWith", scope.toList map reifySymRef: _*)
- }
-
- /** Reify a list of symbols that need to be created */
- private def reifySymbols(syms: List[Symbol]): Tree = {
- syms foreach registerReifiableSymbol
- mkList(syms map reifySymRef)
- }
-
- /** Reify a type that defines some symbols */
- private def reifyTypeBinder(value: Product, bound: List[Symbol], underlying: Type): Tree =
- mirrorFactoryCall(value, reifySymbols(bound), reify(underlying))
-
- /** Reify a type */
- private def reifyType(tpe0: Type): Tree = {
- val tpe = tpe0.normalize
- val tsym = tpe.typeSymbol
- if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
- Select(reifySymRef(tpe.typeSymbol), "asTypeConstructor")
- else tpe match {
- case t @ NoType =>
- reifyMirrorObject(t)
- case t @ NoPrefix =>
- reifyMirrorObject(t)
- case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
- mirrorCall("thisModuleType", reify(clazz.fullName))
- case t @ RefinedType(parents, decls) =>
- registerReifiableSymbol(tpe.typeSymbol)
- mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
- case t @ ClassInfoType(parents, decls, clazz) =>
- registerReifiableSymbol(clazz)
- mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
- case t @ ExistentialType(tparams, underlying) =>
- reifyTypeBinder(t, tparams, underlying)
- case t @ PolyType(tparams, underlying) =>
- reifyTypeBinder(t, tparams, underlying)
- case t @ MethodType(params, restpe) =>
- reifyTypeBinder(t, params, restpe)
- case _ =>
- reifyProductUnsafe(tpe)
- }
- }
-
- /** Reify a tree */
- private def reifyTree(tree: Tree): Tree = tree match {
- case EmptyTree =>
- reifyMirrorObject(EmptyTree)
- case This(_) if !(boundSyms contains tree.symbol) =>
- reifyFree(tree)
- case Ident(_) if !(boundSyms contains tree.symbol) =>
- reifyFree(tree)
- case tt: TypeTree if (tt.tpe != null) =>
- if (!(boundSyms exists (tt.tpe contains _))) mirrorCall("TypeTree", reifyType(tt.tpe))
- else if (tt.original != null) reify(tt.original)
- else mirrorCall("TypeTree")
- case ta @ TypeApply(hk, ts) =>
- val thereAreOnlyTTs = ts collect { case t if !t.isInstanceOf[TypeTree] => t } isEmpty;
- val ttsAreNotEssential = ts collect { case tt: TypeTree => tt } find { tt => tt.original != null } isEmpty;
- if (thereAreOnlyTTs && ttsAreNotEssential) reifyTree(hk) else reifyProduct(ta)
- case global.emptyValDef =>
- mirrorSelect("emptyValDef")
- case _ =>
- if (tree.isDef)
- boundSyms += tree.symbol
-
- reifyProduct(tree)
- /*
- if (tree.isDef || tree.isInstanceOf[Function])
- registerReifiableSymbol(tree.symbol)
- if (tree.hasSymbol)
- rtree = Apply(Select(rtree, "setSymbol"), List(reifySymRef(tree.symbol)))
- Apply(Select(rtree, "setType"), List(reifyType(tree.tpe)))
-*/
- }
-
- /**
- * Reify a free reference. The result will be either a mirror reference
- * to a global value, or else a mirror Literal.
- */
- private def reifyFree(tree: Tree): Tree =
- mirrorCall("Ident", reifySymRef(tree.symbol))
-
- // todo: consider whether we should also reify positions
- private def reifyPosition(pos: Position): Tree =
- reifyMirrorObject(NoPosition)
-
- // !!! we must eliminate these casts.
- private def reifyProductUnsafe(x: Any): Tree =
- reifyProduct(x.asInstanceOf[Product])
- private def reifyProduct(x: Product): Tree =
- mirrorCall(x.productPrefix, (x.productIterator map reify).toList: _*)
-
- /**
- * Reify a case object defined in Mirror
- */
- private def reifyMirrorObject(name: String): Tree = mirrorSelect(name)
- private def reifyMirrorObject(x: Product): Tree = reifyMirrorObject(x.productPrefix)
-
- private def isReifiableConstant(value: Any) = value match {
- case null => true // seems pretty reifiable to me?
- case _: String => true
- case _ => isAnyVal(value)
- }
-
- /** Reify an arbitrary value */
- private def reify(value: Any): Tree = value match {
- case tree: Tree => reifyTree(tree)
- case sym: Symbol => reifySymRef(sym)
- case tpe: Type => reifyType(tpe)
- case xs: List[_] => reifyList(xs)
- case xs: Array[_] => scalaFactoryCall("Array", xs map reify: _*)
- case scope: Scope => reifyScope(scope)
- case x: Name => reifyName(x)
- case x: Position => reifyPosition(x)
- case x: Modifiers => reifyModifiers(x)
- case _ =>
- if (isReifiableConstant(value)) Literal(Constant(value))
- else reifyProductUnsafe(value)
- }
-
- /**
- * An (unreified) path that refers to a definition with the given fully qualified name
- * @param mkName Creator for the last portion of the name (either TermName or TypeName)
- */
- private def path(fullname: String, mkName: String => Name): Tree = {
- val parts = fullname split "\\."
- val prefixParts = parts.init
- val lastName = mkName(parts.last)
- if (prefixParts.isEmpty) Ident(lastName)
- else {
- val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _))
- Select(prefixTree, lastName)
- }
- }
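For illustration, the fold in `path` above turns a dotted name into nested selections; here is a self-contained sketch with a toy AST (stand-in `Tree`/`Ident`/`Select` and the hypothetical name `PathFoldSketch`, not the compiler's Trees), shown only to make the shape concrete:

object PathFoldSketch {
  sealed trait Tree
  case class Ident(name: String) extends Tree
  case class Select(qual: Tree, name: String) extends Tree

  // Same folding pattern as `path`: the init parts become a chain of Selects,
  // the last part becomes the final selection.
  def path(fullname: String): Tree = {
    val parts  = fullname split "\\."
    val prefix = parts.init
    if (prefix.isEmpty) Ident(parts.last)
    else Select(((Ident(prefix.head): Tree) /: prefix.tail)(Select(_, _)), parts.last)
  }

  def main(args: Array[String]): Unit =
    println(path("scala.collection.immutable.List"))
    // Select(Select(Select(Ident(scala),collection),immutable),List)
}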
-
- /** An (unreified) path that refers to a term definition with the given fully qualified name */
- private def termPath(fullname: String): Tree = path(fullname, newTermName)
-
- /** An (unreified) path that refers to a type definition with the given fully qualified name */
- private def typePath(fullname: String): Tree = path(fullname, newTypeName)
-
- private def mirrorAlias =
- ValDef(NoMods, mirrorShortName, TypeTree(), termPath(mirrorFullName))
-
- /**
- * Generate code that builds a symbol table of all symbols registered in `reifiableSyms`
- */
- private def reifySymbolTableSetup: List[Tree] = {
- val symDefs, fillIns = new mutable.ArrayBuffer[Tree]
- var i = 0
- while (i < reifiableSyms.length) {
- // fillInSymbol might create new reifiableSyms, which is why this is done iteratively
- symDefs += reifySymbolDef(reifiableSyms(i))
- fillIns += fillInSymbol(reifiableSyms(i))
- i += 1
- }
-
- symDefs.toList ++ fillIns.toList
- }
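The index-based loop above matters because `fillInSymbol` can register additional symbols while the table is being built; a plain `map` over the initial contents would miss them. A self-contained sketch of the pattern (toy integer work items and the hypothetical name `GrowingBufferSketch`, not symbols):

object GrowingBufferSketch {
  import scala.collection.mutable.ArrayBuffer
  def main(args: Array[String]): Unit = {
    val work = ArrayBuffer(3, 1)          // processing item n > 1 enqueues n - 1
    val seen = ArrayBuffer[Int]()
    var i = 0
    while (i < work.length) {             // length is re-read on every iteration
      val n = work(i)
      seen += n
      if (n > 1) work += n - 1            // processing may grow the buffer
      i += 1
    }
    println(seen.mkString(", "))          // 3, 1, 2, 1
  }
}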
-
- private def cannotReify(value: Any): Nothing =
- abort("don't know how to reify " + value + " of " + value.getClass)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c7d3b331a6..b3b7596f9a 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -186,13 +186,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* always accessors and deferred. */
def newGetter(field: Symbol): Symbol = {
// println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
- // atPhase(currentRun.erasurePhase){
- // println("before erasure: "+ (field.info))
- // }
- clazz.newMethod(field.pos, nme.getterName(field.name))
- .setFlag(field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED |
- (if (field.isMutable) 0 else STABLE))
- .setInfo(MethodType(List(), field.info)) // TODO preserve pre-erasure info?
+ val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
+ // TODO preserve pre-erasure info?
+ clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
}
/** Create a new setter. Setters are never private or local. They are
@@ -200,13 +196,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def newSetter(field: Symbol): Symbol = {
//println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
val setterName = nme.getterToSetter(nme.getterName(field.name))
- val setter = clazz.newMethod(field.pos, setterName)
- .setFlag(field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED)
- setter.setInfo(MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)) // TODO preserve pre-erasure info?
- if (needsExpandedSetterName(field)) {
- //println("creating expanded setter from "+field)
+ val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
+ val setter = clazz.newMethod(setterName, field.pos, newFlags)
+ // TODO preserve pre-erasure info?
+ setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
+ if (needsExpandedSetterName(field))
setter.name = nme.expandedSetterName(setter.name, clazz)
- }
+
setter
}
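For context, `newGetter`/`newSetter` exist because a field declared in a trait is represented by accessor methods, and the concrete field plus accessors are added to each class that mixes the trait in. A source-level sketch of the situation these symbols model (hand-written example with the hypothetical names `Counter`/`CounterImpl`):

trait Counter { var count = 0 }     // a concrete trait field: represented as count/count_= accessors
class CounterImpl extends Counter   // the mixin phase adds the field and concrete accessors here

object MixinFieldSketch {
  def main(args: Array[String]): Unit = {
    val c = new CounterImpl
    c.count = 41
    c.count += 1
    println(c.count)                // 42
  }
}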
@@ -343,7 +339,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// so it can no longer be found in the member's owner (the trait)
val accessed = atPhase(currentRun.picklerPhase)(member.accessed)
val sym = atPhase(currentRun.erasurePhase){ // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
- clazz.newValue(member.pos, nme.getterToLocal(member.name)).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
+ clazz.newValue(nme.getterToLocal(member.name), member.pos).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
}
sym.updateInfo(member.tpe.resultType) // info at current phase
addMember(clazz,
@@ -361,9 +357,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
} else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
// mixin objects: todo what happens with abstract objects?
- addMember(clazz, member.cloneSymbol(clazz))
- .setPos(clazz.pos)
- .resetFlag(DEFERRED | lateDEFERRED)
+ addMember(clazz, member.cloneSymbol(clazz, member.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
}
}
}
@@ -396,9 +390,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (sourceModule != NoSymbol) {
sourceModule setPos sym.pos
sourceModule.flags = MODULE | FINAL
- } else {
- sourceModule = clazz.owner.newModule(
- sym.pos, sym.name.toTermName, sym.asInstanceOf[ClassSymbol])
+ }
+ else {
+ sourceModule = (
+ clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE | FINAL)
+ setModuleClass sym.asInstanceOf[ClassSymbol]
+ )
clazz.owner.info.decls enter sourceModule
}
sourceModule setInfo sym.tpe
@@ -406,12 +403,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
parents1 = List()
- decls1 = new Scope(decls.toList filter isImplementedStatically)
+ decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
} else if (!parents.isEmpty) {
parents1 = parents.head :: (parents.tail map toInterface)
}
}
- //decls1 = atPhase(phase.next)(new Scope(decls1.toList))//debug
+ //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
if ((parents1 eq parents) && (decls1 eq decls)) tp
else ClassInfoType(parents1, decls1, clazz)
@@ -483,7 +480,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The rootContext used for typing */
private val rootContext =
- erasure.NoContext.make(EmptyTree, RootClass, new Scope)
+ erasure.NoContext.make(EmptyTree, RootClass, newScope)
/** The typer */
private var localTyper: erasure.Typer = _
@@ -530,9 +527,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (currentOwner.isImplClass) {
if (isImplementedStatically(sym)) {
sym setFlag notOVERRIDE
- self = sym.newValue(sym.pos, nme.SELF)
- .setFlag(PARAM)
- .setInfo(toInterface(currentOwner.typeOfThis));
+ self = sym.newValueParameter(nme.SELF, sym.pos) setInfo toInterface(currentOwner.typeOfThis)
val selfdef = ValDef(self) setType NoType
treeCopy.DefDef(tree, mods, name, tparams, List(selfdef :: vparams), tpt, rhs)
} else {
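The `self` parameter created above reflects how, on this compiler line, a concrete trait method body lives in a static method of an implementation class and receives the self reference explicitly. A hand-written analogue of the idea (the `Greeter$class` naming shown in the comment is only a sketch of the compiler's convention, not something to write yourself):

trait Greeter { def greet(name: String) = "hello " + name }
// conceptually compiled to an interface plus a static forwardee, roughly:
//   interface Greeter    { def greet(name: String): String }
//   class Greeter$class  { /* static */ def greet($this: Greeter, name: String) = "hello " + name }

object ImplClassSketch {
  def main(args: Array[String]): Unit =
    println((new Greeter {}).greet("scala"))   // hello scala
}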
@@ -742,7 +737,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
assert(!sym.isOverloaded, sym)
def createBitmap: Symbol = {
- val sym = clazz0.newVariable(clazz0.pos, bitmapName) setInfo IntClass.tpe
+ val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo IntClass.tpe
atPhase(currentRun.typerPhase)(sym addAnnotation VolatileAttr)
category match {
@@ -1058,7 +1053,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else accessedRef match {
case Literal(_) => accessedRef
case _ =>
- val init = Assign(accessedRef, Ident(sym.paramss.head.head))
+ val init = Assign(accessedRef, Ident(sym.firstParam))
val getter = sym.getter(clazz)
if (!needsInitFlag(getter)) init
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 70f8d37585..e49f8d7c0b 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -45,8 +45,14 @@ abstract class OverridingPairs {
* Types always match. Term symbols match if their member types
* relative to <base>.this do
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ protected def matches(sym1: Symbol, sym2: Symbol): Boolean = {
+ def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass
+ val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
+ sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
+
+ result
+ }
/** An implementation of BitSets as arrays (maybe consider collection.BitSet
* for that?) The main purpose of this is to implement
@@ -74,7 +80,7 @@ abstract class OverridingPairs {
}
/** The symbols that can take part in an overriding pair */
- private val decls = new Scope
+ private val decls = newScope
// fill `decls` with the overriding members, which shadow the members they override
{ def fillDecls(bcs: List[Symbol], deferredflag: Int) {
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 73d17458bf..f7d2c9de28 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -37,8 +37,8 @@ abstract class SampleTransform extends Transform {
Select( // The `Select` factory method is defined in class `Trees`
sup,
currentOwner.newValue( // creates a new term symbol owned by `currentowner`
- tree1.pos,
- newTermName("sample")))))) // The standard term name creator
+ newTermName("sample"), // The standard term name creator
+ tree1.pos)))))
case _ =>
tree1
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 9c4889eba9..8c34a9139d 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -6,12 +6,9 @@
package scala.tools.nsc
package transform
-
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
-
-
/** Specialize code on types.
*
* Make sure you've read the thesis:
@@ -64,28 +61,67 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
type TypeEnv = immutable.Map[Symbol, Type]
def emptyEnv: TypeEnv = Map[Symbol, Type]()
+
private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
import definitions.{
RootClass, BooleanClass, UnitClass, ArrayClass,
ScalaValueClasses, isValueClass, isScalaValueType,
SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass,
- AnyRefClass, ObjectClass, Predef_AnyRef,
- uncheckedVarianceClass
+ AnyRefClass, ObjectClass, AnyRefModule,
+ GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
}
+
+ /** TODO - this is a lot of maps.
+ */
+
+ /** For a given class and concrete type arguments, give its specialized class */
+ val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+
+ /** Map a method symbol to a list of its specialized overloads in the same class. */
+ private val overloads: mutable.Map[Symbol, List[Overload]] = mutable.HashMap[Symbol, List[Overload]]() withDefaultValue Nil
+
+ /** Map a symbol to additional information on specialization. */
+ private val info: mutable.Map[Symbol, SpecializedInfo] = perRunCaches.newMap[Symbol, SpecializedInfo]()
+
+ /** Map class symbols to the type environments where they were created. */
+ private val typeEnv = mutable.HashMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
+
+ // holds mappings from regular type parameter symbols to symbols of
+ // specialized type parameters which are subtypes of AnyRef
+ private val anyrefSpecCache = perRunCaches.newMap[Symbol, Symbol]()
+
+ // holds mappings from members to the type variables in the class
+ // that they were already specialized for, so that they don't get
+ // specialized twice (this is for AnyRef specializations)
+ private val wasSpecializedForTypeVars = perRunCaches.newMap[Symbol, Set[Symbol]]() withDefaultValue Set()
+
+ /** Concrete methods that use a specialized type, or override such methods. */
+ private val concreteSpecMethods = new mutable.HashSet[Symbol]()
+
private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass
private def hasSpecializedFlag(sym: Symbol) = sym hasFlag SPECIALIZED
private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized
- private def specializedOn(sym: Symbol) = sym getAnnotation SpecializedClass match {
- case Some(AnnotationInfo(_, args, _)) => args
- case _ => Nil
+ private def specializedOn(sym: Symbol): List[Symbol] = {
+ sym getAnnotation SpecializedClass match {
+ case Some(ann @ AnnotationInfo(_, args, _)) =>
+ args map (_.tpe) flatMap { tp =>
+ tp baseType GroupOfSpecializable match {
+ case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
+ arg.typeArgs map (_.typeSymbol)
+ case _ =>
+ List(tp.typeSymbol)
+ }
+ }
+ case _ => Nil
+ }
}
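For orientation, `specializedOn` reads the argument list of the `@specialized` annotation; plain type arguments are kept as-is, while with this change an argument whose type derives from `GroupOfSpecializable` expands to the group's members. A minimal usage sketch (hypothetical class names; the `Specializable.Primitives` group object named in the comment is the API this later surfaced as and is an assumption here):

class Box[@specialized(Int, Long) T](val value: T)      // explicit list of specialization types
// class Vec[@specialized(Specializable.Primitives) T]  // group form enabled by this change (assumed API)

object SpecializedUsageSketch {
  def main(args: Array[String]): Unit =
    println((new Box(42)).value)   // instantiating at Int picks the Int-specialized subclass
}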
// If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = (
- specializedOn(sym).exists(_.symbol == Predef_AnyRef) // specialized on AnyRef
+ (specializedOn(sym) contains AnyRefModule)
&& !isValueClass(tp.typeSymbol)
&& isBoundedGeneric(tp)
)
@@ -102,7 +138,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = {
ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args))
- emptyEnv ++ (sym.info.typeParams zip args filter (kv => isSpecialized(kv._1)))
+ emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, v) => isSpecialized(k))
}
/** Does typeenv `t1` include `t2`? All type variables in `t1`
@@ -135,21 +171,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
-
- /** Returns the generic class that was specialized to 'cls', or
- * 'cls' itself if cls is not a specialized subclass.
+ /** Returns the generic class that was specialized to 'sClass', or
+ * 'sClass' itself if sClass is not a specialized subclass.
*/
- def genericClass(cls: Symbol): Symbol =
- if (hasSpecializedFlag(cls)) cls.info.parents.head.typeSymbol
- else cls
-
- /** Map a method symbol to a list of its specialized overloads in the same class. */
- private val overloads: mutable.Map[Symbol, List[Overload]] =
- new mutable.HashMap[Symbol, List[Overload]] {
- override def default(key: Symbol): List[Overload] = Nil
- }
+ def genericClass(sClass: Symbol): Symbol =
+ if (hasSpecializedFlag(sClass)) sClass.superClass
+ else sClass
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
@@ -227,9 +254,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Map a symbol to additional information on specialization. */
- private val info: mutable.Map[Symbol, SpecializedInfo] = perRunCaches.newMap[Symbol, SpecializedInfo]()
-
/** Has `clazz` any type parameters that need be specialized? */
def hasSpecializedParams(clazz: Symbol) =
clazz.info.typeParams exists isSpecialized
@@ -255,7 +279,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- val args1 = (args zip sym.typeParams) map {
+ val args1 = map2(args, sym.typeParams) {
case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
case (tp, _) => tp
}
@@ -305,28 +329,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- lazy val primitiveTypes = ScalaValueClasses map (_.tpe)
+ lazy val specializableTypes = (ScalaValueClasses :+ AnyRefClass) map (_.tpe) sorted
+
+ /** If the symbol is the companion of a value class, the value class.
+ * Otherwise, AnyRef.
+ */
+ def specializesClass(sym: Symbol): Symbol = {
+ val c = sym.companionClass
+ if (isValueClass(c)) c else AnyRefClass
+ }
/** Return the types `sym` should be specialized at. This may be some of the primitive types
* or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a
* subtype of AnyRef (T <: AnyRef).
* These are in a meaningful order for stability purposes.
*/
- def concreteTypes(sym: Symbol): List[Type] = (
- if (!isSpecialized(sym)) Nil // no @specialized Annotation
- else specializedOn(sym) match {
- case Nil => primitiveTypes // specialized on everything
- case args => // specialized on args
- (args map { tp =>
- if (tp.symbol == Predef_AnyRef) {
- if (isBoundedGeneric(sym.tpe))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
- AnyRefClass.tpe
- }
- else tp.symbol.companionClass.tpe
- }).sorted
- }
- )
+ def concreteTypes(sym: Symbol): List[Type] = {
+ val types = (
+ if (!isSpecialized(sym)) Nil // no @specialized Annotation
+ else specializedOn(sym) match {
+ case Nil => specializableTypes // specialized on everything
+ case args => args map (s => specializesClass(s).tpe) sorted // specialized on args
+ }
+ )
+ if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+
+ types
+ }
/** Return a list of all type environments for all specializations
* of @specialized types in `tps`.
@@ -341,7 +371,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
// zip the keys with each permutation to create a TypeEnv
- loop(keys map concreteTypes) map (keys zip _ toMap)
+ loop(keys map concreteTypes) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -398,25 +428,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => Set()
}
- // holds mappings from regular type parameter symbols to symbols of
- // specialized type parameters which are subtypes of AnyRef
- private val anyrefSpecCache = perRunCaches.newMap[Symbol, Symbol]()
-
- /** Returns the type parameter in the specialized class `cls` that corresponds to type parameter
+ /** Returns the type parameter in the specialized class `clazz` that corresponds to type parameter
* `sym` in the original class. It will create it if needed or use the one from the cache.
*/
- private def typeParamSubAnyRef(sym: Symbol, cls: Symbol) = (
+ private def typeParamSubAnyRef(sym: Symbol, clazz: Symbol) = (
anyrefSpecCache.getOrElseUpdate(sym,
- cls.newTypeParameter(sym.pos, sym.name append nme.SPECIALIZED_SUFFIX_NAME toTypeName)
+ clazz.newTypeParameter(sym.name append nme.SPECIALIZED_SUFFIX_NAME toTypeName, sym.pos)
setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
).tpe
)
/** Cleans the anyrefSpecCache of all type parameter symbols of a class.
*/
- private def cleanAnyRefSpecCache(cls: Symbol, decls: List[Symbol]) = (
+ private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) = (
// remove class type parameters and those of normalized members.
- cls :: decls foreach {
+ clazz :: decls foreach {
_.tpe match {
case PolyType(tparams, _) => anyrefSpecCache --= tparams
case _ => ()
@@ -424,12 +450,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
)
- // holds mappings from members to the type variables in the class
- // that they were already specialized for, so that they don't get
- // specialized twice (this is for AnyRef specializations)
- private val wasSpecializedForTypeVars =
- perRunCaches.newMap[Symbol, immutable.Set[Symbol]]() withDefaultValue immutable.Set[Symbol]()
-
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
params.filter(p => !isSpecialized(p) || !isScalaValueType(env(p)))
@@ -445,7 +465,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def produceTypeParameters(syms: List[Symbol], nowner: Symbol, env: TypeEnv) = {
val cloned = for (s <- syms) yield if (!env.contains(s)) s.cloneSymbol(nowner) else env(s).typeSymbol
// log("producing type params: " + cloned.map(t => (t, t.tpe.bounds.hi)))
- for ((orig, cln) <- syms zip cloned) {
+ foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
if (env.contains(orig))
cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
@@ -479,38 +499,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = {
def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = {
- val cls = clazz.owner.newClass(clazz.pos, specializedName(clazz, env0).toTypeName)
- .setFlag(SPECIALIZED | clazz.flags)
- .resetFlag(CASE)
- cls.sourceFile = clazz.sourceFile
- currentRun.symSource(cls) = clazz.sourceFile // needed later on by mixin
-
- val env = mapAnyRefsInSpecSym(env0, clazz, cls)
-
- typeEnv(cls) = env
- this.specializedClass((clazz, env0)) = cls
+      /** It gets hard to follow all the clazz and cls variables, and specializedClass
+       * is both already used for a map and rather long, so "sClass" denotes the
+       * specialized subclass of "clazz" throughout this file.
+ */
+ val sClass = clazz.owner.newClass(specializedName(clazz, env0).toTypeName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
- // declarations of the newly specialized class 'cls'
- val decls1 = new Scope
+ def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long) =
+ member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED))
+
+ sClass.sourceFile = clazz.sourceFile
+ currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
- // original unspecialized type parameters
- var oldClassTParams: List[Symbol] = Nil
+ val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
+ typeEnv(sClass) = env
+ this.specializedClass((clazz, env0)) = sClass
- // unspecialized type parameters of 'cls' (cloned)
- var newClassTParams: List[Symbol] = Nil
+ val decls1 = newScope // declarations of the newly specialized class 'sClass'
+ var oldClassTParams: List[Symbol] = Nil // original unspecialized type parameters
+ var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned)
// has to be a val in order to be computed early. It is later called
// within 'atPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
- // val (_, unspecParams) = splitParams(clazz.info.typeParams)
- // oldClassTParams = unspecParams
- val survivedParams = survivingParams(clazz.info.typeParams, env)
- oldClassTParams = survivedParams
- newClassTParams = produceTypeParameters(survivedParams, cls, env) map subst(env)
+ oldClassTParams = survivingParams(clazz.info.typeParams, env)
+ newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
// log("new tparams " + newClassTParams.zip(newClassTParams map {s => (s.tpe, s.tpe.bounds.hi)}) + ", in env: " + env)
def applyContext(tpe: Type) =
- subst(env, tpe).instantiateTypeParams(survivedParams, newClassTParams map (_.tpe))
+ subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))
/** Return a list of specialized parents to be re-mixed in a specialized subclass.
* Assuming env = [T -> Int] and
@@ -520,25 +537,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* so that class Integral$mci extends Integral[Int] with Numeric$mcI.
*/
def specializedParents(parents: List[Type]): List[Type] = {
- val res = new mutable.ListBuffer[Type]
- // log(cls + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
+ var res: List[Type] = Nil
+ // log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- // log(p.typeSymbol)
val stp = atPhase(phase.next)(specializedType(p))
if (stp != p)
- if (p.typeSymbol.isTrait) res += stp
+ if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
reporter.warning(clazz.pos, p.typeSymbol + " must be a trait. Specialized version of "
+ clazz + " will inherit generic " + p) // TODO change to error
}
- res.reverse.toList
+ res
}
var parents = List(applyContext(atPhase(currentRun.typerPhase)(clazz.tpe)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
- val extraSpecializedMixins = specializedParents(clazz.info.parents.map(applyContext))
+ val extraSpecializedMixins = specializedParents(clazz.info.parents map applyContext)
if (extraSpecializedMixins.nonEmpty)
debuglog("specializeClass on " + clazz + " founds extra specialized mixins: " + extraSpecializedMixins.mkString(", "))
// If the class being specialized has a self-type, the self type may
@@ -547,17 +563,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// already be covered. Then apply the current context to the self-type
// as with the parents and assign it to typeOfThis.
if (clazz.typeOfThis.typeConstructor ne clazz.typeConstructor) {
- cls.typeOfThis = applyContext(clazz.typeOfThis)
+ sClass.typeOfThis = applyContext(clazz.typeOfThis)
log("Rewriting self-type for specialized class:\n" +
- " " + clazz.defStringSeenAs(clazz.typeOfThis) + "\n" +
- " => " + cls.defStringSeenAs(cls.typeOfThis)
+ " " + clazz.defStringSeenAs(clazz.typeOfThis) + "\n" +
+ " => " + sClass.defStringSeenAs(sClass.typeOfThis)
)
}
- val infoType = ClassInfoType(parents ::: extraSpecializedMixins, decls1, cls)
- if (newClassTParams.isEmpty) infoType else PolyType(newClassTParams, infoType)
+ polyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- atPhase(phase.next)(cls.setInfo(specializedInfoType))
+ atPhase(phase.next)(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
/** Enter 'sym' in the scope of the current specialized class. Its type is
@@ -567,14 +582,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
- sym modifyInfo (_.substThis(clazz, cls).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)))
+ sym modifyInfo (_.substThis(clazz, sClass).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)))
// we remove any default parameters. At this point, they have all been
// resolved by the type checker. Later on, erasure re-typechecks everything and
// chokes if it finds default parameters for specialized members, even though
// they are never needed.
- sym.info.paramss.flatten foreach (_.resetFlag(DEFAULTPARAM))
-
- decls1.enter(subst(fullEnv)(sym))
+ mapParamss(sym)(_ resetFlag DEFAULTPARAM)
+ decls1 enter subst(fullEnv)(sym)
}
/** Create and enter in scope an overridden symbol m1 for `m` that forwards
@@ -591,13 +605,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
*/
def forwardToOverload(m: Symbol): Symbol = {
- val specMember = (
- enterMember(m cloneSymbol cls)
- setFlag (OVERRIDE | SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR)
- ) // m1
-
- val om = specializedOverload(cls, m, env).setFlag(OVERRIDE)
+ val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR)))
+ val om = specializedOverload(sClass, m, env).setFlag(OVERRIDE)
val original = info.get(m) match {
case Some(NormalizedMember(tg)) => tg
case _ => m
@@ -618,7 +627,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
m.resetFlag(PRIVATE).setFlag(PROTECTED)
if (m.isConstructor) {
- val specCtor = enterMember(m.cloneSymbol(cls) setFlag SPECIALIZED)
+ val specCtor = enterMember(cloneInSpecializedClass(m, x => x))
info(specCtor) = Forward(m)
}
else if (isNormalizedMember(m)) { // methods added by normalization
@@ -626,7 +635,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (nonConflicting(env ++ typeEnv(m))) {
if (info(m).degenerate) {
debuglog("degenerate normalized member " + m + " info(m): " + info(m))
- val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
+ val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED))
info(specMember) = Implementation(original)
typeEnv(specMember) = env ++ typeEnv(m)
@@ -640,7 +649,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
log("conflicting env for " + m + " env: " + env)
}
else if (m.isDeferred) { // abstract methods
- val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).setFlag(DEFERRED)
+ val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED))
debuglog("deferred " + specMember.fullName + " remains abstract")
info(specMember) = new Abstract(specMember)
@@ -653,24 +662,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else if (m.isValue && !m.isMethod) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
- val sym = (
- cls.newMethod(field.pos, name)
- setFlag (SPECIALIZED | m.getter(clazz).flags)
- resetFlag (LOCAL | PARAMACCESSOR | CASEACCESSOR | LAZY)
- // we rely on the super class to initialize param accessors
- )
+ val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR | LAZY)
+ // we rely on the super class to initialize param accessors
+ val sym = sClass.newMethod(name, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
def overrideIn(clazz: Symbol, sym: Symbol) = {
- val sym1 = (
- sym cloneSymbol clazz
- setFlag (OVERRIDE | SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
- )
+ val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
+ val sym1 = sym.cloneSymbol(clazz, newFlags)
sym1 modifyInfo (_ asSeenFrom (clazz.tpe, sym1.owner))
}
- val specVal = specializedOverload(cls, m, env)
+ val specVal = specializedOverload(sClass, m, env)
addConcreteSpecMethod(m)
specVal.asInstanceOf[TermSymbol].setAlias(m)
@@ -679,15 +682,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// create accessors
debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
- val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)).setInfo(MethodType(List(), specVal.info))
- val origGetter = overrideIn(cls, m.getter(clazz))
+ val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+ val origGetter = overrideIn(sClass, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
enterMember(origGetter)
debuglog("created accessors: " + specGetter + " orig: " + origGetter)
clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa =>
- val cfaGetter = overrideIn(cls, cfa)
+ val cfaGetter = overrideIn(sClass, cfa)
info(cfaGetter) = SpecializedAccessor(specVal)
enterMember(cfaGetter)
debuglog("found case field accessor for " + m + " added override " + cfaGetter);
@@ -698,7 +701,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
UnitClass.tpe))
- val origSetter = overrideIn(cls, m.setter(clazz))
+ val origSetter = overrideIn(sClass, m.setter(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
enterMember(origSetter)
@@ -708,7 +711,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specVal.resetFlag(PRIVATE)
}
} else if (m.isClass) {
- val specClass: Symbol = m.cloneSymbol(cls).setFlag(SPECIALIZED)
+ val specClass: Symbol = cloneInSpecializedClass(m, x => x)
typeEnv(specClass) = fullEnv
specClass.name = specializedName(specClass, fullEnv).toTypeName
enterMember(specClass)
@@ -716,7 +719,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(specClass) = SpecializedInnerClass(m, fullEnv)
}
}
- cls
+ sClass
}
val decls1 = clazz.info.decls.toList flatMap { m: Symbol =>
@@ -784,7 +787,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
val tps = survivingParams(sym.info.typeParams, env0)
- val specMember = sym.cloneSymbol(owner).setFlag(SPECIALIZED).resetFlag(DEFERRED)
+ val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED)
val env = mapAnyRefsInSpecSym(env0, sym, specMember)
val (keys, vals) = env.toList.unzip
@@ -829,7 +832,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
log("-->d SETTING PRIVATE WITHIN TO " + sym.enclosingPackage + " for " + sym)
}
- sym.resetFlag(FINAL)
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -858,14 +860,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the specialized overload of `m`, in the given environment. */
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
- val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
+ // this method properly duplicates the symbol's info
+ val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | ACCESSOR | LAZY))
specMember.name = specializedName(sym, env)
-
- (specMember
- modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
- setFlag (SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
- )
+ specMember modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
}
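Concretely, a specialized overload is a sibling method whose signature uses the concrete primitive type; the mangled `$mcI$sp` name in the comment below is the compiler's naming convention and is shown only for orientation. A small sketch with hypothetical names:

class Id[@specialized(Int) T] {
  def apply(x: T): T = x
  // generated alongside it, roughly:  def apply$mcI$sp(x: Int): Int = x
}

object SpecializedOverloadSketch {
  def main(args: Array[String]): Unit = {
    val id = new Id[Int]
    println(id(5))   // resolves to the Int specialization at runtime
  }
}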
/** For each method m that overrides an inherited method m', add a special
@@ -889,7 +887,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
- for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams)) {
+ foreach2(overridden.info.typeParams, overriding.info.typeParams) { (baseTvar, derivedTvar) =>
val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet
if (missing.nonEmpty) {
reporter.error(derivedTvar.pos,
@@ -1024,9 +1022,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Map class symbols to the type environments where they were created. */
- private val typeEnv = mutable.HashMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
-
/** Apply type bindings in the given environment `env` to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
decls map subst(env)
@@ -1106,7 +1101,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (tparams.nonEmpty) " (poly)" else "",
clazz, parents1, phase)
)
- val newScope = new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz))
+ val newScope = newScopeWith(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz): _*)
// If tparams.isEmpty, this is just the ClassInfoType.
polyType(tparams, ClassInfoType(parents1, newScope, clazz))
case _ =>
@@ -1213,11 +1208,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
clazz.owner.info.decl(originalName).suchThat(_.isClass)
} else NoSymbol
- def illegalSpecializedInheritance(clazz: Symbol): Boolean = {
- hasSpecializedFlag(clazz) && originalClass(clazz).info.parents.exists { p =>
- hasSpecializedParams(p.typeSymbol) && !p.typeSymbol.isTrait
- }
- }
+ def illegalSpecializedInheritance(clazz: Symbol): Boolean = (
+ hasSpecializedFlag(clazz)
+ && originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait)
+ )
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
/** Map a specializable method to its rhs, when not deferred. */
@@ -1235,7 +1229,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = vparamss map (_ map (_.symbol))
+ parameters(tree.symbol) = mmap(vparamss)(_.symbol)
concreteSpecMethods -= tree.symbol
} // no need to descend further down inside method bodies
@@ -1276,7 +1270,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
.format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
if (!env.isEmpty) { // a method?
val specCandidates = qual.tpe.member(specializedName(symbol, env))
- val specMember = specCandidates suchThat (s => doesConform(symbol, tree.tpe, s.tpe, env))
+ val specMember = specCandidates suchThat { s =>
+ doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
+ }
+
log("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
log("[specSym] found specMember: " + specMember)
if (specMember ne NoSymbol)
@@ -1391,9 +1388,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
- val parents1 = currentOwner.info.parents.zipWithIndex.map {
- case (tpe, idx) => TypeTree(tpe) setPos parents(idx).pos
- }
+ val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) =>
+ TypeTree(tpe) setPos parent.pos)
+
treeCopy.Template(tree,
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
@@ -1589,43 +1586,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
}
- /** Create trees for specialized members of 'cls', based on the
+ /** Create trees for specialized members of 'sClass', based on the
* symbols that are already there.
*/
- private def makeSpecializedMembers(cls: Symbol): List[Tree] = {
+ private def makeSpecializedMembers(sClass: Symbol): List[Tree] = {
// add special overrides first
-// if (!cls.hasFlag(SPECIALIZED))
-// for (m <- specialOverrides(cls)) cls.info.decls.enter(m)
+// if (!specializedClass.hasFlag(SPECIALIZED))
+// for (m <- specialOverrides(specializedClass)) specializedClass.info.decls.enter(m)
val mbrs = new mutable.ListBuffer[Tree]
var hasSpecializedFields = false
- for (m <- cls.info.decls
+ for (m <- sClass.info.decls
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
- && satisfiable(typeEnv(m), !cls.hasFlag(SPECIALIZED))) {
+ && satisfiable(typeEnv(m), !sClass.hasFlag(SPECIALIZED))) {
log("creating tree for " + m.fullName)
if (m.isMethod) {
if (info(m).target.hasAccessorFlag) hasSpecializedFields = true
if (m.isClassConstructor) {
val origParamss = parameters(info(m).target)
val vparams = (
- for ((tp, sym) <- m.info.paramTypes zip origParamss(0)) yield (
- m.newValue(sym.pos, specializedName(sym, typeEnv(cls)))
- .setInfo(tp)
- .setFlag(sym.flags)
+ map2(m.info.paramTypes, origParamss(0))((tp, sym) =>
+ m.newValue(specializedName(sym, typeEnv(sClass)), sym.pos, sym.flags) setInfo tp
)
)
// param accessors for private members (the others are inherited from the generic class)
if (m.isPrimaryConstructor) {
- for (param <- vparams ; if cls.info.nonPrivateMember(param.name) == NoSymbol) {
- val acc = param.cloneSymbol(cls).setFlag(PARAMACCESSOR | PRIVATE)
- cls.info.decls.enter(acc)
+ for (param <- vparams ; if sClass.info.nonPrivateMember(param.name) == NoSymbol) {
+ val acc = param.cloneSymbol(sClass, param.flags | PARAMACCESSOR | PRIVATE)
+ sClass.info.decls.enter(acc)
mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos)
}
}
// ctor
- mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), List(vparams) map (_ map ValDef), EmptyTree))
+ mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
} else {
mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
}
@@ -1639,10 +1634,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
if (hasSpecializedFields) {
- val isSpecializedInstance = cls.hasFlag(SPECIALIZED) || cls.info.parents.exists(_.typeSymbol.hasFlag(SPECIALIZED))
- val sym = cls.newMethod(nme.SPECIALIZED_INSTANCE, cls.pos)
- .setInfo(MethodType(Nil, BooleanClass.tpe))
- cls.info.decls.enter(sym)
+ val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
+ val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+
mbrs += atPos(sym.pos) {
DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
}
@@ -1671,7 +1665,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
private def forwardCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
- val argss = paramss map (_ map (x => Ident(x.symbol)))
+ val argss = mmap(paramss)(x => Ident(x.symbol))
atPos(pos) { (receiver /: argss) (Apply) }
}
@@ -1708,18 +1702,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
&& clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
)
- val argss = paramss map (_ map (x =>
+ val argss = mmap(paramss)(x =>
if (initializesSpecializedField(x.symbol))
gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe)
else
- Ident(x.symbol))
+ Ident(x.symbol)
)
atPos(pos) { (receiver /: argss) (Apply) }
}
- /** Concrete methods that use a specialized type, or override such methods. */
- private val concreteSpecMethods = new mutable.HashSet[Symbol]()
-
/** Add method m to the set of symbols for which we need an implementation tree
* in the tree transformer.
*
@@ -1732,8 +1723,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = (
//! TODO: make sure the param types are seen from the right prefix
- for ((tp, arg) <- fun.info.paramTypes zip vparams) yield
- gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)
+ map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
)
private def findSpec(tp: Type): Type = tp match {
case TypeRef(pre, sym, _ :: _) => specializedType(tp)
@@ -1742,9 +1732,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
- override def transform(tree: Tree) =
- if (settings.nospecialization.value) tree
+ override def transform(tree: Tree) = {
+ val resultTree = if (settings.nospecialization.value) tree
else atPhase(phase.next)(specializeCalls(unit).transform(tree))
+
+ // Remove the final modifier and @inline annotation from anything in the
+ // original class (since it's being overridden in at least one subclass).
+ //
+ // We do this here so that the specialized subclasses will correctly copy
+ // final and @inline.
+ info.foreach {
+ case (sym, SpecialOverload(target, _)) => {
+ sym.resetFlag(FINAL)
+ target.resetFlag(FINAL)
+ sym.removeAnnotation(ScalaInlineClass)
+ target.removeAnnotation(ScalaInlineClass)
+ }
+ case _ => {}
+ }
+
+ resultTree
+ }
}
def printSpecStats() {
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index ca16e491e2..1655ad09c4 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -128,7 +128,7 @@ abstract class TailCalls extends Transform {
* the label field.
*/
this.label = {
- val label = method.newLabel(method.pos, "_" + method.name)
+ val label = method.newLabel(newTermName("_" + method.name), method.pos)
val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
}
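The label symbol built above is the jump target that lets a tail-recursive call be compiled as a loop rather than a method call. A self-contained sketch of the effect of the transformation (a hand-written loop with hypothetical names, not the compiler's label trees):

object TailCallEffectSketch {
  import scala.annotation.tailrec
  @tailrec def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b)

  // Roughly what the compiled code does: the parameters become mutable slots
  // and the recursive call becomes a jump back to the top of the loop.
  def gcdLoop(a0: Int, b0: Int): Int = {
    var a = a0; var b = b0
    while (b != 0) { val t = a % b; a = b; b = t }
    a
  }

  def main(args: Array[String]): Unit =
    println((gcd(252, 105), gcdLoop(252, 105)))  // (21,21)
}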
@@ -144,7 +144,7 @@ abstract class TailCalls extends Transform {
def isTransformed = isEligible && accessed
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
- def newThis(pos: Position) = method.newValue(pos, nme.THIS) setInfo currentClass.typeOfThis setFlag SYNTHETIC
+ def newThis(pos: Position) = method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
override def toString(): String = (
"" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 90f46206c5..ab4a2141a9 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -133,11 +133,9 @@ abstract class UnCurry extends InfoTransform
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
- nonLocalReturnKeys.getOrElseUpdate(meth, {
- meth.newValue(meth.pos, unit.freshTermName("nonLocalReturnKey"))
- .setFlag (SYNTHETIC)
- .setInfo (ObjectClass.tpe)
- })
+ nonLocalReturnKeys.getOrElseUpdate(meth,
+ meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+ )
/** Generate a non-local return throw with given return expression from given method.
* I.e. for the method's non-local return key, generate:
@@ -170,7 +168,7 @@ abstract class UnCurry extends InfoTransform
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
localTyper.typed {
val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
- val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
+ val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
val pat = Bind(ex,
Typed(Ident(nme.WILDCARD),
AppliedTypeTree(Ident(NonLocalReturnControlClass),
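The surrounding key/try machinery is how `return` from inside a closure is implemented: the closure throws a control exception tagged with the enclosing method's key, and the generated try in that method catches it. A hand-written analogue using a toy exception class and hypothetical names (not scala.runtime.NonLocalReturnControl):

object NonLocalReturnSketch {
  class Return[A](val key: AnyRef, val value: A) extends RuntimeException

  def firstEven(xs: List[Int]): Option[Int] = {
    val key = new AnyRef                       // plays the role of the non-local return key
    try {
      xs foreach (x => if (x % 2 == 0) throw new Return(key, Some(x)))
      None
    } catch {
      case r: Return[_] if r.key eq key => r.value.asInstanceOf[Option[Int]]
    }
  }

  def main(args: Array[String]): Unit =
    println(firstEven(List(1, 3, 4, 5)))       // Some(4)
}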
@@ -255,40 +253,42 @@ abstract class UnCurry extends InfoTransform
if (fun1 ne fun) fun1
else {
val (formals, restpe) = (targs.init, targs.last)
- val anonClass = owner newAnonymousFunctionClass fun.pos setFlag (FINAL | SYNTHETIC | inConstructorFlag)
+ val anonClass = owner.newAnonymousFunctionClass(fun.pos, inConstructorFlag)
def parents =
if (isFunctionType(fun.tpe)) List(abstractFunctionForFunctionType(fun.tpe), SerializableClass.tpe)
else if (isPartial) List(appliedType(AbstractPartialFunctionClass.typeConstructor, targs), SerializableClass.tpe)
else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe)
- anonClass setInfo ClassInfoType(parents, new Scope, anonClass)
- val applyMethod = anonClass.newMethod(fun.pos, nme.apply) setFlag FINAL
- applyMethod setInfo MethodType(applyMethod newSyntheticValueParams formals, restpe)
- anonClass.info.decls enter applyMethod
- anonClass.addAnnotation(serialVersionUIDAnnotation)
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+ val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL)
+ applyMethod setInfoAndEnter MethodType(applyMethod newSyntheticValueParams formals, restpe)
+ anonClass addAnnotation serialVersionUIDAnnotation
fun.vparams foreach (_.symbol.owner = applyMethod)
- new ChangeOwnerTraverser(fun.symbol, applyMethod) traverse fun.body
+ fun.body.changeOwner(fun.symbol -> applyMethod)
def missingCaseCall(scrutinee: Tree): Tree = Apply(Select(This(anonClass), nme.missingCase), List(scrutinee))
def applyMethodDef() = {
- val body =
+ val body = localTyper.typedPos(fun.pos) {
if (isPartial) gen.mkUncheckedMatch(gen.withDefaultCase(fun.body, missingCaseCall))
else fun.body
- DefDef(Modifiers(FINAL), nme.apply, Nil, List(fun.vparams), TypeTree(restpe), body) setSymbol applyMethod
+ }
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ val applyResultType = localTyper.packedType(body, applyMethod)
+ DefDef(Modifiers(FINAL), nme.apply, Nil, List(fun.vparams), TypeTree(applyResultType), body) setSymbol applyMethod
}
def isDefinedAtMethodDef() = {
val isDefinedAtName = {
if (anonClass.info.member(nme._isDefinedAt) != NoSymbol) nme._isDefinedAt
else nme.isDefinedAt
}
- val m = anonClass.newMethod(fun.pos, isDefinedAtName) setFlag FINAL
- m setInfo MethodType(m newSyntheticValueParams formals, BooleanClass.tpe)
- anonClass.info.decls enter m
- val vparam = fun.vparams.head.symbol
- val idparam = m.paramss.head.head
- val substParam = new TreeSymSubstituter(List(vparam), List(idparam))
+ val m = anonClass.newMethod(isDefinedAtName, fun.pos, FINAL)
+ val params = m newSyntheticValueParams formals
+ m setInfoAndEnter MethodType(params, BooleanClass.tpe)
+
+ val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
// waiting here until we can mix case classes and extractors reliably (i.e., when virtpatmat becomes the default)
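For orientation, the `anonClass`/`applyMethod` machinery above expands a function literal into an anonymous class with an `apply` method. A hand-written analogue of that expansion on this compiler line (a sketch of the shape with a hypothetical object name, not the generated trees):

object ClosureExpansionSketch {
  val inc: Int => Int = (x: Int) => x + 1

  // Roughly the shape produced for `inc`:
  val incExpanded: Int => Int = new scala.runtime.AbstractFunction1[Int, Int] {
    final def apply(x: Int): Int = x + 1
  }

  def main(args: Array[String]): Unit =
    println((inc(41), incExpanded(41)))  // (42,42)
}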
@@ -357,7 +357,7 @@ abstract class UnCurry extends InfoTransform
case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat =>
object noOne extends Transformer {
override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = tgt.tpe member "one".toTermName
+ val one = tgt.tpe member newTermName("one")
override def transform(tree: Tree): Tree = tree match {
case Apply(fun, List(a)) if fun.symbol == one =>
// blow one's argument away since all we want to know is whether the match succeeds or not
@@ -367,7 +367,7 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
}
}
- substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member("isSuccess".toTermName)), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
+ substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
// for the optimized version of virtpatmat
case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) if opt.virtPatmat =>
dupVirtMatch(zero, x, matchRes, keepGoing, stats)
@@ -422,7 +422,7 @@ abstract class UnCurry extends InfoTransform
if (tp.typeSymbol.isBottomClass) getManifest(AnyClass.tpe)
else if (!manifestOpt.tree.isEmpty) manifestOpt.tree
else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi)
- else localTyper.getManifestTree(tree.pos, tp, false)
+ else localTyper.getManifestTree(tree, tp, false)
}
atPhase(phase.next) {
localTyper.typedPos(pos) {
@@ -452,7 +452,7 @@ abstract class UnCurry extends InfoTransform
atPhase(phase.next) {
if (isJava && isPrimitiveArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
suffix = localTyper.typedPos(pos) {
- gen.mkRuntimeCall("toObjectArray", List(suffix))
+ gen.mkRuntimeCall(nme.toObjectArray, List(suffix))
}
}
}
@@ -461,7 +461,7 @@ abstract class UnCurry extends InfoTransform
val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args
- (formals, args1).zipped map { (formal, arg) =>
+ map2(formals, args1) { (formal, arg) =>
if (!isByNameParamType(formal)) {
arg
} else if (isByNameRef(arg)) {
@@ -516,9 +516,9 @@ abstract class UnCurry extends InfoTransform
*/
def liftTree(tree: Tree) = {
debuglog("lifting tree at: " + (tree.pos))
- val sym = currentOwner.newMethod(tree.pos, unit.freshTermName("liftedTree"))
+ val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos)
sym.setInfo(MethodType(List(), tree.tpe))
- new ChangeOwnerTraverser(currentOwner, sym).traverse(tree)
+ tree.changeOwner(currentOwner -> sym)
localTyper.typedPos(tree.pos)(Block(
List(DefDef(sym, List(Nil), tree)),
Apply(Ident(sym), Nil)
@@ -771,8 +771,8 @@ abstract class UnCurry extends InfoTransform
case p => p.symbol.tpe
}
val forwresult = dd.symbol.tpe.finalResultType
- val forwformsyms = (forwformals, flatparams).zipped map ((tp, oldparam) =>
- currentClass.newValueParameter(oldparam.symbol.pos, oldparam.name).setInfo(tp)
+ val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
+ currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
)
def mono = MethodType(forwformsyms, forwresult)
val forwtype = dd.symbol.tpe match {
@@ -781,15 +781,11 @@ abstract class UnCurry extends InfoTransform
}
// create the symbol
- val forwsym = (
- currentClass.newMethod(dd.pos, dd.name)
- . setFlag (VARARGS | SYNTHETIC | flatdd.symbol.flags)
- . setInfo (forwtype)
- )
+ val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = (forwsym ARGS, flatparams).zipped map {
+ val locals = map2(forwsym ARGS, flatparams) {
case (_, fp) if !rpsymbols(fp.symbol) => null
case (argsym, fp) =>
Block(Nil,
@@ -799,7 +795,7 @@ abstract class UnCurry extends InfoTransform
)
)
}
- val seqargs = (locals, forwsym ARGS).zipped map {
+ val seqargs = map2(locals, forwsym ARGS) {
case (null, argsym) => Ident(argsym)
case (l, _) => l
}
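The symbol and tree built here back the `@varargs` forwarders; roughly, the user-level effect is the following (illustrative sketch, not taken from this patch):

import scala.annotation.varargs

class Logger {
  // For this definition the compiler synthesizes a second, VARARGS | SYNTHETIC
  // method taking a Java-style variable-length argument list, so Java code can
  // call logger.log("a", "b") directly instead of constructing a Seq.
  @varargs
  def log(msgs: String*): Unit = msgs foreach println
}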
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 16d55c26ca..18c7635b1e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -23,6 +23,7 @@ trait Analyzer extends AnyRef
with Macros
with NamesDefaults
with TypeDiagnostics
+ with ContextErrors
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
new file mode 100644
index 0000000000..6ee09d064f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -0,0 +1,1056 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.util.StringOps.{ countElementsAsString, countAsString }
+import symtab.Flags.{ PRIVATE, PROTECTED }
+import scala.tools.util.EditDistance.similarString
+
+trait ContextErrors {
+ self: Analyzer =>
+
+ import global._
+
+ object ErrorKinds extends Enumeration {
+ type ErrorKind = Value
+ val Normal, Access, Ambiguous, Divergent = Value
+ }
+
+ import ErrorKinds.ErrorKind
+
+ trait AbsTypeError extends Throwable {
+ def errPos: Position
+ def errMsg: String
+ def kind: ErrorKind
+ }
+
+ case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+
+ def errPos: Position = underlyingTree.pos
+ override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+ }
+
+ case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+
+ def errPos = underlyingSym.pos
+ }
+
+ case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+ def errMsg = ex.msg
+ def errPos = ex.pos
+ }
+
+ case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+ def errMsg = ex.msg
+ def errPos = tree.pos
+ }
+
+ case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+
+ case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+
+ object ErrorUtils {
+ def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
+ issueTypeError(NormalTypeError(tree, msg))
+ }
+
+ def issueSymbolTypeError(sym: Symbol, msg: String)(implicit context: Context) {
+ issueTypeError(SymbolTypeError(sym, msg))
+ }
+
+ def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
+ issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
+ }
+
+ def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
+ context.issueAmbiguousError(pre, sym1, sym2, err)
+ }
+
+ def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
+
+ def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
+ def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+ "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
+ }
+ }
+
+ import ErrorUtils._
+
+ trait TyperContextErrors {
+ self: Typer =>
+
+ import infer.setError
+
+ object TyperErrorGen {
+ implicit val context0: Context = infer.getContext
+
+ def UnstableTreeError(tree: Tree) = {
+ def addendum = {
+ "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile."
+ }
+ issueNormalTypeError(tree,
+ "stable identifier required, but "+tree+" found." + (
+ if (isStableExceptVolatile(tree)) addendum else ""))
+ setError(tree)
+ }
+
+ def NoImplicitFoundError(tree: Tree, param: Symbol) = {
+ def errMsg = {
+ val paramName = param.name
+ val paramTp = param.tpe
+ paramTp.typeSymbol match {
+ case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+ case _ =>
+ "could not find implicit value for "+
+ (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
+ else "parameter "+paramName+": ")+paramTp
+ }
+ }
+ issueNormalTypeError(tree, errMsg)
+ }
+
+ def AdaptTypeError(tree: Tree, found: Type, req: Type) = {
+ // If the expected type is a refinement type, and the found type is a refinement or an anon
+ // class, we can greatly improve the error message by retyping the tree to recover the actual
+ // members present, then display along with the expected members. This is done here because
+ // this is the last point where we still have access to the original tree, rather than just
+ // the found/req types.
+ val foundType: Type = req.normalize match {
+ case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
+ val retyped = typed (tree.duplicate setType null)
+ val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
+
+ if (foundDecls.isEmpty) found
+ else {
+ // The members arrive marked private, presumably because there was no
+ // expected type and so they're considered members of an anon class.
+ foundDecls foreach (_ resetFlag (PRIVATE | PROTECTED))
+ // TODO: if any of the found parents match up with required parents after normalization,
+ // print the error so that they match. The major beneficiary there would be
+ // java.lang.Object vs. AnyRef.
+ refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos)
+ }
+ case _ =>
+ found
+ }
+ assert(!found.isErroneous && !req.isErroneous, (found, req))
+
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
+ if (settings.explaintypes.value)
+ explainTypes(found, req)
+ }
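As an illustration of the retyping described in the comment above, a structural-type mismatch of the following shape (hypothetical user code) benefits from listing the members actually found rather than printing an opaque anonymous-class type:

object StructuralMismatch {
  type CloseableAndFlushable = { def close(): Unit; def flush(): Unit }

  // The anonymous class below only defines close(), so the assignment is
  // rejected; with the retyping above, the error can show the found member
  // (close()) alongside the expected refinement.
  // val r: CloseableAndFlushable = new { def close(): Unit = () }  // type mismatch
}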
+
+ def WithFilterError(tree: Tree, ex: AbsTypeError) = {
+ issueTypeError(ex)
+ setError(tree)
+ }
+
+ def ParentTypesError(templ: Template, ex: TypeError) = {
+ templ.tpe = null
+ issueNormalTypeError(templ, ex.getMessage())
+ }
+
+ // additional parentTypes errors
+ def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+ issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
+
+ def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
+ issueNormalTypeError(supertpt, "missing type arguments")
+
+ // typedIdent
+ def AmbiguousIdentError(tree: Tree, name: Name, msg: String) =
+ NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg)
+
+ def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = {
+ /*** Disabled pending investigation of performance impact.
+
+ // This laborious determination was arrived at to keep the tests working.
+ val calcSimilar = (
+ name.length > 2 && (
+ startingIdentCx.reportErrors
+ || startingIdentCx.enclClassOrMethod.reportErrors
+ )
+ )
+ // avoid calculating if we're in "silent" mode.
+ // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
+ val similar = {
+ if (!calcSimilar) ""
+ else {
+ val allowed = (
+ startingIdentCx.enclosingContextChain
+ flatMap (ctx => ctx.scope.toList ++ ctx.imports.flatMap(_.allImportedSymbols))
+ filter (sym => sym.isTerm == name.isTermName)
+ filterNot (sym => sym.isPackage || sym.isSynthetic || sym.hasMeaninglessName)
+ )
+ val allowedStrings = (
+ allowed.map("" + _.name).distinct.sorted
+ filterNot (s => (s contains '$') || (s contains ' '))
+ )
+ similarString("" + name, allowedStrings)
+ }
+ }
+ */
+ val similar = ""
+ NormalTypeError(tree, "not found: "+decodeWithKind(name, owner) + similar)
+ }
+
+ // typedAppliedTypeTree
+ def AppliedTypeNoParametersError(tree: Tree, errTpe: Type) = {
+ issueNormalTypeError(tree, errTpe + " does not take type parameters")
+ setError(tree)
+ }
+
+ def AppliedTypeWrongNumberOfArgsError(tree: Tree, tpt: Tree, tparams: List[Symbol]) = {
+ val tptSafeString: String = try {
+ tpt.tpe.toString()
+ } catch {
+ case _: CyclicReference =>
+ tpt.toString()
+ }
+ val msg = "wrong number of type arguments for "+tptSafeString+", should be "+tparams.length
+ issueNormalTypeError(tree, msg)
+ setError(tree)
+ }
+
+ // typedTypeDef
+ def LowerBoundError(tree: TypeDef, lowB: Type, highB: Type) =
+ issueNormalTypeError(tree, "lower bound "+lowB+" does not conform to upper bound "+highB)
+
+ def HiddenSymbolWithError[T <: Tree](tree: T): T =
+ setError(tree)
+
+ def SymbolEscapesScopeError[T <: Tree](tree: T, badSymbol: Symbol): T = {
+ val modifierString = if (badSymbol.isPrivate) "private " else ""
+ issueNormalTypeError(tree, modifierString + badSymbol + " escapes its defining scope as part of type "+tree.tpe)
+ setError(tree)
+ }
+
+ // typedDefDef
+ def StarParamNotLastError(param: Tree) =
+ issueNormalTypeError(param, "*-parameter must come last")
+
+ def StarWithDefaultError(meth: Symbol) =
+ issueSymbolTypeError(meth, "a parameter section with a `*'-parameter is not allowed to have default arguments")
+
+ def InvalidConstructorDefError(ddef: Tree) =
+ issueNormalTypeError(ddef, "constructor definition not allowed here")
+
+ def DeprecatedParamNameError(param: Symbol, name: Name) =
+ issueSymbolTypeError(param, "deprecated parameter name "+ name +" has to be distinct from any other parameter name (deprecated or not).")
+
+ // computeParamAliases
+ def SuperConstrReferenceError(tree: Tree) =
+ NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name")
+
+ def SuperConstrArgsThisReferenceError(tree: Tree) =
+ NormalTypeError(tree, "super constructor arguments cannot reference unconstructed `this`")
+
+ // typedValDef
+ def VolatileValueError(vdef: Tree) =
+ issueNormalTypeError(vdef, "values cannot be volatile")
+
+ def FinalVolatileVarError(vdef: Tree) =
+ issueNormalTypeError(vdef, "final vars cannot be volatile")
+
+ def LocalVarUninitializedError(vdef: Tree) =
+ issueNormalTypeError(vdef, "local variables must be initialized")
+
+ //typedAssign
+ def AssignmentError(tree: Tree, varSym: Symbol) = {
+ issueNormalTypeError(tree,
+ if (varSym != null && varSym.isValue) "reassignment to val"
+ else "assignment to non variable")
+ setError(tree)
+ }
+
+ def UnexpectedTreeAssignmentConversionError(tree: Tree) = {
+ issueNormalTypeError(tree, "Unexpected tree during assignment conversion.")
+ setError(tree)
+ }
+
+ def MultiDimensionalArrayError(tree: Tree) = {
+ issueNormalTypeError(tree, "cannot create a generic multi-dimensional array of more than "+ definitions.MaxArrayDims+" dimensions")
+ setError(tree)
+ }
+
+ //typedSuper
+ def MixinMissingParentClassNameError(tree: Tree, mix: Name, clazz: Symbol) =
+ issueNormalTypeError(tree, mix+" does not name a parent class of "+clazz)
+
+ def AmbiguousParentClassError(tree: Tree) =
+ issueNormalTypeError(tree, "ambiguous parent class qualifier")
+
+ //typedSelect
+ def NotAMemberError(sel: Tree, qual: Tree, name: Name) = {
+ def errMsg = {
+ val owner = qual.tpe.typeSymbol
+ val target = qual.tpe.widen
+ def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
+ def nameString = decodeWithKind(name, owner)
+ /** Illuminating some common situations and errors a bit further. */
+ def addendum = {
+ val companion = {
+ if (name.isTermName && owner.isPackageClass) {
+ target.member(name.toTypeName) match {
+ case NoSymbol => ""
+ case sym => "\nNote: %s exists, but it has no companion object.".format(sym)
+ }
+ }
+ else ""
+ }
+ val semicolon = (
+ if (linePrecedes(qual, sel))
+ "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?"
+ else
+ ""
+ )
+ companion + semicolon
+ }
+ withAddendum(qual.pos)(
+ if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
+ else nameString + " is not a member of " + targetKindString + target + addendum
+ )
+ }
+ issueNormalTypeError(sel, errMsg)
+ // the error has to be set for the copied tree, otherwise
+ // the error remains persistent across multiple compilations
+ // and causes problems
+ //setError(sel)
+ }
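The semicolon hint fires when the qualifier sits on an earlier line than the selection, which is typical when a newline inside parentheses does not end the statement the user thought it did (hypothetical sketch):

object SemicolonHint {
  // Inside parentheses a newline does not terminate the expression, so the
  // second line is parsed as an infix selection on `n`, not a new statement:
  //   def g(n: Int) = (n
  //     increment 1)
  // reports: value increment is not a member of Int
  //          possible cause: maybe a semicolon is missing before `value increment'?
  def f(n: Int) = (n
    min 1)                 // fine: min really is a member of Int
}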
+
+ //typedNew
+ def IsAbstractError(tree: Tree, sym: Symbol) = {
+ issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated")
+ setError(tree)
+ }
+
+ def DoesNotConformToSelfTypeError(tree: Tree, sym: Symbol, tpe0: Type) = {
+ issueNormalTypeError(tree, sym + " cannot be instantiated because it does not conform to its self-type " + tpe0)
+ setError(tree)
+ }
+
+ //typedEta
+ def UnderscoreEtaError(tree: Tree) = {
+ issueNormalTypeError(tree, "_ must follow method; cannot follow " + tree.tpe)
+ setError(tree)
+ }
+
+ //typedReturn
+ def ReturnOutsideOfDefError(tree: Tree) = {
+ issueNormalTypeError(tree, "return outside method definition")
+ setError(tree)
+ }
+
+ def ReturnWithoutTypeError(tree: Tree, owner: Symbol) = {
+ issueNormalTypeError(tree, owner + " has return statement; needs result type")
+ setError(tree)
+ }
+
+ //typedBind
+ def VariableInPatternAlternativeError(tree: Tree) = {
+ issueNormalTypeError(tree, "illegal variable in pattern alternative")
+ //setError(tree)
+ }
+
+ //typedCase
+ def StarPositionInPatternError(tree: Tree) =
+ issueNormalTypeError(tree, "_* may only come last")
+
+ //typedFunction
+ def MaxFunctionArityError(fun: Tree) = {
+ issueNormalTypeError(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters")
+ setError(fun)
+ }
+
+ def WrongNumberOfParametersError(tree: Tree, argpts: List[Type]) = {
+ issueNormalTypeError(tree, "wrong number of parameters; expected = " + argpts.length)
+ setError(tree)
+ }
+
+ def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) = {
+ def anonMessage = (
+ "\nThe argument types of an anonymous function must be fully known. (SLS 8.5)" +
+ "\nExpected type was: " + pt.toLongString
+ )
+
+ val suffix =
+ if (!vparam.mods.isSynthetic) ""
+ else " for expanded function" + (fun match {
+ case Function(_, Match(_, _)) => anonMessage
+ case _ => " " + fun
+ })
+
+ issueNormalTypeError(vparam, "missing parameter type" + suffix)
+ }
+
+ def ConstructorsOrderError(tree: Tree) = {
+ issueNormalTypeError(tree, "called constructor's definition must precede calling constructor's definition")
+ setError(tree)
+ }
+
+ def OnlyDeclarationsError(tree: Tree) = {
+ issueNormalTypeError(tree, "only declarations allowed here")
+ setError(tree)
+ }
+
+ // typedAnnotation
+ def AnnotationNotAConstantError(tree: Tree) =
+ NormalTypeError(tree, "annotation argument needs to be a constant; found: " + tree)
+
+ def AnnotationArgNullError(tree: Tree) =
+ NormalTypeError(tree, "annotation argument cannot be null")
+
+ def ArrayConstantsError(tree: Tree) =
+ NormalTypeError(tree, "Array constants have to be specified using the `Array(...)' factory method")
+
+ def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) =
+ NormalTypeError(tree, "found array constant, expected argument of type " + pt)
+
+ def UnexpectedTreeAnnotation(tree: Tree) =
+ NormalTypeError(tree, "unexpected tree in annotation: "+ tree)
+
+ def AnnotationTypeMismatchError(tree: Tree, expected: Type, found: Type) =
+ NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found)
+
+ def MultipleArgumentListForAnnotationError(tree: Tree) =
+ NormalTypeError(tree, "multiple argument lists on classfile annotation")
+
+ def UnknownAnnotationNameError(tree: Tree, name: Name) =
+ NormalTypeError(tree, "unknown annotation argument name: " + name)
+
+ def DuplicateValueAnnotationError(tree: Tree, name: Name) =
+ NormalTypeError(tree, "duplicate value for annotation argument " + name)
+
+ def ClassfileAnnotationsAsNamedArgsError(tree: Tree) =
+ NormalTypeError(tree, "classfile annotation arguments have to be supplied as named arguments")
+
+ def AnnotationMissingArgError(tree: Tree, annType: Type, sym: Symbol) =
+ NormalTypeError(tree, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name)
+
+ def NestedAnnotationError(tree: Tree, annType: Type) =
+ NormalTypeError(tree, "nested classfile annotations must be defined in java; found: "+ annType)
+
+ def UnexpectedTreeAnnotationError(tree: Tree, unexpected: Tree) =
+ NormalTypeError(tree, "unexpected tree after typing annotation: "+ unexpected)
+
+ // TODO no test case
+ //typedExistentialTypeTree
+ def AbstractionFromVolatileTypeError(vd: ValDef) =
+ issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
+
+ def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
+ issueNormalTypeError(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ setError(tree)
+ }
+
+ def TypedApplyDoesNotTakeTpeParametersError(tree: Tree, fun: Tree) = {
+ issueNormalTypeError(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
+ setError(tree)
+ }
+
+ // doTypeApply
+ //tryNamesDefaults
+ def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
+
+ def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
+
+ // can it still happen? see test case neg/t960.scala
+ // TODO no test case
+ def OverloadedUnapplyError(tree: Tree) =
+ issueNormalTypeError(tree, "cannot resolve overloaded unapply")
+
+ def UnapplyWithSingleArgError(tree: Tree) =
+ issueNormalTypeError(tree, "an unapply method must accept a single argument.")
+
+ def MultipleVarargError(tree: Tree) =
+ NormalTypeError(tree, "when using named arguments, the vararg parameter has to be specified exactly once")
+
+ def ModuleUsingCompanionClassDefaultArgsErrror(tree: Tree) =
+ NormalTypeError(tree, "module extending its companion class cannot use default constructor arguments")
+
+ def NotEnoughArgsError(tree: Tree, fun0: Tree, missing0: List[Symbol]) = {
+ def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]) = {
+ val suffix = {
+ if (missing.isEmpty) ""
+ else {
+ val keep = missing take 3 map (_.name)
+ ".\nUnspecified value parameter%s %s".format(
+ if (missing.tail.isEmpty) "" else "s",
+ if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ")
+ else keep.mkString("", ", ", ".")
+ )
+ }
+ }
+
+ "not enough arguments for " + treeSymTypeMsg(fun) + suffix
+ }
+ NormalTypeError(tree, notEnoughArgumentsMsg(fun0, missing0))
+ }
+
+ //doTypedApply - patternMode
+ // TODO: missing test case
+ def TooManyArgsPatternError(fun: Tree) =
+ NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+
+ def WrongNumberArgsPatternError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "wrong number of arguments for "+treeSymTypeMsg(fun))
+
+ def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, fun.tpe+" does not take parameters")
+
+ //checkClassType
+ def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
+ issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
+ setError(tpt)
+ }
+
+ def ClassTypeRequiredError(tree: Tree, found: AnyRef) = {
+ issueNormalTypeError(tree, "class type required but "+found+" found")
+ setError(tree)
+ }
+
+ // validateParentClasses
+ def ParentSuperSubclassError(parent: Tree, superclazz: Symbol,
+ parentSym: Symbol, mixin: Symbol) =
+ NormalTypeError(parent, "illegal inheritance; super"+superclazz+
+ "\n is not a subclass of the super"+parentSym+
+ "\n of the mixin " + mixin)
+
+ def ParentNotATraitMixinError(parent: Tree, mixin: Symbol) =
+ NormalTypeError(parent, mixin+" needs to be a trait to be mixed in")
+
+ def ParentFinalInheritanceError(parent: Tree, mixin: Symbol) =
+ NormalTypeError(parent, "illegal inheritance from final "+mixin)
+
+ def ParentSealedInheritanceError(parent: Tree, psym: Symbol) =
+ NormalTypeError(parent, "illegal inheritance from sealed " + psym + ": " + context.unit.source.file.canonicalPath + " != " + psym.sourceFile.canonicalPath)
+
+ def ParentSelfTypeConformanceError(parent: Tree, selfType: Type) =
+ NormalTypeError(parent,
+ "illegal inheritance;\n self-type "+selfType+" does not conform to "+
+ parent +"'s selftype "+parent.tpe.typeOfThis)
+
+ // TODO: missing test case
+ def ParentInheritedTwiceError(parent: Tree, parentSym: Symbol) =
+ NormalTypeError(parent, parentSym+" is inherited twice")
+
+ //adapt
+ def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
+ issueNormalTypeError(tree,
+ "missing arguments for " + meth.fullLocationString + (
+ if (meth.isConstructor) ""
+ else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
+ ))
+ setError(tree)
+ }
+
+ def MissingTypeParametersError(tree: Tree) = {
+ issueNormalTypeError(tree, tree.symbol+" takes type parameters")
+ setError(tree)
+ }
+
+ def KindArityMismatchError(tree: Tree, pt: Type) = {
+ issueNormalTypeError(tree,
+ tree.tpe+" takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+
+ ", expected: "+countAsString(pt.typeParams.length))
+ setError(tree)
+ }
+
+ def CaseClassConstructorError(tree: Tree) = {
+ issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ setError(tree)
+ }
+
+ //TODO Needs test case
+ def ConstructorPrefixError(tree: Tree, restpe: Type) = {
+ issueNormalTypeError(tree, restpe.prefix+" is not a legal prefix for a constructor")
+ setError(tree)
+ }
+
+ // SelectFromTypeTree
+ def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
+ issueNormalTypeError(tree, "illegal type selection from volatile type "+qual.tpe)
+ setError(tree)
+ }
+
+ // packedType
+ def InferTypeWithVolatileTypeSelectionError(tree: Tree, pre: Type) =
+ issueNormalTypeError(tree, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre)
+
+ def AbstractExistentiallyOverParamerizedTpeError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "can't existentially abstract over parameterized type " + tp)
+
+ // manifestTree
+ def MissingManifestError(tree: Tree, full: Boolean, tp: Type) = {
+ issueNormalTypeError(tree, "cannot find "+(if (full) "" else "class ")+"manifest for element type "+tp)
+ setError(tree)
+ }
+
+ // TODO needs test case
+ // cases where we do not necessarily return trees
+ def DependentMethodTpeConversionToFunctionError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "method with dependent type "+tp+" cannot be converted to function value")
+
+ //checkStarPatOK
+ def StarPatternWithVarargParametersError(tree: Tree) =
+ issueNormalTypeError(tree, "star patterns must correspond with varargs parameters")
+
+ // TODO missing test case
+ def FinitaryError(tparam: Symbol) =
+ issueSymbolTypeError(tparam, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive")
+
+ // TODO missing test case for a second case
+ def QualifyingClassError(tree: Tree, qual: Name) = {
+ issueNormalTypeError(tree,
+ if (qual.isEmpty) tree + " can be used only in a class, object, or template"
+ else qual + " is not an enclosing class")
+ setError(tree)
+ }
+
+ // def stabilize
+ def NotAValueError(tree: Tree, sym: Symbol) = {
+ issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value")
+ setError(tree)
+ }
+
+ // checkNoDoubleDefs...
+ def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) =
+ issueSymbolTypeError(sym0, sym1+" is defined twice"+
+ {if(!settings.debug.value) "" else " in "+context0.unit}+
+ {if (sym0.isMacro && sym1.isMacro) " \n(note that macros cannot be overloaded)" else ""})
+
+ // cyclic errors
+ def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
+
+ def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+
+ def MacroExpandError(tree: Tree, t: Any) = {
+ issueNormalTypeError(tree, "macros must return a compiler-specific tree; returned class is: " + t.getClass)
+ setError(tree)
+ }
+ }
+ }
+
+ trait InferencerContextErrors {
+ self: Inferencer =>
+
+ private def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
+ def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
+
+ def resType = if (pt isWildcard) "" else " with expected result type " + pt
+ def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
+ def locals = alternatives(tree) flatMap (_.typeParams)
+
+ withDisambiguation(locals, allTypes: _*) {
+ treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType
+ }
+ }
+
+ object InferErrorGen {
+
+ implicit val context0 = getContext
+
+ object PolyAlternativeErrorKind extends Enumeration {
+ type ErrorType = Value
+ val WrongNumber, NoParams, ArgsDoNotConform = Value
+ }
+
+ private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) =
+ if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) {
+ val methodName = nme.defaultGetterToMethod(sym1.name)
+ (sym1.enclClass.pos,
+ "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
+ " define default arguments")
+ } else {
+ (pos,
+ ("ambiguous reference to overloaded definition,\n" +
+ "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
+ "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
+ "\nmatch " + rest)
+ )
+ }
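The default-getter branch above corresponds to declaring defaults on more than one overload; a hypothetical reproduction (shown commented out, since it does not compile) produces a message of the shape constructed here:

class Config {
  // Both overloads would need a synthetic default getter named set$default$1,
  // so the error is reported once, at the enclosing class:
  //   "in class Config, multiple overloaded alternatives of method set
  //    define default arguments"
  // def set(value: Int = 0): Int = value
  // def set(value: String = ""): String = value
}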
+
+ def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+ def errMsg = {
+ val location = if (sym.isClassConstructor) owner0 else pre.widen
+
+ underlying(sym).fullLocationString + " cannot be accessed in " +
+ location + explanation
+ }
+ NormalTypeError(tree, errMsg, ErrorKinds.Access)
+ }
+
+ def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
+ issueNormalTypeError(fn,
+ "no type parameters for " +
+ applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) +
+ "\n --- because ---\n" + msg)
+
+ // TODO: no test case
+ def NoConstructorInstanceError(tree: Tree, restpe: Type, pt: Type, msg: String) = {
+ issueNormalTypeError(tree,
+ "constructor of type " + restpe +
+ " cannot be uniquely instantiated to expected type " + pt +
+ "\n --- because ---\n" + msg)
+ setError(tree)
+ }
+
+ def ConstrInstantiationError(tree: Tree, restpe: Type, pt: Type) = {
+ issueNormalTypeError(tree,
+ "constructor cannot be instantiated to expected type" + foundReqMsg(restpe, pt))
+ setError(tree)
+ }
+
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) =
+ issueNormalTypeError(tree,
+ applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
+
+ def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
+ firstCompeting: Symbol, argtpes: List[Type], pt: Type) = {
+ val msg0 =
+ "argument types " + argtpes.mkString("(", ",", ")") +
+ (if (pt == WildcardType) "" else " and expected result type " + pt)
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ }
+
+ def NoBestExprAlternativeError(tree: Tree, pt: Type) =
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+
+ def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type) = {
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
+ setError(tree)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ }
+
+ // checkBounds
+ def KindBoundErrors(tree: Tree, prefix: String, targs: List[Type],
+ tparams: List[Symbol], kindErrors: List[String]) = {
+ issueNormalTypeError(tree,
+ prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") +
+ " do not conform to the expected kinds of the type parameters "+
+ tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." +
+ kindErrors.toList.mkString("\n", ", ", ""))
+ }
+
+ def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
+ tparams: List[Symbol], kindErrors: List[String]) = {
+ if (settings.explaintypes.value) {
+ val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
+ ()
+ }
+
+ issueNormalTypeError(tree,
+ prefix + "type arguments " + targs.mkString("[", ",", "]") +
+ " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
+ (tparams map (_.defString)).mkString("[", ",", "]"))
+ }
+
+ //substExpr
+ def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
+ issueNormalTypeError(tree,
+ "polymorphic expression cannot be instantiated to expected type" +
+ foundReqMsg(polyType(undetparams, skipImplicit(tree.tpe)), pt))
+
+ //checkCheckable
+ def TypePatternOrIsInstanceTestError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "type "+tp+" cannot be used in a type pattern or isInstanceOf test")
+
+ def PatternTypeIncompatibleWithPtError1(tree: Tree, pattp: Type, pt: Type) =
+ issueNormalTypeError(tree, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt))
+
+ def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) =
+ issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
+
+ def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) = {
+ def errMsg = {
+ val sym = pat.tpe.typeSymbol
+ val clazz = sym.companionClass
+ val addendum = (
+ if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) {
+ // TODO: move these somewhere reusable.
+ val typeString = clazz.typeParams match {
+ case Nil => "" + clazz.name
+ case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
+ }
+ val caseString = (
+ clazz.caseFieldAccessors
+ map (_ => "_") // could use the actual param names here
+ mkString (clazz.name + "(", ",", ")")
+ )
+ (
+ "\nNote: if you intended to match against the class, try `case _: " +
+ typeString + "` or `case " + caseString + "`"
+ )
+ }
+ else ""
+ )
+ "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum
+ }
+ issueNormalTypeError(pat, errMsg)
+ }
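The addendum above targets the slip of matching against a case class's companion object where the class was intended; a hypothetical reproduction:

object CompanionPattern {
  sealed trait Shape
  case class Circle(radius: Double) extends Shape

  // Writing `case Circle =>` matches the companion object (type Circle.type),
  // which is not a Shape, so the pattern is rejected and the note suggests
  // `case _: Circle` or `case Circle(_)` instead.
  def describe(s: Shape): String = s match {
    // case Circle   => "a circle"            // pattern type Circle.type is incompatible
    case Circle(r)   => "a circle of radius " + r
    case _           => "some other shape"
  }
}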
+
+ def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = {
+ import PolyAlternativeErrorKind._
+ val msg =
+ err match {
+ case WrongNumber =>
+ "wrong number of type parameters for " + treeSymTypeMsg(tree)
+ case NoParams =>
+ treeSymTypeMsg(tree) + " does not take type parameters"
+ case ArgsDoNotConform =>
+ "type arguments " + argtypes.mkString("[", ",", "]") +
+ " conform to the bounds of none of the overloaded alternatives of\n "+sym+
+ ": "+sym.info
+ }
+ issueNormalTypeError(tree, msg)
+ ()
+ }
+ }
+ }
+
+ trait NamerContextErrors {
+ self: Namer =>
+
+ object NamerErrorGen {
+
+ implicit val context0 = context
+
+ object SymValidateErrors extends Enumeration {
+ val ImplicitConstr, ImplicitNotTerm, ImplicitTopObject,
+ OverrideClass, SealedNonClass, AbstractNonClass,
+ OverrideConstr, AbstractOverride, LazyAndEarlyInit,
+ ByNameParameter, AbstractVar = Value
+ }
+
+ object DuplicatesErrorKinds extends Enumeration {
+ val RenamedTwice, AppearsTwice = Value
+ }
+
+ import SymValidateErrors._
+ import DuplicatesErrorKinds._
+ import symtab.Flags
+
+ def TypeSigError(tree: Tree, ex: TypeError) = {
+ ex match {
+ case CyclicReference(sym, info: TypeCompleter) =>
+ issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+ case _ =>
+ context0.issue(TypeErrorWithUnderlyingTree(tree, ex))
+ }
+ }
+
+ def GetterDefinedTwiceError(getter: Symbol) =
+ issueSymbolTypeError(getter, getter+" is defined twice")
+
+ def ValOrValWithSetterSuffixError(tree: Tree) =
+ issueNormalTypeError(tree, "Names of vals or vars may not end in `_='")
+
+ def PrivateThisCaseClassParameterError(tree: Tree) =
+ issueNormalTypeError(tree, "private[this] not allowed for case class parameters")
+
+ def BeanPropertyAnnotationLimitationError(tree: Tree) =
+ issueNormalTypeError(tree, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import")
+
+ def BeanPropertyAnnotationFieldWithoutLetterError(tree: Tree) =
+ issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to fields that start with a letter")
+
+ def BeanPropertyAnnotationPrivateFieldError(tree: Tree) =
+ issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to non-private fields")
+
+ def DoubleDefError(currentSym: Symbol, prevSym: Symbol) = {
+ val s1 = if (prevSym.isModule) "case class companion " else ""
+ val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
+ val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
+
+ issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3)
+ }
+
+ def MaxParametersCaseClassError(tree: Tree) =
+ issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
+
+ def InheritsItselfError(tree: Tree) =
+ issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
+
+ def MissingParameterOrValTypeError(vparam: Tree) =
+ issueNormalTypeError(vparam, "missing parameter type")
+
+ def RootImportError(tree: Tree) =
+ issueNormalTypeError(tree, "_root_ cannot be imported")
+
+ def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value) {
+ val msg = errKind match {
+ case ImplicitConstr =>
+ "`implicit' modifier not allowed for constructors"
+
+ case ImplicitNotTerm =>
+ "`implicit' modifier can be used only for values, variables and methods"
+
+ case ImplicitTopObject =>
+ "`implicit' modifier cannot be used for top-level objects"
+
+ case OverrideClass =>
+ "`override' modifier not allowed for classes"
+
+ case SealedNonClass =>
+ "`sealed' modifier can be used only for classes"
+
+ case AbstractNonClass =>
+ "`abstract' modifier can be used only for classes; it should be omitted for abstract members"
+
+ case OverrideConstr =>
+ "`override' modifier not allowed for constructors"
+
+ case AbstractOverride =>
+ "`abstract override' modifier only allowed for members of traits"
+
+ case LazyAndEarlyInit =>
+ "`lazy' definitions may not be initialized early"
+
+ case ByNameParameter =>
+ "pass-by-name arguments not allowed for case class parameters"
+
+ case AbstractVar =>
+ "only classes can have declared but undefined members" + abstractVarMessage(sym)
+
+ }
+ issueSymbolTypeError(sym, msg)
+ }
+
+
+ def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
+ issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+
+ def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
+ issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
+ Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+
+ def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
+ val errorAddendum =
+ ": parameter appears in the type of another parameter in the same section or an earlier one"
+ issueSymbolTypeError(sym, "illegal dependent method type" + errorAddendum)(context)
+ }
+
+ def DuplicatesError(tree: Tree, name: Name, kind: DuplicatesErrorKinds.Value) = {
+ val msg = kind match {
+ case RenamedTwice =>
+ "is renamed twice"
+ case AppearsTwice =>
+ "appears twice as a target of a renaming"
+ }
+
+ issueNormalTypeError(tree, name.decode + " " + msg)
+ }
+ }
+ }
+
+ trait ImplicitsContextErrors {
+ self: ImplicitSearch =>
+
+ import definitions._
+
+ def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
+ pre1: String, pre2: String, trailer: String)
+ (isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
+ if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
+ val coreMsg =
+ pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+
+ pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+
+ trailer
+ val errMsg =
+ if (isView) {
+ val found = pt.typeArgs(0)
+ val req = pt.typeArgs(1)
+ def defaultExplanation =
+ "Note that implicit conversions are not applicable because they are ambiguous:\n "+
+ coreMsg+"are possible conversion functions from "+ found+" to "+req
+
+ def explanation = {
+ val sym = found.typeSymbol
+ // Explain some common situations a bit more clearly.
+ if (AnyRefClass.tpe <:< req) {
+ if (sym == AnyClass || sym == UnitClass) {
+ "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
+ "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
+ }
+ else boxedClass get sym match {
+ case Some(boxed) =>
+ "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
+ "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
+ "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
+ "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
+ case _ =>
+ defaultExplanation
+ }
+ }
+ else defaultExplanation
+ }
+
+ typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + "\n" + explanation
+ } else {
+ "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
+ }
+ context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos, errMsg))
+ }
+ }
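The boxed-class note above covers the familiar case of handing a primitive to an AnyRef-typed parameter (era-appropriate hypothetical sketch):

object BoxedAmbiguity {
  def useRef(x: AnyRef): String = x.toString

  // Int is not a subtype of AnyRef, so a view Int => AnyRef is searched; the
  // deliberately conflicting implicits in Predef make that search ambiguous,
  // and the note above suggests an explicit ascription instead:
  // useRef(42)                             // rejected, with the boxed-class note
  val ok = useRef(42: java.lang.Integer)    // explicit boxing via int2Integer
}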
+
+ def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
+ issueDivergentImplicitsError(tree,
+ "diverging implicit expansion for type "+pt+"\nstarting with "+
+ sym.fullLocationString)
+ }
+
+ object NamesDefaultsErrorsGen {
+ import typer.infer.setError
+
+ def NameClashError(sym: Symbol, arg: Tree)(implicit context: Context) = {
+ setError(arg) // to distinguish it from ambiguous reference error
+
+ def errMsg =
+ "%s definition needs %s because '%s' is used as a named argument in its body.".format(
+ "variable", // "method"
+ "type", // "result type"
+ sym.name)
+ issueSymbolTypeError(sym, errMsg)
+ }
+
+ def AmbiguousReferenceInNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
+ if (!arg.isErroneous) { // check if name clash wasn't reported already
+ issueNormalTypeError(arg,
+ "reference to "+ name +" is ambiguous; it is both a method parameter "+
+ "and a variable in scope.")
+ setError(arg)
+ } else arg
+ }
+
+ def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
+ issueNormalTypeError(arg, "unknown parameter name: " + name)
+ setError(arg)
+ }
+
+ def DoubleParamNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
+ issueNormalTypeError(arg, "parameter specified twice: "+ name)
+ setError(arg)
+ }
+
+ def PositionalAfterNamedNamesDefaultError(arg: Tree)(implicit context: Context) = {
+ issueNormalTypeError(arg, "positional after named argument.")
+ setError(arg)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 1d9eb9c292..740acbd10f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.collection.mutable.ListBuffer
+import scala.collection.mutable.{LinkedHashSet, Set}
import annotation.tailrec
/**
@@ -42,11 +42,11 @@ trait Contexts { self: Analyzer =>
*
* - if option `-Yno-imports` is given, nothing is imported
* - if the unit is java defined, only `java.lang` is imported
- * - if option `-Yno-predef` is given, if the unit has an import of Predef
- * among its leading imports, or if the unit is [[scala.ScalaObject]]
+ * - if option `-Yno-predef` is given, or if the unit body has an import of Predef
+ * among its leading imports, or if the tree is [[scala.ScalaObject]]
* or [[scala.Predef]], `Predef` is not imported.
*/
- protected def rootImports(unit: CompilationUnit, tree: Tree): List[Symbol] = {
+ protected def rootImports(unit: CompilationUnit): List[Symbol] = {
import definitions._
assert(isDefinitionsInitialized, "definitions uninitialized")
@@ -56,26 +56,17 @@ trait Contexts { self: Analyzer =>
else List(JavaLangPackage, ScalaPackage, PredefModule)
}
- def rootContext(unit: CompilationUnit): Context =
- rootContext(unit, EmptyTree, false)
-
+ def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
+ def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
import definitions._
var sc = startContext
- def addImport(pkg: Symbol) {
- assert(pkg ne null)
- val qual = gen.mkAttributedStableRef(pkg)
- sc = sc.makeNewImport(
- Import(qual, List(ImportSelector(nme.WILDCARD, -1, null, -1)))
- .setSymbol(NoSymbol.newImport(NoPosition).setFlag(SYNTHETIC).setInfo(ImportType(qual)))
- .setType(NoType))
+ for (sym <- rootImports(unit)) {
+ sc = sc.makeNewImport(sym)
sc.depth += 1
}
- for (imp <- rootImports(unit, tree))
- addImport(imp)
val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
- c.reportAmbiguousErrors = !erasedTypes
- c.reportGeneralErrors = !erasedTypes
+ if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
c.implicitsEnabled = !erasedTypes
c
}
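For reference, the root imports set up here amount to the usual implicit preamble (sketch; behavior under the flags is as documented above):

// Every compilation unit behaves as if it started with:
//   import java.lang._
//   import scala._
//   import Predef._
// Under -Yno-imports none of these are added; under -Yno-predef only the
// first two are, so Predef members need an explicit prefix:
object NoPredefDemo {
  def show(s: String): Unit = scala.Predef.println(s)   // compiles even with -Yno-predef
}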
@@ -91,7 +82,17 @@ trait Contexts { self: Analyzer =>
}
}
+ private object Errors {
+ final val ReportErrors = 1 << 0
+ final val BufferErrors = 1 << 1
+ final val AmbiguousErrors = 1 << 2
+ final val notThrowMask = ReportErrors | BufferErrors
+ final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors
+ }
+
class Context private[typechecker] {
+ import Errors._
+
var unit: CompilationUnit = NoCompilationUnit
var tree: Tree = _ // Tree associated with this context
var owner: Symbol = NoSymbol // The current owner
@@ -117,8 +118,6 @@ trait Contexts { self: Analyzer =>
// (the call to the super or self constructor in the first line of a constructor)
// in this context the object's fields should not be in scope
- var reportAmbiguousErrors = false
- var reportGeneralErrors = false
var diagnostic: List[String] = Nil // these messages are printed when issuing an error
var implicitsEnabled = false
var checking = false
@@ -130,6 +129,10 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
+ def enclClassOrMethod: Context =
+ if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ else outer.enclClassOrMethod
+
def undetparamsString =
if (undetparams.isEmpty) ""
else undetparams.mkString("undetparams=", ", ", "")
@@ -142,12 +145,41 @@ trait Contexts { self: Analyzer =>
tparams
}
- def withoutReportingErrors[T](op: => T): T = {
- val saved = reportGeneralErrors
- reportGeneralErrors = false
- try op
- finally reportGeneralErrors = saved
+ private[this] var mode = 0
+ private[this] val buffer = LinkedHashSet[AbsTypeError]()
+
+ def errBuffer = buffer
+ def hasErrors = buffer.nonEmpty
+
+ def state: Int = mode
+ def restoreState(state0: Int) = mode = state0
+
+ def reportErrors = (state & ReportErrors) != 0
+ def bufferErrors = (state & BufferErrors) != 0
+ def ambiguousErrors = (state & AmbiguousErrors) != 0
+ def throwErrors = (state & notThrowMask) == 0
+
+ def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
+ def setBufferErrors() = {
+ assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
+ mode = BufferErrors
+ }
+ def setThrowErrors() = mode &= (~AllMask)
+ def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
+
+ def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
+ def condBufferFlush(removeP: AbsTypeError => Boolean) {
+ val elems = buffer.filter(removeP)
+ buffer --= elems
+ }
+ def flushBuffer() { buffer.clear() }
+ def flushAndReturnBuffer(): Set[AbsTypeError] = {
+ val current = buffer.clone()
+ buffer.clear()
+ current
}
+
+ def logError(err: AbsTypeError) = buffer += err
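A minimal, standalone sketch of the reporting-mode bit mask introduced above (the names mirror the patch; this is illustrative, not compiler code):

object ErrorModeDemo {
  final val ReportErrors    = 1 << 0
  final val BufferErrors    = 1 << 1
  final val AmbiguousErrors = 1 << 2
  final val notThrowMask    = ReportErrors | BufferErrors

  private var mode = 0
  def setReportErrors() = mode = ReportErrors | AmbiguousErrors  // report directly
  def setBufferErrors() = mode = BufferErrors                    // collect silently
  def setThrowErrors()  = mode = 0                               // fall back to throwing TypeError
  def throwErrors       = (mode & notThrowMask) == 0

  def main(args: Array[String]): Unit = {
    setBufferErrors()
    assert(!throwErrors)   // a buffering context never throws
    setThrowErrors()
    assert(throwErrors)    // neither reporting nor buffering => throw
  }
}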
def withImplicitsDisabled[T](op: => T): T = {
val saved = implicitsEnabled
@@ -187,8 +219,7 @@ trait Contexts { self: Analyzer =>
c.depth = if (scope == this.scope) this.depth else this.depth + 1
c.imports = imports
c.inSelfSuperCall = inSelfSuperCall
- c.reportAmbiguousErrors = this.reportAmbiguousErrors
- c.reportGeneralErrors = this.reportGeneralErrors
+ c.restoreState(this.state)
c.diagnostic = this.diagnostic
c.typingIndentLevel = typingIndentLevel
c.implicitsEnabled = this.implicitsEnabled
@@ -200,13 +231,16 @@ trait Contexts { self: Analyzer =>
c
}
+ // TODO: remove? Doesn't seem to be used
def make(unit: CompilationUnit): Context = {
val c = make(unit, EmptyTree, owner, scope, imports)
- c.reportAmbiguousErrors = true
- c.reportGeneralErrors = true
+ c.setReportErrors()
c.implicitsEnabled = true
c
}
+
+ def makeNewImport(sym: Symbol): Context =
+ makeNewImport(gen.mkWildcardImport(sym))
def makeNewImport(imp: Import): Context =
make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
@@ -219,7 +253,7 @@ trait Contexts { self: Analyzer =>
make(unit, tree, owner, scope, imports)
def makeNewScope(tree: Tree, owner: Symbol): Context =
- make(tree, owner, new Scope(scope))
+ make(tree, owner, newNestedScope(scope))
// IDE stuff: distinguish between scopes created for typing and scopes created for naming.
def make(tree: Tree, owner: Symbol): Context =
@@ -230,8 +264,8 @@ trait Contexts { self: Analyzer =>
def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
val c = make(newtree)
- c.reportGeneralErrors = false
- c.reportAmbiguousErrors = reportAmbiguousErrors
+ c.setBufferErrors()
+ c.setAmbiguousErrors(reportAmbiguousErrors)
c
}
@@ -243,13 +277,11 @@ trait Contexts { self: Analyzer =>
def makeConstructorContext = {
var baseContext = enclClass.outer
- //todo: find out why we need next line
while (baseContext.tree.isInstanceOf[Template])
baseContext = baseContext.outer
val argContext = baseContext.makeNewScope(tree, owner)
argContext.inSelfSuperCall = true
- argContext.reportGeneralErrors = this.reportGeneralErrors
- argContext.reportAmbiguousErrors = this.reportAmbiguousErrors
+ argContext.restoreState(this.state)
def enterElems(c: Context) {
def enterLocalElems(e: ScopeEntry) {
if (e != null && e.owner == c.scope) {
@@ -276,41 +308,41 @@ trait Contexts { self: Analyzer =>
private def unitError(pos: Position, msg: String) =
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+ def issue(err: AbsTypeError) {
+ if (reportErrors) unitError(err.errPos, addDiagString(err.errMsg))
+ else if (bufferErrors) { buffer += err }
+ else throw new TypeError(err.errPos, err.errMsg)
+ }
+
+ def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
+ if (ambiguousErrors) {
+ if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
+ unitError(err.errPos, err.errMsg)
+ } else if (bufferErrors) { buffer += err }
+ else throw new TypeError(err.errPos, err.errMsg)
+ }
+
+ def issueAmbiguousError(err: AbsTypeError) {
+ if (ambiguousErrors)
+ unitError(err.errPos, addDiagString(err.errMsg))
+ else if (bufferErrors) { buffer += err }
+ else throw new TypeError(err.errPos, err.errMsg)
+ }
+
+ // TODO remove
def error(pos: Position, err: Throwable) =
- if (reportGeneralErrors) unitError(pos, addDiagString(err.getMessage()))
+ if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
else throw err
def error(pos: Position, msg: String) = {
val msg1 = addDiagString(msg)
- if (reportGeneralErrors) unitError(pos, msg1)
+ if (reportErrors) unitError(pos, msg1)
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String) = {
- if (reportGeneralErrors) unit.warning(pos, msg)
- }
-
- def ambiguousError(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) {
- val (reportPos, msg) = (
- if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) {
- val methodName = nme.defaultGetterToMethod(sym1.name)
- (sym1.enclClass.pos,
- "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
- " define default arguments")
- }
- else {
- (pos,
- ("ambiguous reference to overloaded definition,\n" +
- "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
- "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
- "\nmatch " + rest)
- )
- }
- )
- if (reportAmbiguousErrors) {
- if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
- unit.error(reportPos, msg)
- } else throw new TypeError(pos, msg)
+ def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
+ def warning(pos: Position, msg: String, force: Boolean) {
+ if (reportErrors || force) unit.warning(pos, msg)
}
def isLocal(): Boolean = tree match {
@@ -344,8 +376,8 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s)".format(
- owner.fullName, tree.shortClass, unit, scope.##
+ override def toString = "Context(%s@%s unit=%s scope=%s errors=%b)".format(
+ owner.fullName, tree.shortClass, unit, scope.##, hasErrors
)
/** Is `sub` a subclass of `base` or a companion object of such a subclass?
*/
@@ -553,7 +585,7 @@ trait Contexts { self: Analyzer =>
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
} else if (imports != nextOuter.imports) {
- assert(imports.tail == nextOuter.imports)
+ assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
collectImplicitImports(imports.head)
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 2bed5bffd4..179bea0035 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags
-
import scala.collection.{ mutable, immutable }
/** Duplicate trees and re-type check them, taking care to replace
@@ -18,6 +17,7 @@ import scala.collection.{ mutable, immutable }
*/
abstract class Duplicators extends Analyzer {
import global._
+ import definitions.{ AnyRefClass, AnyValClass }
def retyped(context: Context, tree: Tree): Tree = {
resetClassOwners
@@ -248,7 +248,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
- if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE)
+ //if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
vdef.tpt.tpe = fixType(vdef.tpt.tpe)
vdef.tpe = null
super.typed(vdef, mode, pt)
@@ -308,17 +308,26 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
val scrutTpe = scrut1.tpe.widen
- val cases1 = if (scrutTpe.isFinalType) cases filter {
- case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case CaseDef(Typed(_, tpt), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case _ => true
- } else cases
+ val cases1 = {
+ if (scrutTpe.isFinalType) cases filter {
+ case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case CaseDef(Typed(_, tpt), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case _ => true
+ }
+ // Without this, AnyRef specializations crash on patterns like
+ // case _: Boolean => ...
+ // Not at all sure this is safe.
+ else if (scrutTpe <:< AnyRefClass.tpe)
+ cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else
+ cases
+ }
super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index d54cb248cf..3d2f86d54d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -31,22 +31,29 @@ trait Implicits {
import typeDebug.{ ptTree, ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
+ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, true)
+
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
- * @param tree The tree for which the implicit needs to be inserted.
- * (the inference might instantiate some of the undetermined
- * type parameters of that tree.
- * @param pt The expected type of the implicit.
- * @param reportAmbiguous Should ambiguous implicit errors be reported?
- * False iff we search for a view to find out
- * whether one type is coercible to another.
- * @param isView We are looking for a view
- * @param context The current context
- * @return A search result
+ * @param tree The tree for which the implicit needs to be inserted.
+ * (the inference might instantiate some of the undetermined
+   *                 type parameters of that tree.)
+ * @param pt The expected type of the implicit.
+ * @param reportAmbiguous Should ambiguous implicit errors be reported?
+ * False iff we search for a view to find out
+ * whether one type is coercible to another.
+ * @param isView We are looking for a view
+ * @param context The current context
+ * @param saveAmbiguousDivergent False if any divergent/ambiguous errors should be ignored after
+ * implicits search,
+ * true if they should be reported (used in further typechecking).
+ * @return A search result
*/
- def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = {
- printInference("[inferImplicit%s] pt = %s".format(
- if (isView) " view" else "", pt)
+ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult = {
+ printInference("[infer %s] %s with pt=%s in %s".format(
+ if (isView) "view" else "implicit",
+ tree, pt, context.owner.enclClass)
)
printTyping(
ptBlock("infer implicit" + (if (isView) " view" else ""),
@@ -63,9 +70,11 @@ trait Implicits {
val start = startTimer(implicitNanos)
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
-
- val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit
- printInference("[inferImplicit] result: " + result)
+ val implicitSearchContext = context.makeImplicit(reportAmbiguous)
+ val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext).bestImplicit
+ if (saveAmbiguousDivergent && implicitSearchContext.hasErrors)
+ context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
+ printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
stopTimer(implicitNanos, start)
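A hypothetical call site (not part of this patch) showing how the new saveAmbiguousDivergent flag is meant to be used: when merely probing for a view, a caller can pass false so that ambiguity or divergence discovered during the probe never reaches the enclosing context's error buffer. fromTp and toTp are placeholder types; functionType is the compiler's own definitions helper and SearchFailure is the sentinel used elsewhere in this file.

    // Probe for a view from `fromTp` to `toTp` without polluting the caller's buffer.
    val viewPt  = functionType(List(fromTp), toTp)
    val probe   = inferImplicit(tree, viewPt, false, true, context,  // reportAmbiguous = false, isView = true
                                saveAmbiguousDivergent = false)
    val hasView = probe != SearchFailure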
@@ -93,21 +102,13 @@ trait Implicits {
private val ManifestSymbols = Set(PartialManifestClass, FullManifestClass, OptManifestClass)
- /** Map all type params in given list to WildcardType
- * @param tparams The list of type parameters to map
- * @param tp The type in which to do the mapping
- */
- private def tparamsToWildcards(tparams: List[Symbol], tp: Type) =
- if (tparams.isEmpty) tp
- else tp.instantiateTypeParams(tparams, tparams map (_ => WildcardType))
-
/* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
* Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
* when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
* so we have to approximate (otherwise it is excluded a priori).
*/
private def depoly(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) => tparamsToWildcards(tparams, ApproximateDependentMap(restpe))
+ case PolyType(tparams, restpe) => deriveTypeWithWildcards(tparams)(ApproximateDependentMap(restpe))
case _ => ApproximateDependentMap(tp)
}
@@ -191,12 +192,10 @@ trait Implicits {
*/
def memberWildcardType(name: Name, tp: Type) = {
val result = refinedType(List(WildcardType), NoSymbol)
- var psym = name match {
- case x: TypeName => result.typeSymbol.newAbstractType(NoPosition, x)
- case x: TermName => result.typeSymbol.newValue(NoPosition, x)
+ name match {
+ case x: TermName => result.typeSymbol.newValue(x) setInfoAndEnter tp
+ case x: TypeName => result.typeSymbol.newAbstractType(x) setInfoAndEnter tp
}
- psym setInfo tp
- result.decls enter psym
result
}
@@ -214,10 +213,10 @@ trait Implicits {
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
+ val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+ def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
+
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
- def templateArgType(argtpe: Type) =
- new BoundedWildcardType(TypeBounds(argtpe, AnyClass.tpe))
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "typer$dummy")
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
}
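A short reading of the two helpers above, with illustrative arguments (UnitClass, IntClass and newTermName are the compiler's own; the member name is made up): HasMethodMatching builds a memberWildcardType whose single member is a method, and each argument type is approximated by a lower-bounded wildcard via templateArgType.

    // Approximates the structural type   ? { def close(): Unit }
    // With arguments, e.g. List(IntClass.tpe), the parameter type becomes the
    // bounded wildcard  ? >: Int <: Any  rather than Int itself.
    val closeable = HasMethodMatching(newTermName("close"), Nil, UnitClass.tpe)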
@@ -253,7 +252,8 @@ trait Implicits {
* @param isView We are looking for a view
* @param context0 The context used for the implicit search
*/
- class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context) extends Typer(context0) {
+ class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context)
+ extends Typer(context0) with ImplicitsContextErrors {
printTyping(
ptBlock("new ImplicitSearch",
"tree" -> tree,
@@ -336,50 +336,6 @@ trait Implicits {
incCounter(implicitSearchCount)
- /** Issues an error signalling ambiguous implicits */
- private def ambiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
- pre1: String, pre2: String, trailer: String) =
- if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
- val coreMsg =
- pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+
- pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+
- trailer
- error(tree.pos,
- if (isView) {
- val found = pt.typeArgs(0)
- val req = pt.typeArgs(1)
- def defaultExplanation =
- "Note that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
-
- def explanation = {
- val sym = found.typeSymbol
- // Explain some common situations a bit more clearly.
- if (AnyRefClass.tpe <:< req) {
- if (sym == AnyClass || sym == UnitClass) {
- "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
- "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
- }
- else boxedClass get sym match {
- case Some(boxed) =>
- "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
- "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
- "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
- "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
- case _ =>
- defaultExplanation
- }
- }
- else defaultExplanation
- }
-
- typeErrorMsg(found, req) + "\n" + explanation
- }
- else {
- "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
- })
- }
-
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
@@ -395,7 +351,6 @@ trait Implicits {
* @pre `info.tpe` does not contain an error
*/
private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- printInference("[typedImplicit] " + info)
(context.openImplicits find { case (tp, sym) => sym == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
// println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
@@ -410,9 +365,7 @@ trait Implicits {
// println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
if (context.openImplicits.tail.isEmpty) {
if (!(pt.isErroneous))
- context.unit.error(
- tree.pos, "diverging implicit expansion for type "+pt+"\nstarting with "+
- info.sym.fullLocationString)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
SearchFailure
} else {
throw DivergentImplicit
@@ -588,6 +541,9 @@ trait Implicits {
else
typed1(itree, EXPRmode, wildPt)
+ if (context.hasErrors)
+ return fail("typed implicit %s has errors".format(info.sym.fullLocationString))
+
incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
@@ -607,23 +563,22 @@ trait Implicits {
}
}
- if (itree2.tpe.isError)
- SearchFailure
+ if (context.hasErrors)
+        fail("hasMatchingSymbol reported error(s)")
else if (!hasMatchingSymbol(itree1))
fail("candidate implicit %s is shadowed by other implicit %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
-
- if (matchesPt(itree2.tpe, pt.instantiateTypeParams(undetParams, tvars), undetParams)) {
- printInference(
- ptBlock("matchesPt",
- "itree1" -> itree1,
- "tvars" -> tvars,
- "undetParams" -> undetParams
- )
- )
-
+ def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
+
+ printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
+ if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
+ typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
+ itree2.tpe, pt
+ ))
+
+ if (matchesPt(itree2.tpe, ptInstantiated, undetParams)) {
if (tvars.nonEmpty)
printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
@@ -631,12 +586,15 @@ trait Implicits {
false, lubDepth(List(itree2.tpe, pt)))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
- checkBounds(itree2.pos, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+ checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+ if (context.hasErrors)
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree")
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
// prototype == WildcardType: want to remove all inferred Nothings
val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, tvars, targs)
+
val subst: TreeTypeSubstituter =
if (okParams.isEmpty) EmptyTreeTypeSubstituter
else {
@@ -656,22 +614,29 @@ trait Implicits {
// re-typecheck)
// TODO: the return tree is ignored. This seems to make
// no difference, but it's bad practice regardless.
- itree2 match {
+
+
+ val checked = itree2 match {
case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
}
- val result = new SearchResult(itree2, subst)
- incCounter(foundImplicits)
- printInference("[typedImplicit1] SearchResult: " + result)
- result
+
+ if (context.hasErrors)
+ fail("typing TypeApply reported errors for the implicit tree")
+ else {
+ val result = new SearchResult(itree2, subst)
+ incCounter(foundImplicits)
+ printInference("[success] found %s for pt %s".format(result, ptInstantiated))
+ result
+ }
}
- else fail("incompatible: %s does not match expected type %s".format(
- itree2.tpe, pt.instantiateTypeParams(undetParams, tvars)))
+ else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
}
}
catch {
- case ex: TypeError => fail(ex.getMessage())
+ case ex: TypeError =>
+ fail(ex.getMessage())
}
}
@@ -786,16 +751,11 @@ trait Implicits {
// most frequent one first
matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg)
}
- def eligibleString = {
- val args = List(
- "search" -> pt,
- "target" -> tree,
- "isView" -> isView
- ) ++ eligible.map("eligible" -> _)
-
- ptBlock("Implicit search in " + context, args: _*)
- }
- printInference(eligibleString)
+ if (eligible.nonEmpty)
+ printInference("[search%s] %s with pt=%s in %s, eligible:\n %s".format(
+ if (isView) " view" else "",
+ tree, pt, context.owner.enclClass, eligible.mkString("\n "))
+ )
/** Faster implicit search. Overall idea:
* - prune aggressively
@@ -810,13 +770,24 @@ trait Implicits {
catch divergenceHandler
tryImplicitInfo(i) match {
- case SearchFailure => rankImplicits(is, acc)
+ case SearchFailure =>
+ // We don't want errors that occur during checking implicit info
+ // to influence the check of further infos.
+ context.condBufferFlush(_.kind != ErrorKinds.Divergent)
+ rankImplicits(is, acc)
case newBest =>
best = newBest
val newPending = undoLog undo {
is filterNot (alt => alt == i || {
try improves(i, alt)
- catch { case e: CyclicReference => true }
+ catch {
+ case e: CyclicReference =>
+ if (printInfers) {
+ println(i+" discarded because cyclic reference occurred")
+ e.printStackTrace()
+ }
+ true
+ }
})
}
rankImplicits(newPending, i :: acc)
@@ -838,7 +809,8 @@ trait Implicits {
case chosen :: rest =>
rest find (alt => !improves(chosen, alt)) match {
case Some(competing) =>
- ambiguousImplicitError(chosen, competing, "both", "and", "")
+ AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context)
+ return SearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala
case _ =>
if (isView) chosen.useCountView += 1
else chosen.useCountArg += 1
@@ -1191,7 +1163,7 @@ trait Implicits {
/* !!! the following is almost right, but we have to splice nested manifest
* !!! types into this type. This requires a substantial extension of
* !!! reifiers.
- val reifier = new liftcode.Reifier()
+ val reifier = new Reifier()
val rtree = reifier.reifyTopLevel(tp1)
manifestFactoryCall("apply", tp, rtree)
*/
@@ -1239,12 +1211,14 @@ trait Implicits {
incCounter(inscopeImplicitHits)
}
if (result == SearchFailure) {
+ val previousErrs = context.flushAndReturnBuffer()
val failstart = startTimer(oftypeFailNanos)
val succstart = startTimer(oftypeSucceedNanos)
result = implicitManifestOrOfExpectedType(pt)
if (result == SearchFailure) {
+ context.updateBuffer(previousErrs)
stopTimer(oftypeFailNanos, failstart)
} else {
stopTimer(oftypeSucceedNanos, succstart)
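The last Implicits hunk above introduces a small buffering dance around the of-expected-type fallback. Restated compactly (Context methods exactly as used in the hunk), the policy is: park the errors from the failed in-scope search while implicitManifestOrOfExpectedType runs, and reinstate them only if that fallback fails as well.

    val previousErrs = context.flushAndReturnBuffer()  // park errors from the failed in-scope search
    result = implicitManifestOrOfExpectedType(pt)
    if (result == SearchFailure)
      context.updateBuffer(previousErrs)               // both attempts failed: restore the original errors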
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 5b38ddd092..b97fbebec2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -9,7 +9,6 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
-import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
import symtab.Flags._
import scala.annotation.tailrec
@@ -139,6 +138,9 @@ trait Infer {
def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
+ if (tvars.nonEmpty)
+ printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
+
if (!solve(tvars, tparams, variances, upper, depth)) {
// no panic, it's good enough to just guess a solution, we'll find out
// later whether it works. *ZAP* @M danger, Will Robinson! this means
@@ -180,7 +182,7 @@ trait Infer {
case NullaryMethodType(restpe) =>
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
- ExistentialType(tparams, normalize(qtpe))
+ newExistentialType(tparams, normalize(qtpe))
case tp1 =>
tp1 // @MAT aliases already handled by subtyping
}
@@ -189,12 +191,14 @@ trait Infer {
private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
- class Inferencer(context: Context) {
+ class Inferencer(context: Context) extends InferencerContextErrors {
+ import InferErrorGen._
+
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
def name = newTermName("<error: " + tree.symbol + ">")
- def errorClass = if (context.reportGeneralErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
- def errorValue = if (context.reportGeneralErrors) context.owner.newErrorValue(name) else stdErrorValue
+ def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
+ def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
if (tree.hasSymbol)
@@ -203,59 +207,12 @@ trait Infer {
tree setType ErrorType
}
- def error(pos: Position, msg: String) {
- context.error(pos, msg)
- }
-
- def errorTree(tree: Tree, msg: String): Tree = {
- if (!tree.isErroneous) error(tree.pos, msg)
- setError(tree)
- }
-
- def typeError(pos: Position, found: Type, req: Type) {
- if (!found.isErroneous && !req.isErroneous) {
- error(pos, withAddendum(pos)(typeErrorMsg(found, req)))
-
- if (settings.explaintypes.value)
- explainTypes(found, req)
- }
- }
-
- def typeErrorMsg(found: Type, req: Type) = {
- def isPossiblyMissingArgs = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
- def missingArgsMsg = if (isPossiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
-
- "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
- }
-
- def typeErrorTree(tree: Tree, found: Type, req: Type): Tree = {
- // If the expected type is a refinement type, and the found type is a refinement or an anon
- // class, we can greatly improve the error message by retyping the tree to recover the actual
- // members present, then display along with the expected members. This is done here because
- // this is the last point where we still have access to the original tree, rather than just
- // the found/req types.
- val foundType: Type = req.normalize match {
- case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
- val retyped = typer typed (tree.duplicate setType null)
- val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
-
- if (foundDecls.isEmpty) found
- else {
- // The members arrive marked private, presumably because there was no
- // expected type and so they're considered members of an anon class.
- foundDecls foreach (_ resetFlag (PRIVATE | PROTECTED))
- // TODO: if any of the found parents match up with required parents after normalization,
- // print the error so that they match. The major beneficiary there would be
- // java.lang.Object vs. AnyRef.
- refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos)
- }
- case _ =>
- found
- }
- typeError(tree.pos, foundType, req)
- setError(tree)
- }
+ def getContext = context
+ def issue(err: AbsTypeError): Unit = context.issue(err)
+
+ def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
+
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -271,13 +228,12 @@ trait Infer {
if (sym.isError) {
tree setSymbol sym setType ErrorType
} else {
- val topClass = context.owner.toplevelClass
+ val topClass = context.owner.enclosingTopLevelClass
if (context.unit.exists)
- context.unit.depends += sym.toplevelClass
+ context.unit.depends += sym.enclosingTopLevelClass
var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
// Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
-
if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
sym1 = sym
@@ -287,7 +243,7 @@ trait Infer {
Console.println(tree)
Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
}
- new AccessError(tree, sym, pre,
+ ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
if (settings.check.isDefault)
analyzer.lastAccessCheckDetails
else
@@ -301,7 +257,8 @@ trait Infer {
"context.owner" -> context.owner,
"context.outer.enclClass.owner" -> context.outer.enclClass.owner
)
- )
+ ))(context)
+ setError(tree)
}
else {
if (sym1.isTerm)
@@ -314,10 +271,11 @@ trait Infer {
if (settings.debug.value) ex.printStackTrace
val sym2 = underlyingSymbol(sym1)
val itype = pre.memberType(sym2)
- new AccessError(tree, sym, pre,
- "\n because its instance type "+itype+
- (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg)).emit()
+ ErrorUtils.issueTypeError(
+ AccessError(tree, sym, pre, context.enclClass.owner,
+ "\n because its instance type "+itype+
+ (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
+ else " contains a "+ex.msg)))(context)
ErrorType
}
}
@@ -364,10 +322,7 @@ trait Infer {
def makeFullyDefined(tp: Type): Type = {
val tparams = new ListBuffer[Symbol]
def addTypeParam(bounds: TypeBounds): Type = {
- val tparam =
- context.owner.newAbstractType(context.tree.pos.focus, newTypeName("_"+tparams.size))
- .setFlag(EXISTENTIAL)
- .setInfo(bounds)
+ val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
tparams += tparam
tparam.tpe
}
@@ -459,13 +414,14 @@ trait Infer {
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
- (tparams, tvars).zipped map ((tparam, tvar) =>
+ map2(tparams, tvars)((tparam, tvar) =>
instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
else
tvars map (tvar => WildcardType)
}
object AdjustedTypeArgs {
+ val Result = collection.mutable.LinkedHashMap
type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
@@ -508,24 +464,27 @@ trait Infer {
* type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
- @inline def keep(targ: Type, tparam: Symbol) = (
- targ.typeSymbol != NothingClass // definitely not retracting, it's not Nothing!
- || (!restpe.isWildcard && (varianceInType(restpe)(tparam) & COVARIANT) != 0)) // occured covariantly --> don't retract
-
- @inline def adjusted(targ: Type, tvar: TypeVar) =
- if (targ.typeSymbol == RepeatedParamClass)
- targ.baseType(SeqClass)
- else if (targ.typeSymbol == JavaRepeatedParamClass)
- targ.baseType(ArrayClass)
- // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
- else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden))
- targ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else
- targ.widen
+ val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
+
+ foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
+ val retract = (
+ targ.typeSymbol == NothingClass // only retract Nothings
+ && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ )
- (tparams, tvars, targs).zipped.map { (tparam, tvar, targ) =>
- tparam -> (if(keep(targ, tparam)) Some(adjusted(targ, tvar)) else None)
- }(collection.breakOut)
+ // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
+ buf += ((tparam,
+ if (retract) None
+ else Some(
+ if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
+ else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
+ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
+ else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+ else targ.widen
+ )
+ ))
+ }
+ buf.result
}
/** Return inferred type arguments, given type parameters, formal parameters,
@@ -550,18 +509,6 @@ trait Infer {
throw new NoInstance("parameter lists differ in length")
val restpeInst = restpe.instantiateTypeParams(tparams, tvars)
- printInference(
- ptBlock("methTypeArgs",
- "tparams" -> tparams,
- "formals" -> formals,
- "restpe" -> restpe,
- "restpeInst" -> restpeInst,
- "argtpes" -> argtpes,
- "pt" -> pt,
- "tvars" -> tvars,
- "constraints" -> tvars.map(_.constr)
- )
- )
// first check if typevars can be fully defined from the expected type.
// The return value isn't used so I'm making it obvious that this side
@@ -584,7 +531,7 @@ trait Infer {
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
- (argtpes, formals).zipped map { (argtpe, formal) =>
+ map2(argtpes, formals) { (argtpe, formal) =>
val tp1 = argtpe.deconst.instantiateTypeParams(tparams, tvars)
val pt1 = formal.instantiateTypeParams(tparams, tvars)
@@ -599,17 +546,7 @@ trait Infer {
tvars, tparams, tparams map varianceInTypes(formals),
false, lubDepth(formals) max lubDepth(argtpes)
)
- val result = adjustTypeArgs(tparams, tvars, targs, restpe)
-
- printInference(
- ptBlock("methTypeArgs result",
- "tvars" -> tvars,
- "constraints" -> tvars.map(_.constr),
- "targs" -> targs,
- "adjusted type args" -> result
- )
- )
- result
+ adjustTypeArgs(tparams, tvars, targs, restpe)
}
private[typechecker] def followApply(tp: Type): Type = tp match {
@@ -703,13 +640,7 @@ trait Infer {
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case MethodType(params, _) =>
- val formals0 = params map { param =>
- param.tpe match {
- case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe
- case tpe => tpe
- }
- }
- val formals = formalTypes(formals0, argtpes0.length)
+ val formals = formalTypes(params map { _.tpe }, argtpes0.length)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -756,7 +687,8 @@ trait Infer {
typesCompatible(reorderArgs(argtpes1, argPos))
)
}
- } else {
+ }
+ else {
// not enough arguments, check if applicable using defaults
val missing = missingParams[Type](argtpes0, params, {
case NamedType(name, _) => Some(name)
@@ -780,25 +712,20 @@ trait Infer {
false
}
- /** Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ /**
+ * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+   * Routing errors through the context buffer (instead of throwing) should make TypeErrors rarer here.
*/
private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
argtpes0: List[Type], pt: Type): Boolean = {
- val reportAmbiguousErrors = context.reportAmbiguousErrors
- context.reportAmbiguousErrors = false
- try {
- isApplicable(undetparams, ftpe, argtpes0, pt)
- } catch {
- case ex: TypeError =>
- try {
- isApplicable(undetparams, ftpe, argtpes0, WildcardType)
- } catch {
- case ex: TypeError =>
- false
- }
- } finally {
- context.reportAmbiguousErrors = reportAmbiguousErrors
- }
+ val silentContext = context.makeSilent(false)
+ val typer0 = newTyper(silentContext)
+ val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+ if (pt != WildcardType && silentContext.hasErrors) {
+ silentContext.flushBuffer()
+ val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
+ if (silentContext.hasErrors) false else res2
+ } else res1
}
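The rewritten isApplicableSafe replaces the old try/catch fallback with a silent context. The underlying probing idiom, reduced to its essentials (makeSilent, newTyper and hasErrors as used in the hunk; this sketch omits the WildcardType retry), is:

    // Probe applicability without letting errors or ambiguity reach the caller:
    // run the check against a throw-away silent context and treat a non-empty
    // error buffer as failure.
    val silentContext = context.makeSilent(false)   // false: don't report ambiguous errors
    val probeTyper    = newTyper(silentContext)
    val applicable    = probeTyper.infer.isApplicable(undetparams, ftpe, argtpes0, pt) &&
                        !silentContext.hasErrors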
/** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
@@ -960,73 +887,31 @@ trait Infer {
*/
/** error if arguments not within bounds. */
- def checkBounds(pos: Position, pre: Type, owner: Symbol,
- tparams: List[Symbol], targs: List[Type], prefix: String) = {
+ def checkBounds(tree: Tree, pre: Type, owner: Symbol,
+ tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = {
//@M validate variances & bounds of targs wrt variances & bounds of tparams
//@M TODO: better place to check this?
//@M TODO: errors for getters & setters are reported separately
val kindErrors = checkKindBounds(tparams, targs, pre, owner)
- if (!kindErrors.isEmpty) {
- if (targs contains WildcardType) ()
- else error(pos,
- prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") +
- " do not conform to the expected kinds of the type parameters "+
- tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." +
- kindErrors.toList.mkString("\n", ", ", ""))
- }
- else if (!isWithinBounds(pre, owner, tparams, targs)) {
+    if (!kindErrors.isEmpty) {
+ if (targs contains WildcardType) true
+ else { KindBoundErrors(tree, prefix, targs, tparams, kindErrors); false }
+ } else if (!isWithinBounds(pre, owner, tparams, targs)) {
if (!(targs exists (_.isErroneous)) && !(tparams exists (_.isErroneous))) {
- //val bounds = instantiatedBounds(pre, owner, tparams, targs)//DEBUG
- //println("bounds = "+bounds+", targs = "+targs+", targclasses = "+(targs map (_.getClass))+", parents = "+(targs map (_.parents)))
- //println(List.map2(bounds, targs)((bound, targ) => bound containsType targ))
- error(pos,
- prefix + "type arguments " + targs.mkString("[", ",", "]") +
- " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
- (tparams map (_.defString)).mkString("[", ",", "]"))
- if (settings.explaintypes.value) {
- val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
- (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
- (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
- ()
- }
- }
- }
+ NotWithinBounds(tree, prefix, targs, tparams, kindErrors)
+ false
+ } else true
+ } else true
}
-
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- // @M TODO this method is duplicated all over the place (varianceString)
- def varStr(s: Symbol): String =
- if (s.isCovariant) "covariant"
- else if (s.isContravariant) "contravariant"
- else "invariant";
-
- def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
- if((a0 eq b0) || (a0.owner eq b0.owner)) ""
- else {
- var a = a0; var b = b0
- while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
- if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
- }
- }
-
- val errors = checkKindBounds0(tparams, targs, pre, owner, true)
- val errorMessages = new ListBuffer[String]
- errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages +=
- (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
- (for ((a, p) <- arityMismatches)
- yield a+qualify(a,p)+ " has "+countElementsAsString(a.typeParams.length, "type parameter")+", but "+
- p+qualify(p,a)+" has "+countAsString(p.typeParams.length)).toList.mkString(", ") +
- (for ((a, p) <- varianceMismatches)
- yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
- p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
- (for ((a, p) <- stricterBounds)
- yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
- p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", "))
+ checkKindBounds0(tparams, targs, pre, owner, true) map {
+ case (targ, tparam, kindErrors) =>
+ kindErrors.errorMessage(targ, tparam)
}
- errorMessages.toList
}
+
/** Substitute free type variables `undetparams` of polymorphic argument
* expression `tree`, given two prototypes `strictPt`, and `lenientPt`.
* `strictPt` is the first attempt prototype where type parameters
@@ -1099,8 +984,7 @@ trait Infer {
targs: List[Type], pt: Type) {
if (targs eq null) {
if (!tree.tpe.isErroneous && !pt.isErroneous)
- error(tree.pos, "polymorphic expression cannot be instantiated to expected type" +
- foundReqMsg(polyType(undetparams, skipImplicit(tree.tpe)), pt))
+ PolymorphicExpressionInstantiationError(tree, undetparams, pt)
} else {
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
}
@@ -1119,15 +1003,6 @@ trait Infer {
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
case MethodType(params0, _) =>
- printInference(
- ptBlock("inferMethodInstance",
- "fn" -> fn,
- "undetparams" -> undetparams,
- "args" -> args,
- "pt0" -> pt0
- )
- )
-
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(params0 map (_.tpe), args.length)
@@ -1137,47 +1012,36 @@ trait Infer {
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
- val treeSubst = new TreeTypeSubstituter(okparams, okargs)
- treeSubst traverseTrees fn :: args
-
- val result = leftUndet match {
- case Nil => Nil
- case xs =>
- // #3890
- val xs1 = treeSubst.typeMap mapOver xs
- if (xs ne xs1)
- new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args
-
- xs1
- }
- if (result.nonEmpty)
- printInference("inferMethodInstance, still undetermined: " + result)
-
- result
+ printInference("[infer method] solving for %s in %s based on (%s)%s (%s)".format(
+ undetparams.map(_.name).mkString(", "),
+ fn.tpe,
+ argtpes.mkString(", "),
+ restpe,
+ (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "")
+ ))
+
+ if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) {
+ val treeSubst = new TreeTypeSubstituter(okparams, okargs)
+ treeSubst traverseTrees fn :: args
+
+ leftUndet match {
+ case Nil => Nil
+ case xs =>
+ // #3890
+ val xs1 = treeSubst.typeMap mapOver xs
+ if (xs ne xs1)
+ new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args
+
+ xs1
+ }
+ } else Nil
}
catch ifNoInstance { msg =>
- errorTree(fn, "no type parameters for " +
- applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) +
- "\n --- because ---\n" + msg
- )
- Nil
+ NoMethodInstanceError(fn, args, msg); List()
}
}
- /** Type with all top-level occurrences of abstract types replaced by their bounds */
- def widen(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
- case TypeRef(_, sym, _) if sym.isAbstractType =>
- widen(tp.bounds.hi)
- case TypeRef(_, sym, _) if sym.isAliasType =>
- widen(tp.normalize)
- case rtp @ RefinedType(parents, decls) =>
- copyRefinedType(rtp, parents mapConserve widen, decls)
- case AnnotatedType(_, underlying, _) =>
- widen(underlying)
- case _ =>
- tp
- }
+ def widen(tp: Type): Type = abstractTypesToBounds(tp)
/** Substitute free type variables <code>undetparams</code> of type constructor
* <code>tree</code> in pattern, given prototype <code>pt</code>.
@@ -1198,20 +1062,16 @@ trait Infer {
try {
val targs = solvedTypes(tvars, undetparams, undetparams map varianceInType(restpe),
true, lubDepth(List(restpe, pt)))
-// checkBounds(tree.pos, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
+// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- } catch {
- case ex: NoInstance =>
- errorTree(tree, "constructor of type " + restpe +
- " cannot be uniquely instantiated to expected type " + pt +
- "\n --- because ---\n" + ex.getMessage())
+    } catch ifNoInstance { msg =>
+ NoConstructorInstanceError(tree, restpe, pt, msg)
}
def instError = {
if (settings.debug.value) Console.println("ici " + tree + " " + undetparams + " " + pt)
if (settings.explaintypes.value) explainTypes(restpe.instantiateTypeParams(undetparams, tvars), pt)
- errorTree(tree, "constructor cannot be instantiated to expected type" +
- foundReqMsg(restpe, pt))
+ ConstrInstantiationError(tree, restpe, pt)
}
if (restpe.instantiateTypeParams(undetparams, tvars) <:< pt) {
computeArgs
@@ -1281,9 +1141,9 @@ trait Infer {
}
}
- def checkCheckable(pos: Position, tp: Type, kind: String) {
+ def checkCheckable(tree: Tree, tp: Type, kind: String) {
def patternWarning(tp0: Type, prefix: String) = {
- context.unit.uncheckedWarning(pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure")
+ context.unit.uncheckedWarning(tree.pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure")
}
def check(tp: Type, bound: List[Symbol]) {
def isLocalBinding(sym: Symbol) =
@@ -1302,7 +1162,7 @@ trait Infer {
} else if (sym.isAliasType) {
check(tp.normalize, bound)
} else if (sym == NothingClass || sym == NullClass || sym == AnyValClass) {
- error(pos, "type "+tp+" cannot be used in a type pattern or isInstanceOf test")
+ TypePatternOrIsInstanceTestError(tree, tp)
} else {
for (arg <- args) {
if (sym == ArrayClass) check(arg, bound)
@@ -1326,11 +1186,12 @@ trait Infer {
case ExistentialType(quantified, tp1) =>
check(tp1, bound ::: quantified)
case ThisType(_) =>
- ;
+ ()
case NoPrefix =>
- ;
+ ()
case _ =>
patternWarning(tp, "type ")
+ ()
}
}
check(tp, List())
@@ -1353,7 +1214,7 @@ trait Infer {
}
}
- def inferTypedPattern(pos: Position, pattp: Type, pt0: Type): Type = {
+ def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type): Type = {
val pt = widen(pt0)
val ptparams = freeTypeParamsOfTerms.collect(pt)
val tpparams = freeTypeParamsOfTerms.collect(pattp)
@@ -1365,10 +1226,12 @@ trait Infer {
* This is the case if the scrutinee has no unresolved type arguments
* and is a "final type", meaning final + invariant in all type parameters.
*/
- if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp)
- error(pos, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
+ if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) {
+ IncompatibleScrutineeTypeError(tree0, pattp, pt)
+ return ErrorType
+ }
- checkCheckable(pos, pattp, "pattern ")
+ checkCheckable(tree0, pattp, "pattern ")
if (pattp <:< pt) ()
else {
debuglog("free type params (1) = " + tpparams)
@@ -1391,8 +1254,8 @@ trait Infer {
if (isPopulated(tp, pt1) && isInstantiatable(tvars ++ ptvars) || pattpMatchesPt)
ptvars foreach instantiateTypeVar
else {
- error(pos, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt))
- return pattp
+ PatternTypeIncompatibleWithPtError1(tree0, pattp, pt)
+ return ErrorType
}
}
tvars foreach instantiateTypeVar
@@ -1413,30 +1276,8 @@ trait Infer {
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
if (pat.tpe <:< pt1)
ptvars foreach instantiateTypeVar
- else {
- val sym = pat.tpe.typeSymbol
- val clazz = sym.companionClass
- val addendum = (
- if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) {
- // TODO: move these somewhere reusable.
- val typeString = clazz.typeParams match {
- case Nil => "" + clazz.name
- case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
- }
- val caseString = (
- clazz.caseFieldAccessors
- map (_ => "_") // could use the actual param names here
- mkString (clazz.name + "(", ",", ")")
- )
- (
- "\nNote: if you intended to match against the class, try `case _: " +
- typeString + "` or `case " + caseString + "`"
- )
- }
- else ""
- )
- error(pat.pos, "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum)
- }
+ else
+ PatternTypeIncompatibleWithPtError2(pat, pt1, pt)
}
object toOrigin extends TypeMap {
@@ -1469,8 +1310,17 @@ trait Infer {
/** A traverser to collect type parameters referred to in a type
*/
object freeTypeParamsOfTerms extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean =
- sym.isAbstractType && sym.owner.isTerm
+ // An inferred type which corresponds to an unknown type
+ // constructor creates a file/declaration order-dependent crasher
+ // situation, the behavior of which depends on the state at the
+ // time the typevar is created. Until we can deal with these
+ // properly, we can avoid it by ignoring type parameters which
+ // have type constructors amongst their bounds. See SI-4070.
+ protected def includeCondition(sym: Symbol) = (
+ sym.isAbstractType
+ && sym.owner.isTerm
+ && !sym.info.bounds.exists(_.typeParams.nonEmpty)
+ )
}
/** A traverser to collect type parameters referred to in a type
@@ -1504,7 +1354,7 @@ trait Infer {
* If several alternatives match `pt`, take parameterless one.
* If no alternative matches `pt`, take the parameterless one anyway.
*/
- def inferExprAlternative(tree: Tree, pt: Type): Unit = tree.tpe match {
+ def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
case OverloadedType(pre, alts) => tryTwice {
val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
val secondTry = alts0.isEmpty
@@ -1535,15 +1385,10 @@ trait Infer {
case _ =>
}
}
- typeErrorTree(tree, tree.symbol.tpe, pt)
+ NoBestExprAlternativeError(tree, pt)
} else if (!competing.isEmpty) {
- if (secondTry) {
- typeErrorTree(tree, tree.symbol.tpe, pt)
- } else {
- if (!pt.isErroneous)
- context.ambiguousError(tree.pos, pre, best, competing.head, "expected type " + pt)
- setError(tree)
- }
+ if (secondTry) NoBestExprAlternativeError(tree, pt)
+ else { if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt) }
} else {
// val applicable = alts1 filter (alt =>
// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
@@ -1553,9 +1398,11 @@ trait Infer {
}
}
- @inline private def wrapTypeError(expr: => Boolean): Boolean =
- try expr
- catch { case _: TypeError => false }
+ @inline private def inSilentMode(expr: Typer => Boolean): Boolean = {
+ val silentContext = context.makeSilent(context.ambiguousErrors)
+ val res = expr(newTyper(silentContext))
+ if (silentContext.hasErrors) false else res
+ }
// Checks against the name of the parameter and also any @deprecatedName.
private def paramMatchesName(param: Symbol, name: Name) =
@@ -1625,9 +1472,7 @@ trait Infer {
val applicable = resolveOverloadedMethod(argtpes, {
alts filter { alt =>
- // TODO: this will need to be re-written once we substitute throwing exceptions
- // with generating error trees. We wrap this applicability in try/catch because of #4457.
- wrapTypeError(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
+ inSilentMode(typer0 => typer0.infer.isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
(!varArgsOnly || isVarArgsList(alt.tpe.params))
}
})
@@ -1643,16 +1488,13 @@ trait Infer {
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType) {
- errorTree(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
- } else {
+ if (pt == WildcardType)
+ NoBestMethodAlternativeError(tree, argtpes, pt)
+ else
inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
- }
} else if (!competing.isEmpty) {
if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous)
- context.ambiguousError(tree.pos, pre, best, competing.head,
- "argument types " + argtpes.mkString("(", ",", ")") +
- (if (pt == WildcardType) "" else " and expected result type " + pt))
+ AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt)
setError(tree)
()
} else {
@@ -1668,18 +1510,27 @@ trait Infer {
*
* @param infer ...
*/
- def tryTwice(infer: => Unit) {
+ def tryTwice(infer: => Unit): Unit = {
if (context.implicitsEnabled) {
- val reportGeneralErrors = context.reportGeneralErrors
- context.reportGeneralErrors = false
- try context.withImplicitsDisabled(infer)
- catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError =>
- context.reportGeneralErrors = reportGeneralErrors
+ val saved = context.state
+ var fallback = false
+ context.setBufferErrors()
+ val res = try {
+ context.withImplicitsDisabled(infer)
+ if (context.hasErrors) {
+ fallback = true
+ context.restoreState(saved)
+ context.flushBuffer()
infer
+ }
+ } catch {
+ case ex: CyclicReference => throw ex
+ case ex: TypeError => // recoverable cyclic references
+ context.restoreState(saved)
+ if (!fallback) infer else ()
}
- context.reportGeneralErrors = reportGeneralErrors
+ context.restoreState(saved)
+ res
}
else infer
}
@@ -1694,13 +1545,13 @@ trait Infer {
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
- def fail(msg: String): Unit = error(tree.pos, msg)
+ def fail(kind: PolyAlternativeErrorKind.ErrorType) =
+ PolyAlternativeError(tree, argtypes, sym0, kind)
- if (sym0 == NoSymbol) return fail(
+ if (sym0 == NoSymbol) return (
if (alts exists (_.typeParams.nonEmpty))
- "wrong number of type parameters for " + treeSymTypeMsg(tree)
- else treeSymTypeMsg(tree) + " does not take type parameters"
- )
+ fail(PolyAlternativeErrorKind.WrongNumber)
+ else fail(PolyAlternativeErrorKind.NoParams))
val (resSym, resTpe) = {
if (!sym0.isOverloaded)
@@ -1708,11 +1559,8 @@ trait Infer {
else {
val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
if (sym == NoSymbol) {
- if (argtypes forall (x => !x.isErroneous)) fail(
- "type arguments " + argtypes.mkString("[", ",", "]") +
- " conform to the bounds of none of the overloaded alternatives of\n "+sym0+
- ": "+sym0.info
- )
+ if (argtypes forall (x => !x.isErroneous))
+ fail(PolyAlternativeErrorKind.ArgsDoNotConform)
return
}
else if (sym.isOverloaded) {
@@ -1729,24 +1577,6 @@ trait Infer {
// Side effects tree with symbol and type
tree setSymbol resSym setType resTpe
}
-
- abstract class TreeForwarder(forwardTo: Tree) extends Tree {
- override def pos = forwardTo.pos
- override def hasSymbol = forwardTo.hasSymbol
- override def symbol = forwardTo.symbol
- override def symbol_=(x: Symbol) = forwardTo.symbol = x
- }
-
- case class AccessError(tree: Tree, sym: Symbol, pre: Type, explanation: String) extends TreeForwarder(tree) {
- setError(this)
-
- // @PP: It is improbable this logic shouldn't be in use elsewhere as well.
- private def location = if (sym.isClassConstructor) context.enclClass.owner else pre.widen
- def emit(): Tree = {
- val realsym = underlyingSymbol(sym)
- errorTree(tree, realsym.fullLocationString + " cannot be accessed in " + location + explanation)
- }
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 99ba0e0971..470f3e7117 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -2,59 +2,79 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
+import scala.tools.nsc.util._
+import scala.reflect.ReflectionUtils
trait Macros { self: Analyzer =>
import global._
import definitions._
- def macroMethName(name: Name) =
- newTermName((if (name.isTypeName) "type" else "def") + "macro$" + name)
-
def macroMeth(mac: Symbol): Symbol = {
var owner = mac.owner
if (!owner.isModuleClass) owner = owner.companionModule.moduleClass
- owner.info.decl(macroMethName(mac.name))
+ owner.info.decl(nme.macroMethodName(mac.name))
+ }
+
+ def macroArgs(tree: Tree): (List[List[Tree]]) = tree match {
+ case Apply(fn, args) =>
+ macroArgs(fn) :+ args
+ case TypeApply(fn, args) =>
+ macroArgs(fn) :+ args
+ case Select(qual, name) =>
+ List(List(qual))
+ case _ =>
+ List(List())
}
/**
- * The definition of the method implementing a macro. Example:
+ * The definition of the method implementing a macro. Example:
* Say we have in a class C
*
* def macro foo[T](xs: List[T]): T = expr
*
* Then the following macro method is generated for `foo`:
*
- * def defmacro$foo(glob: scala.reflect.api.Universe)
- * (_this: glob.Tree)
- * (T: glob.Type)
- * (xs: glob.Tree): glob.Tree = {
- * implicit val $glob = glob
+ * def defmacro$foo
+ * (_context: scala.reflect.macro.Context)
+ * (_this: _context.Tree)
+ * (T: _context.TypeTree)
+ * (xs: _context.Tree): _context.Tree = {
+ * import _context._ // this means that all methods of Context can be used unqualified in macro's body
* expr
* }
*
- * If `foo` is declared in an object, the second parameter list is () instead of (_this: glob.Tree).
+   * If the macro has no type arguments, the third parameter list is omitted (it is not left empty, but dropped altogether).
+   *
+   * To see the desugared representation of a particular macro, compile it with -Ymacro-debug.
*/
def macroMethDef(mdef: DefDef): Tree = {
def paramDef(name: Name, tpt: Tree) = ValDef(Modifiers(PARAM), name, tpt, EmptyTree)
- val universeType = TypeTree(ReflectApiUniverse.tpe)
- val globParamSec = List(paramDef("glob", universeType))
- def globSelect(name: Name) = Select(Ident("glob"), name)
- def globTree = globSelect(newTypeName("Tree"))
- def globType = globSelect(newTypeName("Type"))
- val thisParamSec = if (mdef.symbol.owner.isModuleClass) List() else List(paramDef("_this", globTree))
- def tparamInMacro(tdef: TypeDef) = paramDef(tdef.name.toTermName, globType)
- def vparamInMacro(vdef: ValDef): ValDef = paramDef(vdef.name, globTree)
+ val contextType = TypeTree(ReflectMacroContext.tpe)
+ val globParamSec = List(paramDef(nme.macroContext, contextType))
+ def globSelect(name: Name) = Select(Ident(nme.macroContext), name)
+ def globTree = globSelect(tpnme.Tree)
+ def globTypeTree = globSelect(tpnme.TypeTree)
+ val thisParamSec = List(paramDef(newTermName(nme.macroThis), globTree))
+ def tparamInMacro(tdef: TypeDef) = paramDef(tdef.name.toTermName, globTypeTree)
+ def vparamInMacro(vdef: ValDef): ValDef = paramDef(vdef.name, vdef.tpt match {
+ case tpt @ AppliedTypeTree(hk, _) if treeInfo.isRepeatedParamType(tpt) => AppliedTypeTree(hk, List(globTree))
+ case _ => globTree
+ })
def wrapImplicit(tree: Tree) = atPos(tree.pos) {
- Block(List(ValDef(Modifiers(IMPLICIT), "$glob", universeType, Ident("glob"))), tree)
+ // implicit hasn't proven useful so far, so I'm disabling it
+ //val implicitDecl = ValDef(Modifiers(IMPLICIT), nme.macroContextImplicit, SingletonTypeTree(Ident(nme.macroContext)), Ident(nme.macroContext))
+ val importGlob = Import(Ident(nme.macroContext), List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ Block(List(importGlob), tree)
}
+ var formals = (mdef.vparamss map (_ map vparamInMacro))
+ if (mdef.tparams.nonEmpty) formals = (mdef.tparams map tparamInMacro) :: formals
atPos(mdef.pos) {
new DefDef( // can't call DefDef here; need to find out why
- mods = mdef.mods &~ MACRO,
- name = macroMethName(mdef.name),
+ mods = mdef.mods &~ MACRO &~ OVERRIDE,
+ name = nme.macroMethodName(mdef.name),
tparams = List(),
- vparamss = globParamSec :: thisParamSec :: (mdef.tparams map tparamInMacro) ::
- (mdef.vparamss map (_ map vparamInMacro)),
+ vparamss = globParamSec :: thisParamSec :: formals,
tpt = globTree,
wrapImplicit(mdef.rhs))
}
@@ -62,11 +82,158 @@ trait Macros { self: Analyzer =>
def addMacroMethods(templ: Template, namer: Namer): Unit = {
for (ddef @ DefDef(mods, _, _, _, _, _) <- templ.body if mods hasFlag MACRO) {
- val sym = namer.enterSyntheticSym(util.trace("macro def: ")(macroMethDef(ddef)))
- println("added to "+namer.context.owner.enclClass+": "+sym)
+ val trace = scala.tools.nsc.util.trace when settings.Ymacrodebug.value
+ val sym = namer.enterSyntheticSym(trace("macro def: ")(macroMethDef(ddef)))
+ trace("added to "+namer.context.owner.enclClass+": ")(sym)
+ }
+ }
+
+ lazy val mirror = new scala.reflect.runtime.Mirror {
+ lazy val libraryClassLoader = {
+ val classpath = global.classPath.asURLs
+ ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ }
+
+ override def defaultReflectiveClassLoader() = libraryClassLoader
+ }
+
+  /** Optionally return the companion object instance and the implementation method symbol
+   *  of the given macro, or None if the implementation classfile cannot be loaded or does
+   *  not contain the macro implementation.
+ */
+ def macroImpl(mac: Symbol): Option[(AnyRef, mirror.Symbol)] = {
+ val debug = settings.Ymacrodebug.value
+ val trace = scala.tools.nsc.util.trace when debug
+ trace("looking for macro implementation: ")(mac.fullNameString)
+
+ try {
+ val mmeth = macroMeth(mac)
+ trace("found implementation at: ")(mmeth.fullNameString)
+
+ if (mmeth == NoSymbol) None
+ else {
+ val receiverClass: mirror.Symbol = mirror.symbolForName(mmeth.owner.fullName)
+ if (debug) {
+ println("receiverClass is: " + receiverClass.fullNameString)
+
+ val jreceiverClass = mirror.classToJava(receiverClass)
+ val jreceiverSource = jreceiverClass.getProtectionDomain.getCodeSource
+ println("jreceiverClass is %s from %s".format(jreceiverClass, jreceiverSource))
+
+ val jreceiverClasspath = jreceiverClass.getClassLoader match {
+ case cl: java.net.URLClassLoader => "[" + (cl.getURLs mkString ",") + "]"
+ case _ => "<unknown>"
+ }
+ println("jreceiverClassLoader is %s with classpath %s".format(jreceiverClass.getClassLoader, jreceiverClasspath))
+ }
+
+ val receiverObj = receiverClass.companionModule
+ trace("receiverObj is: ")(receiverObj.fullNameString)
+
+ if (receiverObj == mirror.NoSymbol) None
+ else {
+ val receiver = mirror.companionInstance(receiverClass)
+ val rmeth = receiverObj.info.member(mirror.newTermName(mmeth.name.toString))
+ if (debug) {
+ println("rmeth is: " + rmeth.fullNameString)
+ println("jrmeth is: " + mirror.methodToJava(rmeth))
+ }
+
+ if (rmeth == mirror.NoSymbol) None
+ else {
+ Some((receiver, rmeth))
+ }
+ }
+ }
+ } catch {
+ case ex: ClassNotFoundException =>
+ None
}
}
- def macroExpand(tree: Tree): Tree = ???
+ /** Return result of macro expansion.
+  /** Return the result of macro expansion.
+   *  Or, if that fails and the macro overrides a method, return a
+   *  tree that calls that overridden method instead of the macro.
+   */
+ def macroExpand(tree: Tree, context: Context): Option[Any] = {
+ val trace = scala.tools.nsc.util.trace when settings.Ymacrodebug.value
+ trace("macroExpand: ")(tree)
-}
\ No newline at end of file
+ val macroDef = tree.symbol
+ macroImpl(macroDef) match {
+ case Some((receiver, rmeth)) =>
+ val argss = List(global) :: macroArgs(tree)
+ val paramss = macroMeth(macroDef).paramss
+ trace("paramss: ")(paramss)
+ val rawArgss = for ((as, ps) <- argss zip paramss) yield {
+ if (isVarArgsList(ps)) as.take(ps.length - 1) :+ as.drop(ps.length - 1)
+ else as
+ }
+ val rawArgs: Seq[Any] = rawArgss.flatten
+ trace("rawArgs: ")(rawArgs)
+ val savedInfolevel = nodePrinters.infolevel
+ try {
+          // @xeno.by: InfoLevel.Verbose examines and prints out infos of symbols.
+          // Through `this` references those symbols can climb up the lexical scope,
+          // and when they are examined by a node printer it will enumerate and
+          // analyze their children (asking for infos and tpes). If one of those
+          // children involves macro expansion, things might get nasty.
+          // That's why this behavior is temporarily turned off here.
+ nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
+ Some(mirror.invoke(receiver, rmeth)(rawArgs: _*))
+ } catch {
+ case ex =>
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ val msg = if (settings.Ymacrodebug.value) {
+ val stacktrace = new java.io.StringWriter()
+ realex.printStackTrace(new java.io.PrintWriter(stacktrace))
+ System.getProperty("line.separator") + stacktrace
+ } else {
+ realex.getMessage
+ }
+ context.unit.error(tree.pos, "exception during macro expansion: " + msg)
+ None
+ } finally {
+ nodePrinters.infolevel = savedInfolevel
+ }
+ case None =>
+ def notFound() = {
+ context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name)
+ None
+ }
+ def fallBackToOverridden(tree: Tree): Option[Tree] = {
+ tree match {
+ case Select(qual, name) if (macroDef.isMacro) =>
+ macroDef.allOverriddenSymbols match {
+ case first :: _ =>
+ Some(Select(qual, name) setPos tree.pos setSymbol first)
+ case _ =>
+ trace("macro is not overridden: ")(tree)
+ notFound()
+ }
+ case Apply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
+ case TypeApply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
+ case _ =>
+ trace("unexpected tree in fallback: ")(tree)
+ notFound()
+ }
+ }
+ fallBackToOverridden(tree) match {
+ case Some(tree1) =>
+ trace("falling back to ")(tree1)
+ currentRun.macroExpansionFailed = true
+ Some(tree1)
+ case None =>
+ None
+ }
+ }
+ }
+}
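The macroArgs decomposition added above is easiest to see on a toy reproduction. The sketch below uses a stand-in Tree ADT rather than the compiler's trees, purely to show the shape of the result: the prefix first, then each type/value argument list in source order (macroExpand later prepends `global` to this list).

    object MacroArgsDemo {
      sealed trait Tree
      case class Ident(name: String)                   extends Tree
      case class Select(qual: Tree, name: String)      extends Tree
      case class TypeApply(fn: Tree, args: List[Tree]) extends Tree
      case class Apply(fn: Tree, args: List[Tree])     extends Tree

      // Same recursion as the patch's macroArgs, over the toy ADT.
      def macroArgs(tree: Tree): List[List[Tree]] = tree match {
        case Apply(fn, args)     => macroArgs(fn) :+ args
        case TypeApply(fn, args) => macroArgs(fn) :+ args
        case Select(qual, _)     => List(List(qual))
        case _                   => List(List())
      }

      def main(args: Array[String]): Unit = {
        // receiver.foo[T](a, b)(c)
        val call = Apply(
          Apply(
            TypeApply(Select(Ident("receiver"), "foo"), List(Ident("T"))),
            List(Ident("a"), Ident("b"))),
          List(Ident("c")))

        assert(macroArgs(call) == List(
          List(Ident("receiver")), List(Ident("T")),
          List(Ident("a"), Ident("b")), List(Ident("c"))))
      }
    }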
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index d75e119fd7..915d7a98db 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -7,22 +7,7 @@ package typechecker
import symtab.Flags._
import scala.collection.{ mutable, immutable }
-
-object listutil {
- def mexists[T](xss: List[List[T]])(p: T => Boolean) =
- xss exists (_ exists p)
- def mmap[T, U](xss: List[List[T]])(f: T => U) =
- xss map (_ map f)
- def mforeach[T](xss: List[List[T]])(f: T => Unit) =
- xss foreach (_ foreach f)
- def mfind[T](xss: List[List[T]])(p: T => Boolean): Option[T] = {
- for (xs <- xss; x <- xs)
- if (p(x)) return Some(x)
- None
- }
- def mfilter[T](xss: List[List[T]])(p: T => Boolean) =
- for (xs <- xss; x <- xs; if p(x)) yield x
-}
+import scala.tools.util.StringOps.{ ojoin }
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -32,6 +17,139 @@ trait MethodSynthesis {
import global._
import definitions._
+ import CODE._
+
+ object synthesisUtil {
+ type M[T] = Manifest[T]
+ type CM[T] = ClassManifest[T]
+
+ def ValOrDefDef(sym: Symbol, body: Tree) =
+ if (sym.isLazy) ValDef(sym, body)
+ else DefDef(sym, body)
+
+ def applyTypeInternal(manifests: List[M[_]]): Type = {
+ val symbols = manifests map manifestToSymbol
+ val container :: args = symbols
+ val tparams = container.typeConstructor.typeParams
+
+ // Conservative at present - if manifests were more usable this could do a lot more.
+ require(symbols forall (_ ne NoSymbol), "Must find all manifests: " + symbols)
+ require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
+ require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
+
+ typeRef(container.typeConstructor.prefix, container, args map (_.tpe))
+ }
+
+ def companionType[T](implicit m: M[T]) =
+ getRequiredModule(m.erasure.getName).tpe
+
+ // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
+ def applyType[CC](implicit m1: M[CC]): Type =
+ applyTypeInternal(List(m1))
+
+ def applyType[CC[X1], X1](implicit m1: M[CC[_]], m2: M[X1]): Type =
+ applyTypeInternal(List(m1, m2))
+
+ def applyType[CC[X1, X2], X1, X2](implicit m1: M[CC[_,_]], m2: M[X1], m3: M[X2]): Type =
+ applyTypeInternal(List(m1, m2, m3))
+
+ def applyType[CC[X1, X2, X3], X1, X2, X3](implicit m1: M[CC[_,_,_]], m2: M[X1], m3: M[X2], m4: M[X3]): Type =
+ applyTypeInternal(List(m1, m2, m3, m4))
+
+ def newMethodType[F](owner: Symbol)(implicit m: Manifest[F]): Type = {
+ val fnSymbol = manifestToSymbol(m)
+ assert(fnSymbol isSubClass FunctionClass(m.typeArguments.size - 1), (owner, m))
+ val symbols = m.typeArguments map (m => manifestToSymbol(m))
+ val formals = symbols.init map (_.typeConstructor)
+ val params = owner newSyntheticValueParams formals
+
+ MethodType(params, symbols.last.typeConstructor)
+ }
+ }
+ import synthesisUtil._
+
+ class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
+ private def isOverride(name: TermName) =
+ clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
+
+ def newMethodFlags(name: TermName) = {
+ val overrideFlag = if (isOverride(name)) OVERRIDE else 0L
+ overrideFlag | SYNTHETIC
+ }
+ def newMethodFlags(method: Symbol) = {
+ val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+ (method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
+ }
+
+ private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
+ logResult("finishMethod")(localTyper typed ValOrDefDef(method, f(method)))
+
+ private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
+ val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ finishMethod(m setInfoAndEnter info, f)
+ }
+ private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
+ val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ finishMethod(m setInfoAndEnter infoFn(m), f)
+ }
+ private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
+ val m = original.cloneSymbol(clazz, newMethodFlags(original)) setPos clazz.pos.focus
+ m.name = name
+ finishMethod(clazz.info.decls enter m, f)
+ }
+
+ private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
+ cloneInternal(original, f, original.name)
+
+ def clazzMember(name: Name) = clazz.info nonPrivateMember name
+ def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
+
+ /** Function argument takes the newly created method symbol of
+ * the same type as `name` in clazz, and returns the tree to be
+ * added to the template.
+ */
+ def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
+ overrideMethod(clazzMember(name))(f)
+
+ def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
+ cloneInternal(original, sym => f(sym setFlag OVERRIDE))
+
+ def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
+ cloneInternal(original, f, nameFn(original.name))
+
+ def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree =
+ createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType))
+
+ def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree =
+ createInternal(name, f, NullaryMethodType(returnType))
+
+ def createMethod(original: Symbol)(f: Symbol => Tree): Tree =
+ createInternal(original.name, f, original.info)
+
+ def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree =
+ createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
+
+ def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
+ createMethod(name, List(IntClass.tpe), returnType) { m =>
+ val arg0 = Ident(m.firstParam)
+ val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
+ val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
+
+ Match(arg0, cases)
+ }
+ }
+
+ // def foo() = constant
+ def constantMethod(name: Name, value: Any): Tree = {
+ val constant = Constant(value)
+ createMethod(name, Nil, constant.tpe)(_ => Literal(constant))
+ }
+ // def foo = constant
+ def constantNullary(name: Name, value: Any): Tree = {
+ val constant = Constant(value)
+ createMethod(name, constant.tpe)(_ => Literal(constant))
+ }
+ }
/** There are two key methods in here.
*
@@ -47,10 +165,12 @@ trait MethodSynthesis {
trait MethodSynth {
self: Namer =>
+ import NamerErrorGen._
+
def enterGetterSetter(tree: ValDef) {
val ValDef(mods, name, _, _) = tree
if (nme.isSetterName(name))
- context.error(tree.pos, "Names of vals or vars may not end in `_='")
+ ValOrValWithSetterSuffixError(tree)
val getter = Getter(tree).createAndEnterSymbol()
@@ -58,7 +178,7 @@ trait MethodSynthesis {
if (mods.isLazy) enterLazyVal(tree, getter)
else {
if (mods.isPrivateLocal)
- context.error(tree.pos, "private[this] not allowed for case class parameters")
+ PrivateThisCaseClassParameterError(tree)
// Create the setter if necessary.
if (mods.isMutable)
Setter(tree).createAndEnterSymbol()
@@ -140,11 +260,7 @@ trait MethodSynthesis {
def keepClean = false // whether annotations whose definitions are not meta-annotated should be kept.
def validate() { }
def createAndEnterSymbol(): Symbol = {
- val sym = (
- owner.newMethod(tree.pos.focus, name)
- setFlag tree.mods.flags & flagsMask
- setFlag flagsExtra
- )
+ val sym = owner.newMethod(name, tree.pos.focus, (tree.mods.flags & flagsMask) | flagsExtra)
setPrivateWithin(tree, sym)
enterInScope(sym)
sym setInfo completer(sym)
@@ -166,8 +282,9 @@ trait MethodSynthesis {
}
}
private def logDerived(result: Tree): Tree = {
- val id = List(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName) filterNot (_ == "") mkString " "
- log("[+derived] " + id + " (" + derivedSym + ")\n " + result)
+ log("[+derived] " + ojoin(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName.decode)
+ + " (" + derivedSym + ")\n " + result)
+
result
}
final def derive(initial: List[AnnotationInfo]): Tree = {
@@ -205,26 +322,39 @@ trait MethodSynthesis {
override def validate() {
assert(derivedSym != NoSymbol, tree)
if (derivedSym.isOverloaded)
- context.error(derivedSym.pos, derivedSym+" is defined twice")
+ GetterDefinedTwiceError(derivedSym)
super.validate()
}
- // keep type tree of original abstract field
- private def fixTypeTree(dd: DefDef): DefDef = {
- dd.tpt match {
- case tt: TypeTree if dd.rhs == EmptyTree =>
- tt setOriginal tree.tpt
- case tpt =>
- tpt setPos tree.tpt.pos.focus
- }
- dd
- }
override def derivedTree: DefDef = {
- fixTypeTree {
- DefDef(derivedSym,
- if (mods.isDeferred) EmptyTree
- else gen.mkCheckInit(fieldSelection)
- )
+ // For existentials, don't specify a type for the getter, even one derived
+ // from the symbol! This leads to incompatible existentials for the field and
+ // the getter. Let the typer do all the work. You might think "why only for
+ // existentials, why not always," and you would be right, except: a single test
+ // fails, but it looked like some work to deal with it. Test neg/t0606.scala
+ // starts compiling (instead of failing like it's supposed to) because the typer
+ // expects to be able to identify escaping locals in typedDefDef, and fails to
+ // spot that brand of them. In other words it's an artifact of the implementation.
+ val tpt = derivedSym.tpe.finalResultType match {
+ case ExistentialType(_, _) => TypeTree()
+ case tp => TypeTree(tp)
+ }
+ tpt setPos focusPos(derivedSym.pos)
+ // keep type tree of original abstract field
+ if (mods.isDeferred)
+ tpt setOriginal tree.tpt
+
+ // TODO - reconcile this with the DefDef creator in Trees (which
+ // at this writing presented no way to pass a tree in for tpt.)
+ atPos(derivedSym.pos) {
+ DefDef(
+ Modifiers(derivedSym.flags),
+ derivedSym.name.toTermName,
+ Nil,
+ Nil,
+ tpt,
+ if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
+ ) setSymbol derivedSym
}
}
}
@@ -262,7 +392,7 @@ trait MethodSynthesis {
}
sealed abstract class BeanAccessor(bean: String) extends DerivedFromValDef {
- def name = bean + tree.name.toString.capitalize
+ val name = newTermName(bean + tree.name.toString.capitalize)
def flagsMask = BeanPropertyFlags
def flagsExtra = 0
override def derivedSym = enclClass.info decl name
@@ -273,8 +403,7 @@ trait MethodSynthesis {
if (derivedSym == NoSymbol) {
// the namer decides whether to generate these symbols or not. at that point, we don't
// have symbolic information yet, so we only look for annotations named "BeanProperty".
- context.error(tree.pos,
- "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import")
+ BeanPropertyAnnotationLimitationError(tree)
}
super.validate()
}
@@ -322,9 +451,9 @@ trait MethodSynthesis {
val beans = beanAccessorsFromNames(tree)
if (beans.nonEmpty) {
if (!name(0).isLetter)
- context.error(tree.pos, "`BeanProperty' annotation can be applied only to fields that start with a letter")
+ BeanPropertyAnnotationFieldWithoutLetterError(tree)
else if (mods.isPrivate) // avoids name clashes with private fields in traits
- context.error(tree.pos, "`BeanProperty' annotation can be applied only to non-private fields")
+ BeanPropertyAnnotationPrivateFieldError(tree)
// Create and enter the symbols here, add the trees in finishGetterSetter.
beans foreach (_.createAndEnterSymbol())
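
To make the shape of the trees produced by the ClassMethodSynthesis helpers introduced in this file concrete: constantNullary and constantMethod emit a literal-returning def (without and with an empty parameter list), and createSwitchMethod emits an Int-indexed match whose default case throws an IndexOutOfBoundsException, the shape used for productElement-style members. A hand-written, user-level approximation of that output (the class and member names here are made up for illustration only):

  // Illustrative only: roughly what the synthesized members look like in source form.
  class Foo(val a: Int, val b: String) {
    // shape of constantNullary(name, value): def name = <literal>
    def productPrefix: String = "Foo"
    // shape of constantMethod(name, value): def name() = <literal>
    def elementCount(): Int = 2
    // shape of createSwitchMethod(name, range, returnType)(f): a match over the
    // Int parameter, one case per value in the range, plus a throwing default
    def productElement(n: Int): Any = n match {
      case 0 => a
      case 1 => b
      case _ => throw new IndexOutOfBoundsException(n.toString)
    }
  }
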
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 364e887939..51542ec757 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -73,7 +73,9 @@ trait Namers extends MethodSynthesis {
classAndNamerOfModule.clear()
}
- abstract class Namer(val context: Context) extends MethodSynth {
+ abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors {
+
+ import NamerErrorGen._
val typer = newTyper(context)
private lazy val innerNamer =
@@ -87,7 +89,7 @@ trait Namers extends MethodSynthesis {
newNamer(context.makeNewScope(tree, sym))
}
def createInnerNamer() = {
- newNamer(context.make(context.tree, owner, new Scope))
+ newNamer(context.make(context.tree, owner, newScope))
}
def createPrimaryConstructorParameterNamer: Namer = { //todo: can we merge this with SCCmode?
val classContext = context.enclClass
@@ -99,7 +101,7 @@ trait Namers extends MethodSynthesis {
}
def enterValueParams(vparamss: List[List[ValDef]]): List[List[Symbol]] = {
- listutil.mmap(vparamss) { param =>
+ mmap(vparamss) { param =>
val sym = assignSymbol(param, param.name, mask = ValueParameterFlags)
setPrivateWithin(param, sym)
enterInScope(sym)
@@ -109,9 +111,10 @@ trait Namers extends MethodSynthesis {
protected def owner = context.owner
private def contextFile = context.unit.source.file
- private def typeErrorHandler[T](pos: Position, alt: T): PartialFunction[Throwable, T] = {
+ private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
case ex: TypeError =>
- typer.reportTypeError(pos, ex)
+ // H@ need to ensure that we handle only cyclic references
+ TypeSigError(tree, ex)
alt
}
// PRIVATE | LOCAL are fields generated for primary constructor arguments
@@ -129,10 +132,17 @@ trait Namers extends MethodSynthesis {
|| vd.symbol.isLazy
)
- def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym = {
+ def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym =
if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
- else sym setPrivateWithin typer.qualifyingClass(tree, mods.privateWithin, true)
- }
+ else sym setPrivateWithin (
+ typer.qualifyingClass(tree, mods.privateWithin, true) match {
+ case None =>
+ NoSymbol
+ case Some(sym) =>
+ sym
+ }
+ )
+
def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol =
setPrivateWithin(tree, sym, tree.mods)
@@ -195,14 +205,6 @@ trait Namers extends MethodSynthesis {
)
)
- private def doubleDefError(pos: Position, sym: Symbol) {
- val s1 = if (sym.isModule) "case class companion " else ""
- val s2 = if (sym.isSynthetic) "(compiler-generated) " + s1 else ""
- val s3 = if (sym.isCase) "case class " + sym.name else "" + sym
-
- context.error(pos, sym.name + " is already defined as " + s2 + s3)
- }
-
private def allowsOverload(sym: Symbol) = (
sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
)
@@ -221,7 +223,7 @@ trait Namers extends MethodSynthesis {
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) {
- doubleDefError(sym.pos, prev.sym)
+ DoubleDefError(sym, prev.sym)
sym setInfo ErrorType
scope unlink prev.sym // let them co-exist...
// FIXME: The comment "let them co-exist" is confusing given that the
@@ -250,7 +252,7 @@ trait Namers extends MethodSynthesis {
returnContext
}
tree.symbol match {
- case NoSymbol => try dispatch() catch typeErrorHandler(tree.pos, this.context)
+ case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context)
case sym => enterExistingSym(sym)
}
}
@@ -280,7 +282,7 @@ trait Namers extends MethodSynthesis {
}
private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
- log("[+symbol] " + sym.hasFlagsToString(-1L) + " " + sym)
+ log("[+symbol] " + sym.debugLocationString)
tree.symbol = sym
sym
}
@@ -293,23 +295,22 @@ trait Namers extends MethodSynthesis {
private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
val pos = tree.pos
val isParameter = tree.mods.isParameter
- val sym = tree match {
- case TypeDef(_, _, _, _) if isParameter => owner.newTypeParameter(pos, name.toTypeName)
- case TypeDef(_, _, _, _) => owner.newAliasType(pos, name.toTypeName)
- case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos)
- case DefDef(_, _, _, _, _, _) => owner.newMethod(pos, name.toTermName)
- case ClassDef(_, _, _, _) => owner.newClass(pos, name.toTypeName)
- case ModuleDef(_, _, _) => owner.newModule(pos, name)
- case ValDef(_, _, _, _) if isParameter => owner.newValueParameter(pos, name)
+ val flags = tree.mods.flags & mask
+
+ tree match {
+ case TypeDef(_, _, _, _) if isParameter => owner.newTypeParameter(name.toTypeName, pos, flags)
+ case TypeDef(_, _, _, _) => owner.newTypeSymbol(name.toTypeName, pos, flags)
+ case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
+ case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
+ case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
+ case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
+ case ValDef(_, _, _, _) if isParameter => owner.newValueParameter(name, pos, flags)
case PackageDef(pid, _) => createPackageSymbol(pos, pid)
- case ValDef(_, _, _, _) => owner.newValue(pos, name)
+ case ValDef(_, _, _, _) => owner.newValue(name, pos, flags)
}
- sym setFlag (tree.mods.flags & mask)
}
- private def createFieldSymbol(tree: ValDef): TermSymbol = (
- owner.newValue(tree.pos, nme.getterToLocal(tree.name))
- setFlag tree.mods.flags & FieldFlags | PrivateLocal
- )
+ private def createFieldSymbol(tree: ValDef): TermSymbol =
+ owner.newValue(nme.getterToLocal(tree.name), tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
private def createImportSymbol(tree: Tree) =
NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
@@ -325,7 +326,7 @@ trait Namers extends MethodSynthesis {
if (existing.isPackage && pkgOwner == existing.owner)
existing
else {
- val pkg = pkgOwner.newPackage(pos, pid.name.toTermName)
+ val pkg = pkgOwner.newPackage(pid.name.toTermName, pos)
val pkgClass = pkg.moduleClass
val pkgClassInfo = new PackageClassInfoType(newPackageScope(pkgClass), pkgClass)
@@ -448,6 +449,7 @@ trait Namers extends MethodSynthesis {
}
private def checkSelectors(tree: Import): Unit = {
+ import DuplicatesErrorKinds._
val Import(expr, selectors) = tree
val base = expr.tpe
@@ -484,8 +486,10 @@ trait Namers extends MethodSynthesis {
typeSig(tree)
}
// for Java code importing Scala objects
- else if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from)))
- notAMemberError(tree.pos, expr, from)
+ else if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+ typer.TyperErrorGen.NotAMemberError(tree, expr, from)
+ typer.infer.setError(tree)
+ }
}
// Setting the position at the import means that if there is
// more than one hidden name, the second will not be warned.
@@ -493,20 +497,21 @@ trait Namers extends MethodSynthesis {
checkNotRedundant(tree.pos withPoint fromPos, from, to)
}
}
- def noDuplicates(names: List[Name], message: String) {
+
+ def noDuplicates(names: List[Name], check: DuplicatesErrorKinds.Value) {
def loop(xs: List[Name]): Unit = xs match {
case Nil => ()
case hd :: tl =>
if (hd == nme.WILDCARD || !(tl contains hd)) loop(tl)
- else context.error(tree.pos, hd.decode + " " + message)
+ else DuplicatesError(tree, hd, check)
}
loop(names filterNot (x => x == null || x == nme.WILDCARD))
}
selectors foreach checkSelector
// checks on the whole set
- noDuplicates(selectors map (_.name), "is renamed twice")
- noDuplicates(selectors map (_.rename), "appears twice as a target of a renaming")
+ noDuplicates(selectors map (_.name), RenamedTwice)
+ noDuplicates(selectors map (_.rename), AppearsTwice)
}
def enterCopyMethodOrGetter(tree: Tree, tparams: List[TypeDef]): Symbol = {
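
The RenamedTwice and AppearsTwice checks above boil down to finding the first non-wildcard name that occurs more than once among the selector names (respectively, the rename targets). A standalone sketch of that loop, using plain strings in place of compiler Names:

  // Sketch of the noDuplicates loop: ignore wildcards, report the first repeat.
  object DuplicateSelectorSketch {
    val Wildcard = "_"

    def firstDuplicate(names: List[String]): Option[String] = names match {
      case Nil => None
      case hd :: tl =>
        if (hd == Wildcard || !tl.contains(hd)) firstDuplicate(tl)
        else Some(hd)
    }

    def main(args: Array[String]): Unit = {
      println(firstDuplicate(List("A", "A")))      // Some(A): A is renamed twice
      println(firstDuplicate(List("X", "X")))      // Some(X): X appears twice as a rename target
      println(firstDuplicate(List("A", "_", "_"))) // None: wildcards are ignored
    }
  }
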
@@ -522,13 +527,13 @@ trait Namers extends MethodSynthesis {
val vparamss = tree match { case x: DefDef => x.vparamss ; case _ => Nil }
val cparamss = constructorType.paramss
- for ((vparams, cparams) <- vparamss zip cparamss) {
- for ((param, cparam) <- vparams zip cparams) {
+ map2(vparamss, cparamss)((vparams, cparams) =>
+ map2(vparams, cparams)((param, cparam) =>
// need to clone the type cparam.tpe???
// problem is: we don't have the new owner yet (the new param symbol)
param.tpt setType subst(cparam.tpe)
- }
- }
+ )
+ )
}
sym setInfo {
mkTypeCompleter(tree) { copySym =>
@@ -579,7 +584,7 @@ trait Namers extends MethodSynthesis {
// via "x$lzy" as can be seen in test #3927.
val sym = (
if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.pos, tree.name + "$lzy") setFlag tree.mods.flags resetFlag IMPLICIT
+ else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
)
enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
}
@@ -621,13 +626,13 @@ trait Namers extends MethodSynthesis {
if (mods.isCase) {
if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
- context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
+ MaxParametersCaseClassError(tree)
val m = ensureCompanionObject(tree, caseModuleDef)
classOfModuleClass(m.moduleClass) = new WeakReference(tree)
}
val hasDefault = impl.body exists {
- case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => listutil.mexists(vparamss)(_.mods.hasDefault)
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
case _ => false
}
if (hasDefault) {
@@ -824,7 +829,7 @@ trait Namers extends MethodSynthesis {
val tp = tpt.tpe
val inheritsSelf = tp.typeSymbol == owner
if (inheritsSelf)
- context.error(tpt.pos, ""+tp.typeSymbol+" inherits itself")
+ InheritsItselfError(tpt)
if (inheritsSelf || tp.isError) AnyRefClass.tpe
else tp
@@ -833,7 +838,7 @@ trait Namers extends MethodSynthesis {
val parents = typer.parentTypes(templ) map checkParent
enterSelf(templ.self)
- val decls = new Scope
+ val decls = newScope
val templateNamer = newNamer(context.make(templ, clazz, decls))
templateNamer enterSyms templ.body
@@ -843,10 +848,10 @@ trait Namers extends MethodSynthesis {
Namers.this.classOfModuleClass get clazz foreach { cdefRef =>
val cdef = cdefRef()
if (cdef.mods.isCase) addApplyUnapply(cdef, templateNamer)
- addMacroMethods(cdef.impl, templateNamer)
+ if (settings.Xmacros.value) addMacroMethods(cdef.impl, templateNamer)
classOfModuleClass -= clazz
}
- addMacroMethods(templ, templateNamer)
+ if (settings.Xmacros.value) addMacroMethods(templ, templateNamer)
}
// add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because
@@ -919,7 +924,7 @@ trait Namers extends MethodSynthesis {
}
def thisMethodType(restpe: Type) = {
- val checkDependencies = new DependentTypeChecker(context)
+ val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
@@ -953,9 +958,9 @@ trait Namers extends MethodSynthesis {
// def overriddenSymbol = meth.nextOverriddenSymbol
// fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || listutil.mexists(vparamss)(_.tpt.isEmpty))) {
+ if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
// try to complete from matching definition in base type
- listutil.mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
val overridden = overriddenSymbol
if (overridden != NoSymbol && !overridden.isOverloaded) {
overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
@@ -993,9 +998,9 @@ trait Namers extends MethodSynthesis {
_.info.isInstanceOf[MethodType])) {
vparamSymss = List(List())
}
- listutil.mforeach(vparamss) { vparam =>
+ mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
- context.error(vparam.pos, "missing parameter type")
+ MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
@@ -1073,7 +1078,7 @@ trait Namers extends MethodSynthesis {
// Create trees for the defaultGetter. Uses tools from Unapplies.scala
var deftParams = tparams map copyUntyped[TypeDef]
- val defvParamss = listutil.mmap(previous) { p =>
+ val defvParamss = mmap(previous) { p =>
// in the default getter, remove the default parameter
val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
UnTyper.traverse(p1)
@@ -1263,7 +1268,7 @@ trait Namers extends MethodSynthesis {
val typer1 = typer.constrTyperIf(isBeforeSupercall)
if (tpt.isEmpty) {
if (rhs.isEmpty) {
- context.error(tpt.pos, "missing parameter type");
+ MissingParameterOrValTypeError(tpt)
ErrorType
}
else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
@@ -1277,7 +1282,7 @@ trait Namers extends MethodSynthesis {
val expr1 = typer.typedQualifier(expr)
typer checkStable expr1
if (expr1.symbol != null && expr1.symbol.isRootPackage)
- context.error(tree.pos, "_root_ cannot be imported")
+ RootImportError(tree)
val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
checkSelectors(newImport)
@@ -1291,7 +1296,7 @@ trait Namers extends MethodSynthesis {
val result =
try getSig
- catch typeErrorHandler(tree.pos, ErrorType)
+ catch typeErrorHandler(tree, ErrorType)
result match {
case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => typer.deskolemizeTypeParams(tparams)(result)
@@ -1338,43 +1343,43 @@ trait Namers extends MethodSynthesis {
* - declarations only in mixins or abstract classes (when not @native)
*/
def validate(sym: Symbol) {
- def fail(msg: String) = context.error(sym.pos, msg)
+ import SymValidateErrors._
+ def fail(kind: SymValidateErrors.Value) = SymbolValidationError(sym, kind)
+
def checkWithDeferred(flag: Int) {
if (sym hasFlag flag)
- fail("abstract member may not have " + flagsToString(flag) + " modifier")
+ AbstractMemberWithModiferError(sym, flag)
}
def checkNoConflict(flag1: Int, flag2: Int) {
if (sym hasAllFlags flag1 | flag2)
- fail("illegal combination of modifiers: %s and %s for: %s".format(
- flagsToString(flag1), flagsToString(flag2), sym))
+ IllegalModifierCombination(sym, flag1, flag2)
}
if (sym.isImplicit) {
if (sym.isConstructor)
- fail("`implicit' modifier not allowed for constructors")
+ fail(ImplicitConstr)
if (!sym.isTerm)
- fail("`implicit' modifier can be used only for values, variables and methods")
+ fail(ImplicitNotTerm)
if (sym.owner.isPackageClass)
- fail("`implicit' modifier cannot be used for top-level objects")
+ fail(ImplicitTopObject)
}
if (sym.isClass) {
if (sym.isAnyOverride && !sym.hasFlag(TRAIT))
- fail("`override' modifier not allowed for classes")
- }
- else {
+ fail(OverrideClass)
+ } else {
if (sym.isSealed)
- fail("`sealed' modifier can be used only for classes")
+ fail(SealedNonClass)
if (sym.hasFlag(ABSTRACT))
- fail("`abstract' modifier can be used only for classes; it should be omitted for abstract members")
+ fail(AbstractNonClass)
}
if (sym.isConstructor && sym.isAnyOverride)
- fail("`override' modifier not allowed for constructors")
+ fail(OverrideConstr)
if (sym.isAbstractOverride && !sym.owner.isTrait)
- fail("`abstract override' modifier only allowed for members of traits")
+ fail(AbstractOverride)
if (sym.isLazy && sym.hasFlag(PRESUPER))
- fail("`lazy' definitions may not be initialized early")
+ fail(LazyAndEarlyInit)
if (sym.info.typeSymbol == FunctionClass(0) && sym.isValueParameter && sym.owner.isCaseClass)
- fail("pass-by-name arguments not allowed for case class parameters")
+ fail(ByNameParameter)
if (sym.isDeferred) {
// Is this symbol type always allowed the deferred flag?
@@ -1391,10 +1396,9 @@ trait Namers extends MethodSynthesis {
)
if (sym hasAnnotation NativeAttr)
sym resetFlag DEFERRED
- else if (!symbolAllowsDeferred && ownerRequiresConcrete) {
- fail("only classes can have declared but undefined members" + abstractVarMessage(sym))
- sym resetFlag DEFERRED
- }
+ else if (!symbolAllowsDeferred && ownerRequiresConcrete)
+ fail(AbstractVar)
+
checkWithDeferred(PRIVATE)
checkWithDeferred(FINAL)
}
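
Each SymValidateErrors case above corresponds to a concrete modifier misuse that used to be reported with an inline message. A few user-level declarations that would trip these checks; they are kept as comments because each one is rejected by the compiler, and the surrounding names are made up:

  object ModifierMisuseExamples {
    // abstract class A { abstract val x: Int }          // AbstractNonClass: `abstract'
    //                                                    // is omitted for abstract members
    // object O { sealed def f = 1 }                      // SealedNonClass: `sealed' can
    //                                                    // be used only for classes
    // trait T { private def g: Int }                     // checkWithDeferred(PRIVATE):
    //                                                    // abstract member may not be private
    // class C { implicit def this(s: String) = this() }  // ImplicitConstr: `implicit'
    //                                                    // not allowed for constructors
  }
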
@@ -1458,14 +1462,14 @@ trait Namers extends MethodSynthesis {
// def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2])
// moreover, the latter is not an encoding of the former, which hides type
// inference of T2, so you can specify T while T2 is purely computed
- private class DependentTypeChecker(ctx: Context) extends TypeTraverser {
+ private class DependentTypeChecker(ctx: Context)(namer: Namer) extends TypeTraverser {
private[this] val okParams = mutable.Set[Symbol]()
private[this] val method = ctx.owner
def traverse(tp: Type) = tp match {
case SingleType(_, sym) =>
if (sym.owner == method && sym.isValueParameter && !okParams(sym))
- ctx.error(sym.pos, "illegal dependent method type" + errorAddendum)
+ namer.NamerErrorGen.IllegalDependentMethTpeError(sym)(ctx)
case _ => mapOver(tp)
}
@@ -1478,8 +1482,6 @@ trait Namers extends MethodSynthesis {
okParams ++= vps
}
}
- private def errorAddendum =
- ": parameter appears in the type of another parameter in the same section or an earlier one"
}
@deprecated("Use underlyingSymbol instead", "2.10.0")
@@ -1508,7 +1510,7 @@ trait Namers extends MethodSynthesis {
}
catch {
case e: InvalidCompanions =>
- ctx.error(original.pos, e.getMessage)
+ ctx.unit.error(original.pos, e.getMessage)
NoSymbol
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 8611fafe52..3a3c244d1c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -17,6 +17,7 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
+ import NamesDefaultsErrorsGen._
val defaultParametersOfMethod =
perRunCaches.newWeakMap[Symbol, Set[Symbol]]() withDefaultValue Set()
@@ -154,10 +155,9 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(qual.pos, unit.freshTermName("qual$"))
- .setInfo(qual.tpe)
- blockTyper.context.scope.enter(sym)
- val vd = atPos(sym.pos)(ValDef(sym, qual).setType(NoType))
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe
+ blockTyper.context.scope enter sym
+ val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
// don't use treeCopy: it would assign opaque position.
@@ -260,7 +260,7 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
- val symPs = (args, paramTypes).zipped map ((arg, tpe) => {
+ val symPs = map2(args, paramTypes)((arg, tpe) => {
val byName = isByNameParamType(tpe)
val (argTpe, repeated) =
if (isScalaRepeatedParamType(tpe)) arg match {
@@ -269,14 +269,14 @@ trait NamesDefaults { self: Analyzer =>
case _ =>
(seqType(arg.tpe), true)
} else (arg.tpe, false)
- val s = context.owner.newValue(arg.pos, unit.freshTermName("x$"))
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos)
val valType = if (byName) functionType(List(), argTpe)
else if (repeated) argTpe
else argTpe
s.setInfo(valType)
(context.scope.enter(s), byName, repeated)
})
- (symPs, args).zipped map {
+ map2(symPs, args) {
case ((sym, byName, repeated), arg) =>
val body =
if (byName) {
@@ -313,8 +313,7 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
-
- if (typedApp.tpe.isError) setError(tree)
+ if (typedApp.isErrorTyped) tree
else typedApp match {
// Extract the typed arguments, restore the call-site evaluation order (using
// ValDef's in the block), change the arguments to these local values.
@@ -326,13 +325,15 @@ trait NamesDefaults { self: Analyzer =>
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => {
+ val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
val ref = gen.mkAttributedRef(vDef.symbol)
atPos(vDef.pos.focus) {
// for by-name parameters, the local value is a nullary function returning the argument
- if (isByNameParamType(tpe)) Apply(ref, List())
- else if (isScalaRepeatedParamType(tpe)) Typed(ref, Ident(tpnme.WILDCARD_STAR))
- else ref
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
@@ -340,7 +341,7 @@ trait NamesDefaults { self: Analyzer =>
res.setPos(res.pos.makeTransparent)
val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos)
context.namedApplyBlockInfo =
- Some((block, NamedApplyInfo(qual, targs, vargss ::: List(refArgs), blockTyper)))
+ Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
}
}
@@ -383,6 +384,7 @@ trait NamesDefaults { self: Analyzer =>
if (missing forall (_.hasDefaultFlag)) {
val defaultArgs = missing flatMap (p => {
val defGetter = defaultGetter(p, context)
+ // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
else {
var default1 = qual match {
@@ -430,6 +432,78 @@ trait NamesDefaults { self: Analyzer =>
}
} else NoSymbol
}
+
+ private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
+ val savedParams = context.extractUndetparams()
+ val savedReporting = context.ambiguousErrors
+
+ context.setAmbiguousErrors(false)
+ try fn(savedParams)
+ finally {
+ context.setAmbiguousErrors(savedReporting)
+ //@M note that we don't get here when an ambiguity was detected (during the computation of res),
+ // as errorTree throws an exception
+ context.undetparams = savedParams
+ }
+ }
+
+ /** Fast path for ambiguous assignment check.
+ */
+ private def isNameInScope(context: Context, name: Name) = (
+ context.enclosingContextChain exists (ctx =>
+ (ctx.scope.lookupEntry(name) != null)
+ || (ctx.owner.rawInfo.member(name) != NoSymbol)
+ )
+ )
+
+ /** A full type check is very expensive; let's make sure there's a name
+ * somewhere which could potentially be ambiguous before we go that route.
+ */
+ private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = {
+ import typer.context
+ isNameInScope(context, param.name) && {
+ // for named arguments, check whether the assignment expression would
+ // typecheck. if it does, report an ambiguous error.
+ val paramtpe = param.tpe.cloneInfo(param)
+ // replace type parameters by wildcard. in the below example we need to
+ // typecheck (x = 1) with wildcard (not T) so that it succeeds.
+ // def f[T](x: T) = x
+ // var x = 0
+ // f(x = 1) << "x = 1" typechecks with expected type WildcardType
+ savingUndeterminedTParams(context) { udp =>
+ val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
+ override def apply(tp: Type): Type = super.apply(tp match {
+ case TypeRef(_, ByNameParamClass, x :: Nil) => x
+ case _ => tp
+ })
+ }
+ // This throws an exception which is caught in `tryTypedApply` (as it
+ // uses `silent`) - unfortunately, tryTypedApply recovers from the
+ // exception if you use errorTree(arg, ...) and conforms is allowed as
+ // a view (see tryImplicit in Implicits) because it tries to produce a
+ // new qualifier (if the old one was P, the new one will be
+ // conforms.apply(P)), and if that works, it pretends nothing happened.
+ //
+ // To make sure tryTypedApply fails, we would like to pass EmptyTree
+ // instead of arg, but can't do that because eventually setType(ErrorType)
+ // is called, and EmptyTree can only be typed NoType. Thus we need to
+ // disable conforms as a view...
+ try typer.silent(_.typed(arg, subst(paramtpe))) match {
+ case SilentResultValue(t) => !t.isErroneous // #4041
+ case _ => false
+ }
+ catch {
+ // `silent` only catches and returns TypeErrors which are not
+ // CyclicReferences. Fix for #3685
+ case cr @ CyclicReference(sym, _) =>
+ (sym.name == param.name) && sym.accessedOrSelf.isVariable && {
+ NameClashError(sym, arg)(typer.context)
+ true
+ }
+ }
+ }
+ }
+ }
/**
* Removes name assignments from args. Additionally, returns an array mapping
@@ -439,71 +513,37 @@ trait NamesDefaults { self: Analyzer =>
* after named ones.
*/
def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = {
- import typer.infer.errorTree
-
- // maps indicies from (order written by user) to (order of definition)
- val argPos = (new Array[Int](args.length)) map (x => -1)
+ implicit val context0 = typer.context
+ // maps indices from (order written by user) to (order of definition)
+ val argPos = Array.fill(args.length)(-1)
var positionalAllowed = true
- val namelessArgs = for ((arg, index) <- (args.zipWithIndex)) yield arg match {
- case a @ AssignOrNamedArg(Ident(name), rhs) =>
- val (pos, newName) = paramPos(params, name)
- newName.foreach(n => {
- typer.context.unit.deprecationWarning(arg.pos, "the parameter name "+ name +" has been deprecated. Use "+ n +" instead.")
- })
- if (pos == -1) {
- if (positionalAllowed) {
- argPos(index) = index
- // prevent isNamed from being true when calling doTypedApply recursively,
- // treat the arg as an assignment of type Unit
- Assign(a.lhs, rhs).setPos(arg.pos)
- } else {
- errorTree(arg, "unknown parameter name: "+ name)
- }
- } else if (argPos contains pos) {
- errorTree(arg, "parameter specified twice: "+ name)
- } else {
- // for named arguments, check whether the assignment expression would
- // typecheck. if it does, report an ambiguous error.
- val param = params(pos)
- val paramtpe = params(pos).tpe.cloneInfo(param)
- // replace type parameters by wildcard. in the below example we need to
- // typecheck (x = 1) with wildcard (not T) so that it succeeds.
- // def f[T](x: T) = x
- // var x = 0
- // f(x = 1) << "x = 1" typechecks with expected type WildcardType
- val udp = typer.context.extractUndetparams()
- val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = tp match {
- case TypeRef(_, ByNameParamClass, List(arg)) => super.apply(arg)
- case _ => super.apply(tp)
+ val namelessArgs = mapWithIndex(args) { (arg, index) =>
+ arg match {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ def matchesName(param: Symbol) = !param.isSynthetic && (
+ (param.name == name) || (param.deprecatedParamName match {
+ case Some(`name`) =>
+ context0.unit.deprecationWarning(arg.pos,
+ "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
+ true
+ case _ => false
+ })
+ )
+ val pos = params indexWhere matchesName
+ if (pos == -1) {
+ if (positionalAllowed) {
+ argPos(index) = index
+ // prevent isNamed from being true when calling doTypedApply recursively,
+ // treat the arg as an assignment of type Unit
+ Assign(arg.lhs, rhs) setPos arg.pos
}
+ else UnknownParameterNameNamesDefaultError(arg, name)
}
- val reportAmbiguousErrors = typer.context.reportAmbiguousErrors
- typer.context.reportAmbiguousErrors = false
-
- var variableNameClash = false
- val typedAssign = try {
- typer.silent(_.typed(arg, subst(paramtpe)))
- } catch {
- // `silent` only catches and returns TypeErrors which are not
- // CyclicReferences. Fix for #3685
- case cr @ CyclicReference(sym, info) if sym.name == param.name =>
- if (sym.isVariable || sym.isGetter && sym.accessed.isVariable) {
- // named arg not allowed
- variableNameClash = true
- typer.context.error(sym.pos,
- "%s definition needs %s because '%s' is used as a named argument in its body.".format(
- "variable", // "method"
- "type", // "result type"
- sym.name
- )
- )
- typer.infer.setError(arg)
- }
- else cr
- }
-
- def applyNamedArg = {
+ else if (argPos contains pos)
+ DoubleParamNamesDefaultError(arg, name)
+ else if (isAmbiguousAssignment(typer, params(pos), arg))
+ AmbiguousReferenceInNamesDefaultError(arg, name)
+ else {
// if the named argument is on the original parameter
// position, positional after named is allowed.
if (index != pos)
@@ -511,63 +551,13 @@ trait NamesDefaults { self: Analyzer =>
argPos(index) = pos
rhs
}
-
- val res = typedAssign match {
- case _: TypeError => applyNamedArg
-
- case t: Tree =>
- if (t.isErroneous && !variableNameClash) {
- applyNamedArg
- } else if (t.isErroneous) {
- t // name clash with variable. error was already reported above.
- } else {
- // This throws an exception which is caught in `tryTypedApply` (as it
- // uses `silent`) - unfortunately, tryTypedApply recovers from the
- // exception if you use errorTree(arg, ...) and conforms is allowed as
- // a view (see tryImplicit in Implicits) because it tries to produce a
- // new qualifier (if the old one was P, the new one will be
- // conforms.apply(P)), and if that works, it pretends nothing happened.
- //
- // To make sure tryTypedApply fails, we would like to pass EmptyTree
- // instead of arg, but can't do that because eventually setType(ErrorType)
- // is called, and EmptyTree can only be typed NoType. Thus we need to
- // disable conforms as a view...
- errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+
- "name of the method and the name of a variable currently in scope.")
- }
- }
-
- typer.context.reportAmbiguousErrors = reportAmbiguousErrors
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- typer.context.undetparams = udp
- res
- }
- case _ =>
- argPos(index) = index
- if (positionalAllowed) arg
- else errorTree(arg, "positional after named argument.")
- }
- (namelessArgs, argPos)
- }
-
- /**
- * Returns
- * - the position of the parameter named `name`
- * - optionally, if `name` is @deprecatedName, the new name
- */
- def paramPos(params: List[Symbol], name: Name): (Int, Option[Name]) = {
- var i = 0
- var rest = params
- while (!rest.isEmpty) {
- val p = rest.head
- if (!p.isSynthetic) {
- if (p.name == name) return (i, None)
- if (p.deprecatedParamName == Some(name)) return (i, Some(p.name))
+ case _ =>
+ argPos(index) = index
+ if (positionalAllowed) arg
+ else PositionalAfterNamedNamesDefaultError(arg)
}
- i += 1
- rest = rest.tail
}
- (-1, None)
+
+ (namelessArgs, argPos)
}
 }
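
The machinery in this file rewrites a call that uses named or default arguments into a block: one synthetic val per argument, created in the order the arguments were written (preserving call-site evaluation order), followed by the call with the temporaries reordered into definition-site order. A hand-written approximation of that rewrite; tmp1 and tmp2 stand in for the compiler's x$-prefixed fresh names:

  object NamedArgsRewriteSketch {
    def resize(width: Int, height: Int): String = s"${width}x${height}"

    def main(args: Array[String]): Unit = {
      // What the user writes: named arguments, out of definition order.
      val direct = resize(height = 20, width = 10)

      // Roughly what the transformation builds: temporaries in written order,
      // then the call with the arguments back in definition-site order.
      val rewritten = {
        val tmp1 = 20 // height, evaluated first because it was written first
        val tmp2 = 10 // width
        resize(tmp2, tmp1)
      }
      println(direct == rewritten) // true
    }
  }
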
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index 75a5ad6f8a..6d31243fd0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -7,8 +7,7 @@ package scala.tools.nsc
package typechecker
import symtab._
-import Flags.{ CASE => _, _ }
-
+import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC}
/** Translate pattern matching into method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled.
*
@@ -16,29 +15,16 @@ import Flags.{ CASE => _, _ }
* (lifting the body of the case into the monad using `one`).
*
* Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
- *
- * The monad `M` in which the pattern match is interpreted is determined by solving `implicitly[MatchingStrategy[M]]` for M.
- * Predef provides the default, `OptionMatching`
-
- * Example translation: TODO
-
- scrut match { case Person(father@Person(_, fatherName), name) if fatherName == name => }
- scrut match { case Person(father, name) => father match {case Person(_, fatherName) => }}
- Person.unapply(scrut) >> ((father, name) => (Person.unapply(father) >> (_, fatherName) => check(fatherName == name) >> (_ => body)))
-
- (a => (Person.unapply(a).>>(
- b => Person.unapply(b._1).>>(
- c => check(c._2 == b._2).>>(
- d => body)))))(scrut)
-
-TODO:
- - optimizer loops on virtpatmat compiler?
-
- - don't orElse a failure case at the end if there's a default case
- - implement spec more closely (see TODO's below)
- - fix inlining of methods in nested objects
+ * TODO:
+ * - interaction with CPS
+ * - Array patterns
+ * - implement spec more closely (see TODO's)
+ * - DCE
+ * - use manifests for type testing
+ *
* (longer-term) TODO:
+ * - user-defined unapplyProd
* - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
* - recover exhaustivity and unreachability checking using a variation on the type-safe builder pattern
*/
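
To make the "one"/"orElse" wording above concrete: in the default Option-based interpretation (no user-supplied __match in scope), each case becomes an extraction step plus an optional guard, its body is lifted into the monad with one (here Some), the cases are chained with orElse, and runOrElse throws a MatchError when every case fails. A hand-written approximation for a small two-case match; this is conceptual only, since the actual translation below goes through the TreeMakers and emits optimized code in this mode:

  object MatchTranslationSketch {
    def describe(x: Option[Int]): String = x match {
      case Some(n) if n > 0 => "positive " + n
      case _                => "other"
    }

    // The same match, written out by hand in the Option monad.
    def describeTranslated(x: Option[Int]): String = {
      val case1: Option[String] =
        if (x.isEmpty) None                 // extraction failed: zero
        else {
          val n = x.get                     // bound subpattern
          if (n > 0) Some("positive " + n)  // guard ok: one(body)
          else None                         // guard failed: zero
        }
      val case2: Option[String] = Some("other")    // default case: one(body)
      case1.orElse(case2)                          // cases combined with orElse
        .getOrElse(throw new MatchError(x))        // runOrElse: no case matched
    }

    def main(args: Array[String]): Unit = {
      for (v <- List(Some(3), Some(-1), None))
        println(describe(v) == describeTranslated(v)) // true, true, true
    }
  }
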
@@ -46,28 +32,90 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
import global._
import definitions._
- private lazy val matchingStrategyTycon = definitions.getClass("scala.MatchingStrategy").typeConstructor
+ object vpmName {
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+ val __match = newTermName("__match")
+
+ def counted(str: String, i: Int) = newTermName(str+i)
+ }
- class MatchTranslator(typer: Typer) extends MatchCodeGen {
- def typed(tree: Tree, mode: Int, pt: Type): Tree = typer.typed(tree, mode, pt) // for MatchCodeGen -- imports don't provide implementations for abstract members
+ object MatchTranslator {
+ def apply(typer: Typer): MatchTranslation = {
+ import typer._
+ // typing `__match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
+ newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ case SilentResultValue(ms) => new PureMatchTranslator(typer, ms)
+ case _ => new OptimizingMatchTranslator(typer)
+ }
+ }
+ }
- import typer._
- import typeDebug.{ ptTree, ptBlock, ptLine }
+ class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen
+ class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations
- def solveContextBound(contextBoundTp: Type): (Tree, Type) = {
- val solSym = NoSymbol.newTypeParameter(NoPosition, "SolveImplicit$".toTypeName)
- val param = solSym.setInfo(contextBoundTp.typeSymbol.typeParams(0).info.cloneInfo(solSym)) // TypeBounds(NothingClass.typeConstructor, baseTp)
- val pt = appliedType(contextBoundTp, List(param.tpeHK))
- val savedUndets = context.undetparams
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// talking to userland
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- context.undetparams = param :: context.undetparams
- val result = inferImplicit(EmptyTree, pt, false, false, context)
- context.undetparams = savedUndets
+ /** Interface with user-defined match monad?
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+
+ type Matcher[P[_], M[+_], A] = {
+ def flatMap[B](f: P[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ abstract class MatchStrategy[P[_], M[+_]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
+
+ def zero: M[Nothing]
+ def one[T](x: P[T]): M[T]
+ def guard[T](cond: P[Boolean], then: => P[T]): M[T]
+ def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
+ }
+
+ * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
+
+
+ * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+
+ object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
+ def zero = None
+ def one[T](x: T) = Some(x)
+ // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
+ def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
+ def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
+ def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
+ }
+
+ */
+ trait MatchMonadInterface {
+ val typer: Typer
+ val matchOwner = typer.context.owner
+
+ def inMatchMonad(tp: Type): Type
+ def pureType(tp: Type): Type
+ final def matchMonadResult(tp: Type): Type =
+ tp.baseType(matchMonadSym).typeArgs match {
+ case arg :: Nil => arg
+ case _ => ErrorType
+ }
- (result.tree, result.subst.to(result.subst.from indexOf param))
- }
+ protected def matchMonadSym: Symbol
+ }
- lazy val (matchingStrategy, matchingMonadType) = solveContextBound(matchingStrategyTycon)
+ trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
+ import typer.{typed, context, silent, reallyExists}
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -77,20 +125,25 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
*
* NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
* thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchingStrategy must be solved for each nested patternmatch)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested pattern match)
*/
def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type): Tree = {
// we don't transform after typers
// (that would require much more sophistication when generating trees,
// and the only place that emits Matches after typers is for exception handling anyway)
- assert(phase.id <= currentRun.typerPhase.id)
+ assert(phase.id <= currentRun.typerPhase.id, phase)
+
+ def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
+ case _ => tp
+ }
val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
- val scrutSym = freshSym(scrut.pos, scrutType)
+ val scrutSym = freshSym(scrut.pos, pureType(scrutType))
val okPt = repeatedToSeq(pt)
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, context.owner)
+ combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner)
}
@@ -141,6 +194,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// must use type `tp`, which is provided by extractor's result, not the type expected by binder,
// as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
// (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
(extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // println("changing "+ b +" : "+ b.info +" -> "+ tp);
// println("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
@@ -217,12 +271,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
and it binds the variable name to that value.
**/
case Bound(subpatBinder, p) =>
- // TreeMaker with empty list of trees only performs the substitution subpatBinder --> patBinder
- // println("rebind "+ subpatBinder +" to "+ patBinder)
- withSubPats(List(SubstOnlyTreeMaker(Substitution(subpatBinder, CODE.REF(patBinder)))),
- // the symbols are markers that may be used to refer to the result of the extractor in which the corresponding tree is nested
- // it's the responsibility of the treemaker to replace this symbol by a reference that
- // selects that result on the function symbol of the flatMap call that binds to the result of this extractor
+ // replace subpatBinder by patBinder (as if the Bind was not there)
+ withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
// must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
(patBinder, p)
)
@@ -268,7 +318,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
if (guard == EmptyTree) Nil
else List(GuardTreeMaker(guard))
- // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by pmgen.one),
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
// so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
// to enable this, probably need to move away from Option to a monad specific to pattern-match,
// so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
@@ -317,7 +367,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val extractorCall = try {
context.undetparams = Nil
silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case extractorCall: Tree => extractorCall // if !extractorCall.containsError()
+ case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError()
case _ =>
// this fails to resolve overloading properly...
// Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
@@ -381,34 +431,32 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
- protected def tupleSel(binder: Symbol)(i: Int): Tree = pmgen.tupleSel(binder)(i)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require isSeq
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- // only relevant if isSeq: (here to avoid capturing too much in the returned closure)
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
+ val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
+ val nbIndexingIndices = indexingIndices.length
- // this error is checked by checkStarPatOK
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // this error-condition has already been checked by checkStarPatOK:
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
// there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
(((1 to firstIndexingBinder) map tupleSel(binder)) ++
// then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map pmgen.index(seqTree(binder))) ++
+ (indexingIndices map codegen.index(seqTree(binder))) ++
// the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
(if(!lastIsStar) Nil else List(
if(nbIndexingIndices == 0) seqTree(binder)
- else pmgen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
}
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] = {
+ protected def subPatRefs(binder: Symbol): List[Tree] =
if (nbSubPats == 0) Nil
else if (isSeq) subPatRefsSeq(binder)
else ((1 to nbSubPats) map tupleSel(binder)).toList
- }
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
@@ -429,7 +477,9 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}
}
- // TODO: to be called when there's a def unapplyProd(x: T): Product_N
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
+ //
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
// TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
@@ -441,15 +491,15 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
// println("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
// println("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
- private def extractorTp = fun.tpe
+ private def constructorTp = fun.tpe
def isTyped = fun.isTyped
// to which type should the previous binder be casted?
- def paramType = extractorTp.finalResultType
+ def paramType = constructorTp.finalResultType
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
- protected def rawSubPatTypes = extractorTp.paramTypes
+ protected def rawSubPatTypes = constructorTp.paramTypes
// binder has type paramType
def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
@@ -458,31 +508,20 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}
/* TODO: remove special case when the following bug is fixed
-scala> :paste
-// Entering paste mode (ctrl-D to finish)
-
class Foo(x: Other) { x._1 } // BUG: can't refer to _1 if its defining class has not been type checked yet
case class Other(y: String)
-
-// Exiting paste mode, now interpreting.
-
-<console>:8: error: value _1 is not a member of Other
- class Foo(x: Other) { x._1 }
- ^
-
-scala> case class Other(y: String)
-defined class Other
-
-scala> class Foo(x: Other) { x._1 }
-defined class Foo */
+-- this is ok:
+case class Other(y: String)
+class Foo(x: Other) { x._1 } // no error in this order
+*/
override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
val caseAccs = binder.info.typeSymbol.caseFieldAccessors
if (caseAccs isDefinedAt (i-1)) REF(binder) DOT caseAccs(i-1)
- else pmgen.tupleSel(binder)(i)
+ else codegen.tupleSel(binder)(i)
}
- override def toString(): String = "case class "+ (if (extractorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
}
class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
@@ -497,7 +536,7 @@ defined class Foo */
def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, resultInMonad) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass)
}
@@ -526,12 +565,7 @@ defined class Foo */
// turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else {
- val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs
- // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply??
- if(monadArgs.length == 1) monadArgs(0)
- else ErrorType
- }
+ else matchMonadResult(resultType)
}
protected lazy val rawSubPatTypes =
@@ -545,81 +579,6 @@ defined class Foo */
override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
}
- // tack an outer test onto `cond` if binder.info and expectedType warrant it
- def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._
- if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, binder.info, context.owner)) {
- val expectedPrefix = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- }
-
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
- val outerCheck = (Select(pmgen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix
-
- // first check cond, since that should ensure we're not selecting outer on null
- pmgen.and(cond, outerCheck)
- }
- else
- cond
- }
-
- // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest)
- def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt)
- def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(pmgen._isInstanceOf(binder, pt))
-
- /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
- - A reference to a class C, p.C, or T#C.
- This type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining class instances.
- For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
- The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
-
- - A singleton type p.type.
- This type pattern matches only the value denoted by the path p
- (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
- // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
-
- - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern.
- This type pattern matches all values that are matched by each of the type patterns Ti.
-
- - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
- This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
- The bounds or alias type of these type variable are determined as described in (§8.3).
-
- - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
- This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
- **/
-
- // generate the tree for the run-time test that follows from the fact that
- // a `scrut` of known type `scrutTp` is expected to have type `expectedTp`
- // uses maybeWithOuterCheck to check the type's prefix
- def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._
- // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
- // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
- def genEqualsAndInstanceOf(sym: Symbol): Tree
- = pmgen._equals(REF(sym), patBinder) AND pmgen._isInstanceOf(patBinder, pt.widen)
-
- def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe
-
- val patBinderTp = patBinder.info.widen
- def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt)
-
- // TODO: [SPEC] type test for Array
- // TODO: use manifests to improve tests (for erased types we can do better when we have a manifest)
- pt match {
- case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here
- case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil
- case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym)
- case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL
- case ConstantType(const) => pmgen._equals(Literal(const), patBinder)
- case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL)
- case _ => typeTest(patBinder, pt)
- }
- }
-
/** A conservative approximation of which patterns do not discern anything.
* They are discarded during the translation.
*/
@@ -645,14 +604,74 @@ defined class Foo */
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TypedSubstitution extends MatchMonadInterface {
+ object Substitution {
+ def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
+ }
+
+ class Substitution(val from: List[Symbol], val to: List[Tree]) {
+ // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
+ // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
+ def apply(tree: Tree): Tree = {
+ // according to -Ystatistics 10% of translateMatch's time is spent in this method...
+ // since about half of the typedSubst calls end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
+ if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
+ else (new Transformer {
+ @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ if (origTp == null || origTp == NoType) to
+ // important: only type when actually substing and when original tree was typed
+ // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
+ else typer.typed(to, EXPRmode, WildcardType)
+
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe)
+ else subst(from.tail, to.tail)
+
+ tree match {
+ case Ident(_) => subst(from, to)
+ case _ => super.transform(tree)
+ }
+ }
+ }).transform(tree)
+ }
+
+
+ // the substitution that chains `other` before `this` substitution
+ // forall t: Tree. this(other(t)) == (this >> other)(t)
+ def >>(other: Substitution): Substitution = {
+ val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
+ new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ }
+ override def toString = (from zip to) mkString("Substitution(", ", ", ")")
+ }
+
+ object EmptySubstitution extends Substitution(Nil, Nil) {
+ override def apply(tree: Tree): Tree = tree
+ override def >>(other: Substitution): Substitution = other
+ }
+ }
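
To make the `>>` law above (`this(other(t)) == (this >> other)(t)`) concrete, here is a toy, string-based sketch of chaining two substitutions; `Subst` is a hypothetical stand-in for the Symbol-to-Tree `Substitution`, not the compiler's class:

object SubstSketch {
  // toy substitution: names to names (stands in for Symbol -> Tree)
  final case class Subst(from: List[String], to: List[String]) {
    def apply(s: String): String =
      (from zip to).foldLeft(s) { case (acc, (f, t)) => acc.replace(f, t) }
    // chain `other` before `this`: other's bindings win, and `this` is applied to other's images
    def >>(other: Subst): Subst = {
      val (fs, ts) = (from zip to).filterNot { case (f, _) => other.from contains f }.unzip
      Subst(other.from ++ fs, other.to.map(apply) ++ ts)
    }
  }
  def main(args: Array[String]): Unit = {
    val a = Subst(List("x"), List("y"))
    val b = Subst(List("y"), List("z"))
    val t = "x + y"
    println(a(b(t)))      // y + z
    println((a >> b)(t))  // y + z -- same result, as the law requires
  }
}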
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// the making of the trees
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) =
+ (cases, Nil)
- trait TreeMakers {
- def inMatchMonad(tp: Type): Type = appliedType(matchingMonadType, List(tp))
- lazy val optimizingCodeGen = matchingMonadType.typeSymbol eq OptionClass
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] =
+ None
abstract class TreeMaker {
+ /** captures the scope and the value of the bindings in patterns
+ * what matters is *when* the substitution happens (we can't accumulate the bindings and substitute them all at once after the full matcher has been constructed)
+ */
def substitution: Substitution =
if (currSub eq null) localSubstitution
else currSub
@@ -670,7 +689,6 @@ defined class Foo */
// build Tree that chains `next` after the current extractor
def chainBefore(next: Tree, pt: Type): Tree
- def treesToHoist: List[Tree] = Nil
}
case class TrivialTreeMaker(tree: Tree) extends TreeMaker {
@@ -681,71 +699,30 @@ defined class Foo */
case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker {
val localSubstitution: Substitution = EmptySubstitution
def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(substitution(pmgen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here
+ atPos(body.pos)(substitution(codegen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here
}
- case class SubstOnlyTreeMaker(localSubstitution: Substitution) extends TreeMaker {
+ case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+ val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
def chainBefore(next: Tree, pt: Type): Tree = substitution(next)
}
abstract class FunTreeMaker extends TreeMaker {
val nextBinder: Symbol
-
- // for CSE (used iff optimizingCodeGen)
- // TODO: factor this out -- don't mutate treemakers
- var reused: Boolean = false
- def reusedBinders: List[Symbol] = Nil
- override def treesToHoist: List[Tree] = { import CODE._
- reusedBinders map { b => VAL(b) === pmgen.mkZero(b.info) }
- }
}
- abstract class FreshFunTreeMaker extends FunTreeMaker {
+ abstract class CondTreeMaker extends FunTreeMaker {
val pos: Position
val prevBinder: Symbol
val nextBinderTp: Type
- lazy val nextBinder = freshSym(pos, nextBinderTp)
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- }
-
- // TODO: factor out optimization-specific stuff into codegen
- abstract class CondTreeMaker extends FreshFunTreeMaker { import CODE._
val cond: Tree
val res: Tree
- // for CSE (used iff optimizingCodeGen)
- // must set reused before!
- override lazy val reusedBinders = if(reused) List(freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE, nextBinder setFlag MUTABLE) else Nil
- def storedCond = reusedBinders(0)
- def storedRes = reusedBinders(1)
+ lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
def chainBefore(next: Tree, pt: Type): Tree =
- if (!reused)
- atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next)))
- else { // for CSE (used iff optimizingCodeGen)
- IF (cond) THEN BLOCK(
- storedCond === TRUE,
- storedRes === res,
- substitution(next).duplicate // TODO: finer-grained dup'ing
- ) ELSE pmgen.zero
- }
- }
-
- // for CSE (used iff optimizingCodeGen)
- case class ReusingCondTreeMaker(dropped_priors: List[(TreeMaker, Option[TreeMaker])]) extends TreeMaker { import CODE._
- lazy val localSubstitution = {
- val (from, to) = dropped_priors.collect {case (dropped: CondTreeMaker, Some(prior: CondTreeMaker)) => (dropped.nextBinder, REF(prior.storedRes))}.unzip
- val oldSubs = dropped_priors.collect {case (dropped: TreeMaker, _) => dropped.substitution}
- oldSubs.foldLeft(Substitution(from, to))(_ >> _)
- }
-
- def chainBefore(next: Tree, pt: Type): Tree = {
- val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: CondTreeMaker)) => ctm}.get.storedCond)
-
- IF (cond) THEN BLOCK(
- substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- ) ELSE pmgen.zero
- }
+ atPos(pos)(codegen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next)))
}
/**
@@ -756,12 +733,13 @@ defined class Foo */
* in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
*/
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
- def chainBefore(next: Tree, pt: Type): Tree = atPos(extractor.pos)(
- if (extractorReturnsBoolean) pmgen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext(next)))
- else pmgen.flatMap(extractor, pmgen.fun(nextBinder, substitution(condAndNext(next))))
- )
-
- private def condAndNext(next: Tree): Tree = extraCond map (pmgen.condOptimized(_, next)) getOrElse next
+ def chainBefore(next: Tree, pt: Type): Tree = {
+ val condAndNext = extraCond map (codegen.ifThenElseZero(_, next)) getOrElse next
+ atPos(extractor.pos)(
+ if (extractorReturnsBoolean) codegen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext))
+ else codegen.flatMap(extractor, nextBinder, substitution(condAndNext))
+ )
+ }
override def toString = "X"+(extractor, nextBinder)
}
@@ -770,21 +748,42 @@ defined class Foo */
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
def chainBefore(next: Tree, pt: Type): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond match {
- case None => nullCheck
- case Some(c) => nullCheck AND c
- }
- pmgen.condOptimized(cond, substitution(next))
+ val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
+ codegen.ifThenElseZero(cond, substitution(next))
}
override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
}
+ // tack an outer test onto `cond` if binder.info and expectedType warrant it
+ def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._
+ if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, binder.info, matchOwner)) {
+ val expectedPrefix = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre => REF(pre.prefix, pre.termSymbol)
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
+ val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix
+
+ // first check cond, since that should ensure we're not selecting outer on null
+ codegen.and(cond, outerCheck)
+ }
+ else
+ cond
+ }
+
+ // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest)
+ def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt)
+ private def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(codegen._isInstanceOf(binder, pt))
// need to substitute since binder may be used outside of the next extractor call (say, in the body of the case)
case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker {
val cond = typeTest(prevBinder, nextBinderTp)
- val res = pmgen._asInstanceOf(prevBinder, nextBinderTp)
+ val res = codegen._asInstanceOf(prevBinder, nextBinderTp)
override def toString = "TT"+(prevBinder, nextBinderTp)
}
@@ -792,8 +791,58 @@ defined class Foo */
case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
val nextBinderTp = glb(List(patBinder.info.widen, pt))
+ /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involves a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variables are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+
+ // generate the tree for the run-time test that follows from the fact that
+ // a `scrut` of known type `scrutTp` is expected to have type `expectedTp`
+ // uses maybeWithOuterCheck to check the type's prefix
+ private def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def genEqualsAndInstanceOf(sym: Symbol): Tree
+ = codegen._equals(REF(sym), patBinder) AND codegen._isInstanceOf(patBinder, pt.widen)
+
+ def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe
+
+ val patBinderTp = patBinder.info.widen
+ def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt)
+
+ // TODO: [SPEC] type test for Array
+ // TODO: use manifests to improve tests (for erased types we can do better when we have a manifest)
+ pt match {
+ case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here
+ case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil
+ case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym)
+ case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL
+ case ConstantType(const) => codegen._equals(Literal(const), patBinder)
+ case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL)
+ case _ => typeTest(patBinder, pt)
+ }
+ }
+
val cond = typeAndEqualityTest(patBinder, pt)
- val res = pmgen._asInstanceOf(patBinder, nextBinderTp)
+ val res = codegen._asInstanceOf(patBinder, nextBinderTp)
override def toString = "TET"+(patBinder, pt)
}
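
To make the cases of typeAndEqualityTest above more tangible, here is a hedged sketch of the user-level checks they correspond to (ordinary Scala expressions with made-up values, not the generated trees; the outer check and Array handling are omitted):

object TypeTestSketch {
  def main(args: Array[String]): Unit = {
    val x: Any = "expected"
    // case _: String  => plain instance test (wrapped in an outer check when the prefix requires it)
    println(x.isInstanceOf[String])
    // case `p` (a stable identifier, i.e. SingleType) => equality with p plus an instance test on the widened type
    val p = "expected"
    println(p == x && x.isInstanceOf[String])
    // case 5 (a ConstantType) => constant equality; the constant is the target of ==
    println((5: Any) == x)
    // case null, on a reference scrutinee => reference comparison against null
    val y: String = null
    println(y eq null)
  }
}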
@@ -803,7 +852,7 @@ defined class Foo */
// NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
- val cond = pmgen._equals(patTree, prevBinder)
+ val cond = codegen._equals(patTree, prevBinder)
val res = CODE.REF(prevBinder)
override def toString = "ET"+(prevBinder, patTree)
}
@@ -838,7 +887,7 @@ defined class Foo */
if (canDuplicate) {
altss map {altTreeMakers =>
combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt)
- } reduceLeft pmgen.typedOrElse(pt)
+ } reduceLeft codegen.typedOrElse(pt)
} else {
val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest")
// rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil))
@@ -850,7 +899,7 @@ defined class Foo */
)
BLOCK(
VAL(rest) === Function(Nil, substitution(next)),
- combinedAlts reduceLeft pmgen.typedOrElse(pt)
+ combinedAlts reduceLeft codegen.typedOrElse(pt)
)
}
)
@@ -859,14 +908,239 @@ defined class Foo */
case class GuardTreeMaker(guardTree: Tree) extends TreeMaker {
val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = pmgen.flatMapGuard(substitution(guardTree), next)
+ def chainBefore(next: Tree, pt: Type): Tree = codegen.flatMapGuard(substitution(guardTree), next)
override def toString = "G("+ guardTree +")"
}
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
+ }
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){
+ val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+
+ emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{
+ val (matcher, hasDefault, toHoist) =
+ if (casesUnOpt nonEmpty) {
+ // when specified, need to propagate pt explicitly (type inferencer can't handle it)
+ val optPt =
+ if (isFullyDefined(pt)) inMatchMonad(pt)
+ else NoType
+
+ // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ val hasDefault = casesUnOpt.nonEmpty && {
+ val nonTrivLast = casesUnOpt.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesUnOpt, pt)
+
+ val combinedCases =
+ cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt))
+
+ (combinedCases, hasDefault, toHoist)
+ } else (codegen.zero, false, Nil)
+
+ val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault)
+ if (toHoist isEmpty) expr
+ else Block(toHoist, expr)
+ }
+ }
+
+ // combineExtractors changes the current substitutions of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree =
+ treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt))
+
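
The foldRight in combineExtractors nests each tree maker around the ones that follow it, so the first maker ends up outermost. A tiny, purely illustrative sketch of that nesting with hypothetical string-producing makers:

object ChainSketch {
  trait Maker { def chainBefore(next: String): String }
  def maker(name: String): Maker = new Maker {
    def chainBefore(next: String): String = s"$name($next)"
  }
  // the same shape as treeMakers.foldRight(EmptyTree)(_.chainBefore(_, pt))
  def combine(makers: List[Maker]): String =
    makers.foldRight("body")((m, next) => m.chainBefore(next))
  def main(args: Array[String]): Unit =
    // prints typeTest(extractor(guard(body))): the first maker is outermost
    println(combine(List(maker("typeTest"), maker("extractor"), maker("guard"))))
}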
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
+ // println("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
+ assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor)
+ d.symbol.lazyAccessor.owner = currentOwner
+ }
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // println("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // println("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+ }
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// generate actual trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait CodegenCore extends MatchMonadInterface {
+ private var ctr = 0
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1;
+ // assert(owner ne null)
+ // assert(owner ne NoSymbol)
+ NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp)
+ }
+
+ // codegen relevant to the structure of the translation (how extractors are combined)
+ trait AbsCodegen {
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
+ def zero: Tree
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree
+
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+
+ def fun(arg: Symbol, body: Tree): Tree
+ def ifThenElseZero(c: Tree, then: Tree): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def mkZero(tp: Type): Tree
+
+ def tupleSel(binder: Symbol)(i: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def and(a: Tree, b: Tree): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ }
+
+ def codegen: AbsCodegen
+
+ def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
+
+ abstract class CommonCodegen extends AbsCodegen { import CODE._
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def and(a: Tree, b: Tree): Tree = a AND b
+ def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
+
+ // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, since neither tp nor pt need contain an abstract type; we're just casting wildly)
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp)
+ if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX)
+ else gen.mkAsInstanceOf(t, tpX, true, false)
+ }
+
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false)
+ // { val tpX = repackExistential(tp)
+ // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
+ // else gen.mkIsInstanceOf(REF(b), tpX, true, false)
+ // }
+
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp)
+ if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
+ else gen.mkAsInstanceOf(REF(b), tpX, true, false)
+ }
+
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
+ }
+ }
+ }
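
The mkZero defaults above exist so that hoisted mutable binders can be initialized before any test runs. A plain-value sketch of the same per-type defaults (illustrative only; the real method builds Literal trees):

object ZeroSketch {
  def main(args: Array[String]): Unit = {
    val unitZero: Unit    = ()
    val boolZero: Boolean = false
    val intZero: Int      = 0
    val charZero: Char    = 0.toChar
    // the fallback clause: a cast null for any other type
    val refZero: String   = null.asInstanceOf[String]
    println((unitZero, boolZero, intZero, charZero, refZero))
  }
}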
+
+ trait PureMatchMonadInterface extends MatchMonadInterface {
+ val matchStrategy: Tree
+
+ def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
+ def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.head
+ protected def matchMonadSym = oneSig.finalResultType.typeSymbol
+
+ import CODE._
+ def __match(n: Name): SelectStart = matchStrategy DOT n
+
+ private lazy val oneSig: Type =
+ typer.typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
+ }
+
+ trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
+ def codegen: AbsCodegen = pureCodegen
+
+ object pureCodegen extends CommonCodegen { import CODE._
+ //// methods in MatchingStrategy (the monad companion) -- used directly in translation
+ // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree
+ = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
+ // __match.one(`res`)
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res)
+ // __match.zero
+ def zero: Tree = __match(vpmName.zero)
+ // __match.guard(`c`, `then`)
+ def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then)
+
+ //// methods in the monad instance -- used directly in translation
+ // `prev`.flatMap(`b` => `next`)
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
+ // `thisCase`.orElse(`elseCase`)
+ def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next)
+ // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next)
+ }
+ }
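
The comments in pureCodegen above spell out the shapes the translator emits: `__match.runOrElse(scrut)(x => matcher)`, `__match.one(res)`, `__match.zero`, `__match.guard(cond, res)`, plus `flatMap`/`orElse` on the monad instance. A hedged, self-contained sketch of what a translated match looks like against a hand-written Option-backed `__match` object (all names here are illustrative, not the compiler's actual matching strategy):

object PureMatchSketch {
  // a minimal Option-backed "matching strategy" with just the shapes named above
  object __match {
    def runOrElse[T, U](scrut: T)(matcher: T => Option[U]): U =
      matcher(scrut) getOrElse (throw new MatchError(scrut))
    def one[T](x: T): Option[T] = Some(x)
    def zero: Option[Nothing]   = None
    def guard[T](cond: Boolean, body: T): Option[T] = if (cond) Some(body) else None
  }
  def main(args: Array[String]): Unit = {
    val x: Any = "hello"
    // roughly what `x match { case s: String => s.length; case _ => 0 }` is translated into:
    val res = __match.runOrElse(x) { x1 =>
      __match.guard(x1.isInstanceOf[String], x1.asInstanceOf[String])
        .flatMap(s => __match.one(s.length))
        .orElse(__match.one(0))
    }
    println(res) // 5
  }
}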
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// OPTIMIZATIONS
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// decisions, decisions
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakerApproximation extends TreeMakers { self: CodegenCore =>
object Test {
var currId = 0
}
@@ -878,7 +1152,7 @@ defined class Foo */
private val reusedBy = new collection.mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
- assert(later.reuses.isEmpty)
+ assert(later.reuses.isEmpty, later.reuses)
reusedBy += later
later.reuses = Some(this)
}
@@ -959,25 +1233,16 @@ defined class Foo */
override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id
}
- /** a flow-sensitive, generalised, common sub-expression elimination
- * reuse knowledge from performed tests
- * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
- * when a sub-expression is share, it is stored in a mutable variable
- * the variable is floated up so that its scope includes all of the program that shares it
- * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
- *
- * intended to be generalised to exhaustivity/reachability checking
- */
- def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = {
// a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- val pointsToBound = collection.mutable.HashSet(prevBinder)
+ val pointsToBound = collection.mutable.HashSet(root)
// the substitution that renames variables to variables in pointsToBound
var normalize: Substitution = EmptySubstitution
// replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
// TODO check:
- // pointsToBound -- accumSubst.from == Set(prevBinder) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
var accumSubst: Substitution = EmptySubstitution
val trees = new collection.mutable.HashSet[Tree]
@@ -1033,12 +1298,28 @@ defined class Foo */
| GuardTreeMaker(_)
| ProductExtractorTreeMaker(_, Some(_), _) => Havoc
case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here
- case SubstOnlyTreeMaker(_) => Top
+ case SubstOnlyTreeMaker(_, _) => Top
case BodyTreeMaker(_, _) => Havoc
}, tm)
}
- val testss = cases.map { _ map approximateTreeMaker }
+ cases.map { _ map approximateTreeMaker }
+ }
+ }
+
+////
+ trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen =>
+ /** a flow-sensitive, generalised, common sub-expression elimination
+ * reuse knowledge from performed tests
+ * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
+ * when a sub-expression is shared, it is stored in a mutable variable
+ * the variable is floated up so that its scope includes all of the program that shares it
+ * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
+ *
+ * intended to be generalised to exhaustivity/reachability checking
+ */
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ val testss = approximateMatch(prevBinder, cases)
// interpret:
val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
@@ -1069,7 +1350,11 @@ defined class Foo */
// then, collapse these contiguous sequences of reusing tests
// store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
// replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- testss map { tests =>
+ val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ var okToCall = false
+ val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
+
+ val res = testss map { tests =>
var currDeps = Set[Cond]()
val (sharedPrefix, suffix) = tests span { test =>
(test.cond eq Top) || (for(
@@ -1081,18 +1366,67 @@ defined class Foo */
}
val collapsedTreeMakers = if (sharedPrefix.nonEmpty) { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
- for (test <- sharedPrefix; reusedTest <- test.reuses; if reusedTest.treeMaker.isInstanceOf[FunTreeMaker])
- reusedTest.treeMaker.asInstanceOf[FunTreeMaker].reused = true
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
+
// println("sharedPrefix: "+ sharedPrefix)
for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
lastReused <- lastShared.reuses)
- yield ReusingCondTreeMaker(sharedPrefix map (t => (t.treeMaker, t.reuses map (_.treeMaker)))) :: suffix.map(_.treeMaker)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
} else None
collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
}
+ okToCall = true // TODO: remove (debugging)
+
+ res mapConserve (_ mapConserve reusedOrOrig)
+ }
+
+ object ReusedCondTreeMaker {
+ def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
+ }
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, pos: Position) extends TreeMaker { import CODE._
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val treesToHoist: List[Tree] = {
+ nextBinder setFlag MUTABLE
+ List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ }
+
+ // TODO: finer-grained duplication
+ def chainBefore(next: Tree, pt: Type): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(optimizedCodegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+ }
+
+ case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
+ lazy val dropped_priors = sharedPrefix map (t => (toReused(t.treeMaker), t.reuses map (test => toReused(test.treeMaker))))
+ lazy val localSubstitution = {
+ val (from, to) = dropped_priors.collect {
+ case (dropped: CondTreeMaker, Some(prior: ReusedCondTreeMaker)) =>
+ (dropped.nextBinder, REF(prior.nextBinder))
+ }.unzip
+ val oldSubs = dropped_priors.collect {
+ case (dropped: TreeMaker, _) =>
+ dropped.substitution
+ }
+ oldSubs.foldLeft(Substitution(from, to))(_ >> _)
+ }
+
+ def chainBefore(next: Tree, pt: Type): Tree = {
+ val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond)
+
+ IF (cond) THEN BLOCK(
+ substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ ) ELSE codegen.zero
+ }
}
+ }
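
ReusedCondTreeMaker/ReusingCondTreeMaker above share a prefix of tests by hoisting two mutable variables per reused test: a flag recording whether the condition held, and the (now mutable) binder storing its result; later cases read the flag instead of re-running the test. A hand-written sketch of the shape this produces (illustrative only, assuming an Option-style zero as in the optimized codegen):

object CseSketch {
  def main(args: Array[String]): Unit = {
    val x: Any = "shared"
    // hoisted, zero-initialized variables, as in treesToHoist: a flag for the condition
    // and the mutable binder holding the result of the shared test
    var rc: Boolean = false
    var s: String   = null
    // the first case runs the test once and stores both the outcome and the result
    val case1: Option[Int] =
      if (x.isInstanceOf[String]) { rc = true; s = x.asInstanceOf[String]; Some(s.length) }
      else None
    // a later case sharing the same prefix only consults the stored condition and binder
    def case2: Option[Int] = if (rc) Some(s.length * 2) else None
    println(case1 orElse case2) // Some(6)
  }
}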
+
+ //// DCE
+ trait DeadCodeElimination extends TreeMakers { self: CodegenCore =>
// TODO: non-trivial dead-code elimination
// e.g., the following match should compile to a simple instanceof:
// case class Ident(name: String)
@@ -1101,21 +1435,10 @@ defined class Foo */
// do minimal DCE
cases
}
+ }
-
- def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
-
- // a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
- def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
- var accumSubst: Substitution = initial
- treeMakers foreach { maker =>
- maker incorporateOuterSubstitution accumSubst
- accumSubst = maker.substitution
- }
- removeSubstOnly(treeMakers)
- }
-
+ //// SWITCHES
+ trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
object SwitchablePattern { def unapply(pat: Tree) = pat match {
case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches
case _ => false
@@ -1130,244 +1453,100 @@ defined class Foo */
// }
// }
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = if (optimizingCodeGen) {
- def unfold(tms: List[TreeMaker], currLabel: Option[Symbol] = None, nextLabel: Option[Symbol] = None): List[CaseDef] = tms match {
- // constant
- case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => import CODE._
- @inline
- def substedBody = btm.substitution(body)
- val labelledBody = currLabel match {
- case None => substedBody // currLabel.isEmpty implies nextLabel.isEmpty
- case Some(myLabel) =>
- LabelDef(myLabel, Nil,
- nextLabel match {
- case None => substedBody
- case Some(next) => ID(next) APPLY ()
- }
- )
- }
- List(CaseDef(const, EmptyTree, labelledBody))
-
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: bodyTm :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty)
- val labels = altss map { alts =>
- Some(freshSym(NoPosition, MethodType(Nil, pt), "$alt$") setFlag (METHOD | LABEL))
- }
-
- val caseDefs = (altss, labels, labels.tail :+ None).zipped.map { case (alts, currLabel, nextLabel) =>
- unfold(alts :+ bodyTm, currLabel, nextLabel)
- }
+ private val switchableTpes = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
- if (caseDefs exists (_.isEmpty)) Nil
- else caseDefs.flatten
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = {
+ def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ if (xs exists (_.isEmpty)) None else Some(xs.flatten)
- case _ => Nil // failure
+ def isSwitchableTpe(tpe: Type): Boolean =
+ switchableTpes contains tpe
+ def switchableConstToInt(x: Tree): Tree = {
+ val Literal(const) = x
+ const.tag match {
+ case IntTag => x
+ case ByteTag | ShortTag | CharTag => Literal(Constant(const.intValue))
+ }
}
val caseDefs = cases map { makers =>
removeSubstOnly(makers) match {
// default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches)
case (btm@BodyTreeMaker(body, _)) :: Nil =>
- List(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body)))
- case nonTrivialMakers =>
- unfold(nonTrivialMakers)
- }
- }
-
- if (caseDefs exists (_.isEmpty)) None
- else { import CODE._
- val matcher = BLOCK(
- VAL(scrutSym) === scrut, // TODO: type test for switchable type if patterns allow switch but the scrutinee doesn't
- Match(REF(scrutSym), caseDefs.flatten) // match on scrutSym, not scrut to avoid duplicating scrut
- )
-
- // matcher filter (tree => tree.tpe == null) foreach println
- // treeBrowser browse matcher
- Some(matcher) // set type to avoid recursion in typedMatch
- }
- } else None
-
- def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] =
- doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
-
- // calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){
- val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
-
- emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{
- var toHoist = List[Tree]()
- val (matcher, hasDefault) =
- if (casesUnOpt nonEmpty) {
- // when specified, need to propagate pt explicitly (type inferencer can't handle it)
- val optPt =
- if (isFullyDefined(pt)) inMatchMonad(pt)
- else NoType
-
- // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
- // exhaustivity and reachability must be checked before optimization as well
- val hasDefault = casesUnOpt.nonEmpty && {
- val nonTrivLast = casesUnOpt.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ Some(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body)))
+ // constant
+ case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil =>
+ Some(CaseDef(switchableConstToInt(const), EmptyTree, btm.substitution(body)))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty)
+ val caseConstants = altss map {
+ case EqualityTestTreeMaker(_, const@SwitchablePattern(), _) :: Nil =>
+ Some(switchableConstToInt(const))
+ case _ =>
+ None
}
- val cases =
- if (optimizingCodeGen) optimizeCases(scrutSym, casesUnOpt, pt)
- else casesUnOpt
-
- val combinedCases =
- cases.map(combineExtractors(_, pt)).reduceLeft(pmgen.typedOrElse(optPt))
-
- toHoist = (for (treeMakers <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList
-
- (pmgen.fun(scrutSym, combinedCases), hasDefault)
- } else (pmgen.zero, false)
-
- val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType, hasDefault)
- if (toHoist isEmpty) expr
- else Block(toHoist, expr)
- }
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree =
- treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt))
-
-
-
- // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
- // TODO: assign more fine-grained positions
- // fixes symbol nesting, assigns positions
- private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
- currentOwner = origOwner
-
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- }
- t match {
- case Function(_, _) if t.symbol == NoSymbol =>
- t.symbol = currentOwner.newValue(t.pos, nme.ANON_FUN_NAME).setFlag(SYNTHETIC).setInfo(NoType)
- // println("new symbol for "+ (t, t.symbol.ownerChain))
- case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
- t.symbol.owner = currentOwner
- case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner)
- d.symbol.lazyAccessor.owner = currentOwner
+ sequence(caseConstants) map { constants =>
+ val substedBody = btm.substitution(body)
+ CaseDef(Alternative(constants), EmptyTree, substedBody)
}
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
-
- d.symbol.owner = currentOwner
- // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
case _ =>
+ None //failure (can't translate pattern to a switch)
}
- super.traverse(t)
}
- // override def apply
- // println("before fixerupper: "+ xTree)
- // currentRun.trackerFactory.snapshot()
- // println("after fixerupper")
- // currentRun.trackerFactory.snapshot()
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// substitution
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- object Substitution {
- def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
- }
-
- class Substitution(val from: List[Symbol], val to: List[Tree]) {
- def apply(tree: Tree): Tree = typedSubst(tree, from, to)
-
- // the substitution that chains `other` before `this` substitution
- // forall t: Tree. this(other(t)) == (this >> other)(t)
- def >>(other: Substitution): Substitution = {
- val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
- new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ if (!isSwitchableTpe(scrut.tpe))
+ None // TODO: emit a cast of the scrutinee and a switch on the cast scrutinee if patterns allow switch but the type of the scrutinee doesn't
+ else {
+ sequence(caseDefs) map { caseDefs =>
+ import CODE._
+ val caseDefsWithDefault = {
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+ val hasDefault = caseDefs exists isDefault
+ if (hasDefault) caseDefs else {
+ val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) }
+ caseDefs :+ default
+ }
+ }
+ val matcher = BLOCK(
+ if (scrut.tpe != IntClass.tpe) {
+ scrutSym setInfo IntClass.tpe
+ VAL(scrutSym) === (scrut DOT nme.toInt)
+ } else {
+ VAL(scrutSym) === scrut
+ },
+ Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not scrut to avoid duplicating scrut
+ )
+ // matcher filter (tree => tree.tpe == null) foreach println
+ // treeBrowser browse matcher
+ matcher // set type to avoid recursion in typedMatch
+ }
}
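
The emitSwitch path above only fires when the scrutinee has a switchable type; it converts Byte/Short/Char constants (and the scrutinee, via toInt) to Int, folds alternatives into a single case, and appends a MatchError-throwing default when the user wrote none. A rough hand-written equivalent of the shape it emits (a sketch, not the generated trees):

object SwitchSketch {
  def main(args: Array[String]): Unit = {
    val c: Char = 'b'
    // the scrutinee is widened to Int so the backend can emit a JVM switch
    val x1: Int = c.toInt
    val res = x1 match {
      case 97      => "a"
      case 98 | 99 => "b or c"                  // an alternative becomes one case with several constants
      case _       => throw new MatchError(x1)  // the synthesized default when the user wrote none
    }
    println(res) // b or c
  }
}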
- override def toString = (from zip to) mkString("Substitution(", ", ", ")")
- }
-
- object EmptySubstitution extends Substitution(Nil, Nil) {
- override def apply(tree: Tree): Tree = tree
- override def >>(other: Substitution): Substitution = other
- }
-
-
- def matchingMonadType: Type
- def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x"): Symbol
- def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree
- def typeTest(binder: Symbol, pt: Type): Tree
-
- // codegen relevant to the structure of the translation (how extractors are combined)
- trait AbsCodeGen { import CODE.UNIT
- def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree
- def flatMap(a: Tree, b: Tree): Tree
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
- def fun(arg: Symbol, body: Tree): Tree
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree
- def zero: Tree
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
- def condOptimized(c: Tree, then: Tree): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
- def _asInstanceOf(b: Symbol, tp: Type): Tree
- def mkZero(tp: Type): Tree
}
-
- def pmgen: AbsCodeGen
- def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator
}
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// generate actual trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- trait MatchCodeGen extends TreeMakers {
- lazy val pmgen: CommonCodeGen with MatchingStrategyGen with MonadInstGen =
- if (optimizingCodeGen) (new CommonCodeGen with OptimizedCodeGen {})
- else (new CommonCodeGen with MatchingStrategyGen with MonadInstGen {})
-
- import CODE._
-
- trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- // methods in MatchingStrategy (the monad companion) -- used directly in translation
- def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutTp, resTp) APPLY (scrut) APPLY (matcher) // matchingStrategy.runOrElse(scrut)(matcher)
- // *only* used to wrap the RHS of a body (isDefinedAt synthesis relies on this)
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (matchingStrategy DOT vpmName.one) (_asInstanceOf(res, bodyPt, force = true)) // matchingStrategy.one(res), like one, but blow this one away for isDefinedAt (since it's the RHS of a case)
- def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero
- def guard(c: Tree, then: Tree, tp: Type): Tree = genTypeApply((matchingStrategy DOT vpmName.guard), repackExistential(tp)) APPLY (c, then) // matchingStrategy.guard[tp](c, then)
- }
+ trait OptimizedMatchMonadInterface extends MatchMonadInterface {
+ override def inMatchMonad(tp: Type): Type = optionType(tp)
+ override def pureType(tp: Type): Type = tp
+ override protected def matchMonadSym = OptionClass
+ }
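// Illustrative sketch (not from the patch): with Option as the match monad,
// inMatchMonad(T) = Option[T] and pureType(T) = T, so a successful case wraps its
// result ("one"), a failed case is None ("zero"), cases compose with orElse, and
// runOrElse throws MatchError when every case fails. A hand-written equivalent of
// matching `case Some(i) if i > 0 => i; case _ => 0` against an Option[Int]:
object OptionMonadSketch {
  def run(x: Option[Int]): Int = {
    val firstCase: Option[Int]  = x filter (_ > 0) // extractor + guard: Some(i) or None
    val secondCase: Option[Int] = Some(0)          // default case body, wrapped in "one"
    (firstCase orElse secondCase) getOrElse (throw new MatchError(x))
  }
}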
- trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- // methods in the monad instance -- used directly in translation
- def flatMap(a: Tree, b: Tree): Tree = (a DOT vpmName.flatMap)(b)
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase)
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ override def codegen: AbsCodegen = optimizedCodegen
- // TODO: the trees generated by flatMapCond and flatMapGuard may need to be distinguishable by exhaustivity checking -- they aren't right now
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol,
- nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), fun(nextBinder, next))
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, UnitClass.tpe), UnitClass.tpe, next)
- }
+ // trait AbsOptimizedCodegen extends AbsCodegen {
+ // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ // }
+ // def optimizedCodegen: AbsOptimizedCodegen
// when we know we're targeting Option, do some inlining the optimizer won't do
- // `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
- // this is a special instance of the advanced inlining optimization that takes a method call on
- // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- // this trait overrides ALL of the methods of MatchingStrategyGen with MonadInstGen
- trait OptimizedCodeGen extends CommonCodeGen with MatchingStrategyGen with MonadInstGen {
+ // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ object optimizedCodegen extends CommonCodegen /*with AbsOptimizedCodegen*/ { import CODE._
lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero")
/** Inline runOrElse and get rid of Option allocations
@@ -1379,23 +1558,22 @@ defined class Foo */
@inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
- override def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean) = matcher match {
- case Function(List(x: ValDef), body) =>
- matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
- if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
- BLOCK(
- VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
- VAL(x.symbol) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for x.symbol in body -- the owner structure is repaired by fixerUpper
- VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
- VAL(keepGoing) === TRUE,
- body,
- if(hasDefault) REF(matchRes)
- else (IF (REF(keepGoing)) THEN MATCHERROR(REF(x.symbol)) ELSE REF(matchRes))
- )
+ def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = {
+ matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
+ if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed vars, in fact, make it a val -- conveniently also works around SI-5245
+ BLOCK(
+ VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
+ VAL(scrutSym) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for scrutSym in `matcher` -- the owner structure is repaired by fixerUpper
+ VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
+ VAL(keepGoing) === TRUE,
+ matcher,
+ if(hasDefault) REF(matchRes)
+ else (IF (REF(keepGoing)) THEN MATCHERROR(REF(scrutSym)) ELSE REF(matchRes))
+ )
}
// only used to wrap the RHS of a body
- override def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
BLOCK(
if (dontStore(matchPt)) res // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
else (REF(matchRes) === res), // _asInstanceOf(res, tp.widen, force = true)
@@ -1404,195 +1582,57 @@ defined class Foo */
)
}
- override def zero: Tree = REF(zeroSym)
+ def zero: Tree = REF(zeroSym)
- // guard is only used by flatMapCond and flatMapGuard, which are overridden
- override def guard(c: Tree, then: Tree, tp: Type): Tree = throw new NotImplementedError("guard is never called by optimizing codegen")
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
- override def flatMap(opt: Tree, fun: Tree): Tree = fun match {
- case Function(List(x: ValDef), body) =>
- val tp = inMatchMonad(x.symbol.tpe)
- val vs = freshSym(opt.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
- val v = VAL(vs) === opt
-
- BLOCK(
- v,
- IF (vs DOT isEmpty) THEN zero ELSE typedSubst(body, List(x.symbol), List(vs DOT get)) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor)
- )
- case _ => println("huh?")
- (opt DOT vpmName.flatMap)(fun)
+ BLOCK(
+ VAL(prevSym) === prev,
+ IF (prevSym DOT isEmpty) THEN zero ELSE Substitution(b, prevSym DOT get)(next) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor)
+ )
}
- override def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
+ def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
BLOCK(
thisCase,
IF (REF(keepGoing)) THEN elseCase ELSE zero // leave trailing zero for now, otherwise typer adds () anyway
)
}
- override def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree =
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree =
IF (cond) THEN BLOCK(
VAL(nextBinder) === res,
next
) ELSE zero
- override def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- IF (guardTree) THEN next ELSE zero
- }
-
- @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
- if (origTp == null || origTp == NoType) to
- // important: only type when actually substing and when original tree was typed
- // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
- else typed(to, EXPRmode, WildcardType)
-
- // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
- // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
- def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree = {
- // according to -Ystatistics 10% of translateMatch's time is spent in this method...
- // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
- override def transform(tree: Tree): Tree = {
- def subst(from: List[Symbol], to: List[Tree]): Tree =
- if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe)
- else subst(from.tail, to.tail)
-
- tree match {
- case Ident(_) => subst(from, to)
- case _ => super.transform(tree)
- }
- }
- }).transform(tree)
- }
-
- var ctr = 0
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1;
- // assert(owner ne null)
- // assert(owner ne NoSymbol)
- new TermSymbol(NoSymbol, pos, vpmName.counted(prefix, ctr)) setInfo repackExistential(tp)
- }
-
- def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
- case _ => tp
- }
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ IF (cond) THEN BLOCK(
+ condSym === TRUE,
+ nextBinder === res,
+ next
+ ) ELSE zero
- object vpmName {
- val one = "one".toTermName
- val drop = "drop".toTermName
- val flatMap = "flatMap".toTermName
- val get = "get".toTermName
- val guard = "guard".toTermName
- val isEmpty = "isEmpty".toTermName
- val orElse = "orElse".toTermName
- val outer = "<outer>".toTermName
- val runOrElse = "runOrElse".toTermName
- val zero = "zero".toTermName
-
- def counted(str: String, i: Int) = (str+i).toTermName
- def tupleIndex(i: Int) = ("_"+i).toTermName
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ IF (guardTree) THEN next ELSE zero
}
+ }
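// Illustrative sketch (not from the patch): the rewrite performed by
// optimizedCodegen.flatMap above, written out by hand. Both forms compute the
// same Option, but the inlined form avoids the closure and the virtual flatMap
// call, using only isEmpty/get as described in the comment above.
object FlatMapInliningSketch {
  def monadic(o: Option[Int], f: Int => Option[String]): Option[String] =
    o flatMap f
  def inlined(o: Option[Int], f: Int => Option[String]): Option[String] = {
    val o1 = o                            // bind the receiver once (the fresh "o" symbol)
    if (o1.isEmpty) None else f(o1.get)   // zero vs. the substituted continuation
  }
}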
- def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
-
- trait CommonCodeGen extends AbsCodeGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT vpmName.tupleIndex(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
- def and(a: Tree, b: Tree): Tree = a AND b
- def condOptimized(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
-
- // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp)
- if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX)
- else gen.mkAsInstanceOf(t, tpX, true, false)
- }
-
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false)
- // { val tpX = repackExistential(tp)
- // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
- // else gen.mkIsInstanceOf(REF(b), tpX, true, false)
- // }
-
- def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp)
- if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
- else gen.mkAsInstanceOf(REF(b), tpX, true, false)
- }
-
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
- }
+ trait MatchOptimizations extends CommonSubconditionElimination
+ with DeadCodeElimination
+ with SwitchEmission
+ with OptimizedCodegen { self: TreeMakers =>
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
+ val toHoist = (
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList
+ (optCases, toHoist)
}
-
- def matchingStrategy: Tree
}
}
-
-// object noShadowedUntyped extends Traverser {
-// override def traverse(t: Tree) {
-// if ((t.tpe ne null) && (t.tpe ne NoType)) okTree = t
-// else if(okTree ne null) println("untyped subtree "+ t +" in typed tree"+ okTree +" : "+ okTree.tpe)
-// super.traverse(t)
-// }
-// var okTree: Tree = null
-// }
-// private def c(t: Tree): Tree = noShadowedUntyped(t)
-
- // def approximateTreeMaker(tm: TreeMaker): List[Test] = tm match {
- // case ExtractorTreeMaker(extractor, _, _) => HavocTest
- // case FilteredExtractorTreeMaker(extractor, lenGuard, _, _) => HavocTest
- // case ProductExtractorTreeMaker(testedBinder, lenGuard, _) => TopTest // TODO: (testedBinder ne null) and lenGuard
- //
- // // cond = typeTest(prevBinder, nextBinderTp)
- // // res = pmgen._asInstanceOf(prevBinder, nextBinderTp)
- // case TypeTestTreeMaker(testedBinder, pt, _) =>
- //
- // // cond = typeAndEqualityTest(patBinder, pt)
- // // res = pmgen._asInstanceOf(patBinder, nextBinderTp)
- // case TypeAndEqualityTestTreeMaker(_, testedBinder, pt, _) =>
- //
- // // cond = pmgen._equals(patTree, prevBinder)
- // // res = CODE.REF(prevBinder)
- // case EqualityTestTreeMaker(testedBinder, rhs, _) =>
- //
- // case AlternativesTreeMaker(_, alts: *) =>
- //
- // case GuardTreeMaker(guardTree) =>
- // }
-
- // // TODO: it's not exactly sound to represent an unapply-call by its symbol... also need to consider the prefix, like the outer-test (can this be captured as the path to this test?)
- // type ExtractorRepr = Symbol
- //
- // // TODO: we're undoing tree-construction that we ourselves performed earlier -- how about not-doing so we don't have to undo?
- // private def findBinderArgOfApply(extractor: Tree, unappSym: Symbol): Symbol = {
- // class CollectTreeTraverser[T](pf: PartialFunction[Tree => T]) extends Traverser {
- // val hits = new ListBuffer[T]
- // override def traverse(t: Tree) {
- // if (pf.isDefinedAt(t)) hits += pf(t)
- // super.traverse(t)
- // }
- // }
- // val trav = new CollectTreeTraverser{ case Apply(unapp, List(arg)) if unapp.symbol eq unappSym => arg.symbol}
- // trav.traverse(extractor)
- // trav.hits.headOption getOrElse NoSymbol
- // }
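// Illustrative sketch (not from the patch): the shape of code produced by the
// switch emission added earlier in this file. A non-Int scrutinee is widened
// with .toInt and bound to a fresh symbol so the scrutinee is not duplicated,
// and a default case throwing MatchError is appended when the user wrote none.
object SwitchEmissionSketch {
  def classify(c: Char): String = {
    val x1: Int = c.toInt      // match on x1, not on c
    x1 match {
      case 97 => "vowel"       // 'a'
      case 98 => "consonant"   // 'b'
      case _  => throw new MatchError(c)   // synthesized default
    }
  }
}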
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 98f1c96cad..e313edb3f6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -150,49 +150,63 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// Override checking ------------------------------------------------------------
+
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
- val self = clazz.thisType
- val bridges = new ListBuffer[Tree]
-
- def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- val bridge = member.cloneSymbolImpl(clazz)
- .setPos(clazz.pos).setFlag(member.flags | VBRIDGE)
- bridge.setInfo(bridgetpe.cloneInfo(bridge))
- clazz.info.decls enter bridge
- val List(params) = bridge.paramss
- val TypeRef(_, JavaRepeatedParamClass, List(elemtp)) = params.last.tpe
- val (initargs, List(lastarg0)) = (params map Ident) splitAt (params.length - 1)
- val lastarg = gen.wildcardStar(gen.mkWrapArray(lastarg0, elemtp))
- val body = Apply(Select(This(clazz), member), initargs ::: List(lastarg))
- localTyper.typed {
- /*util.trace("generating varargs bridge")*/(DefDef(bridge, body))
+ // This is quite expensive, so attempt to skip it completely.
+ // Insist that there be at least one java-defined ancestor which
+ // defines a varargs method. TODO: Find a cheaper way to exclude.
+ if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
+ val self = clazz.thisType
+ val bridges = new ListBuffer[Tree]
+
+ def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
+ log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+
+ val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
+ bridge.setInfo(bridgetpe.cloneInfo(bridge))
+ clazz.info.decls enter bridge
+
+ val params = bridge.paramss.head
+ val elemtp = params.last.tpe.typeArgs.head
+ val idents = params map Ident
+ val lastarg = gen.wildcardStar(gen.mkWrapArray(idents.last, elemtp))
+ val body = Apply(Select(This(clazz), member), idents.init :+ lastarg)
+
+ localTyper typed DefDef(bridge, body)
}
- }
-
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
- // if a method with type `jtpe` exists and that method is not a varargs bridge
- // then create a varargs bridge of type `jtpe` that forwards to the
- // member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
- if (!(member hasFlag DEFERRED) && hasRepeatedParam(member.info)) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
- val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE) filter (
- sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)
- // this is a bit tortuous: we look for non-private members or bridges
- // if we find a bridge everything is OK. If we find another member,
- // we need to create a bridge
- )
- if (inherited.exists) {
- bridges += varargBridge(member, jtpe)
+
+ // For all concrete non-private members that have a (Scala) repeated parameter:
+ // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // if a method with type `jtpe` exists and that method is not a varargs bridge
+ // then create a varargs bridge of type `jtpe` that forwards to the
+ // member method with the Scala vararg type.
+ for (member <- clazz.info.nonPrivateMembers) {
+ if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
+ val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+ // Delaying calling memberType as long as possible
+ if (inherited ne NoSymbol) {
+ val jtpe = toJavaRepeatedParam(self.memberType(member))
+ // this is a bit tortuous: we look for non-private members or bridges
+ // if we find a bridge everything is OK. If we find another member,
+ // we need to create a bridge
+ if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ bridges += varargBridge(member, jtpe)
+ }
}
}
+
+ bridges.toList
}
-
- bridges.toList
+ else Nil
}
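// Illustrative sketch (not from the patch): the forwarding shape of the bridge
// addVarargBridges creates. A Scala repeated parameter erases to Seq, while the
// Java side expects an array, so the synthesized bridge (flagged VBRIDGE in the
// real compiler) takes the array form and forwards. Hand-written approximation:
class VarargBridgeSketch {
  // the Scala override of a Java method declared as `void run(String... args)`
  def run(args: String*): Unit = args foreach println
  // what the bridge does: wrap the array and forward with a wildcard star
  def runBridge(args: Array[String]): Unit = run(args: _*)
}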
/** 1. Check all members of class `clazz` for overriding conditions.
@@ -262,6 +276,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
* of class `clazz` are met.
*/
def checkOverride(member: Symbol, other: Symbol) {
+ debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
+
def memberTp = self.memberType(member)
def otherTp = self.memberType(other)
def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
@@ -317,21 +333,22 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// return if we already checked this combination elsewhere
if (member.owner != clazz) {
- if ((member.owner isSubClass other.owner) && (member.isDeferred || !other.isDeferred)) {
+ def deferredCheck = member.isDeferred || !other.isDeferred
+ def subOther(s: Symbol) = s isSubClass other.owner
+ def subMember(s: Symbol) = s isSubClass member.owner
+
+ if (subOther(member.owner) && deferredCheck) {
//Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
- return;
+ return
+ }
+ if (clazz.parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) {
+ //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
+ return
+ }
+ if (clazz.parentSymbols forall (p => subOther(p) == subMember(p))) {
+ //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
+ return
}
- if (clazz.info.parents exists (parent =>
- (parent.typeSymbol isSubClass other.owner) && (parent.typeSymbol isSubClass member.owner) &&
- (member.isDeferred || !other.isDeferred))) {
- //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
- return;
- }
- if (clazz.info.parents forall (parent =>
- (parent.typeSymbol isSubClass other.owner) == (parent.typeSymbol isSubClass member.owner))) {
- //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
- return;
- }
}
/** Is the intersection between given two lists of overridden symbols empty?
@@ -395,8 +412,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
-
-
def checkOverrideTypes() {
if (other.isAliasType) {
//if (!member.typeParams.isEmpty) (1.5) @MAT
@@ -405,14 +420,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// overrideError("may not override parameterized type");
// @M: substSym
- if( !(sameLength(member.typeParams, other.typeParams) && (self.memberType(member).substSym(member.typeParams, other.typeParams) =:= self.memberType(other))) ) // (1.6)
+ if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
overrideTypeError();
- } else if (other.isAbstractType) {
+ }
+ else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
-
- val memberTp = self.memberType(member)
val otherTp = self.memberInfo(other)
+
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
explainTypes(_.bounds containsType _, otherTp, memberTp)
@@ -431,6 +446,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// check a type alias's RHS corresponds to its declaration
// this overlaps somewhat with validateVariance
if(member.isAliasType) {
+ // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
if(!kindErrors.isEmpty)
@@ -659,9 +675,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
}
- val parents = bc.info.parents
- if (!parents.isEmpty && parents.head.typeSymbol.hasFlag(ABSTRACT))
- checkNoAbstractDecls(parents.head.typeSymbol)
+ if (bc.superClass hasFlag ABSTRACT)
+ checkNoAbstractDecls(bc.superClass)
}
checkNoAbstractMembers()
@@ -911,9 +926,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
- (tps zip tparams) foreach {
- case (tp, tparam) => validateVariance(tp, variance * tparam.variance)
- }
+ foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
}
validateVariance(base.info, CoVariance)
@@ -942,7 +955,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// Forward reference checking ---------------------------------------------------
class LevelInfo(val outer: LevelInfo) {
- val scope: Scope = if (outer eq null) new Scope else new Scope(outer.scope)
+ val scope: Scope = if (outer eq null) newScope else newNestedScope(outer.scope)
var maxindex: Int = Int.MinValue
var refpos: Position = _
var refsym: Symbol = _
@@ -1021,10 +1034,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
/** Symbols which limit the warnings we can issue since they may be value types */
val isMaybeValue = Set(AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
- // Whether def equals(other: Any) is overridden
- def isUsingDefaultEquals = {
+ // Whether def equals(other: Any) is overridden or synthetic
+ def isUsingWarnableEquals = {
val m = receiver.info.member(nme.equals_)
- (m == Object_equals) || (m == Any_equals)
+ (m == Object_equals) || (m == Any_equals) || (m.isSynthetic && m.owner.isCase)
}
// Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
def isUsingDefaultScalaOp = {
@@ -1032,7 +1045,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
(s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
}
// Whether the operands+operator represent a warnable combo (assuming anyrefs)
- def isWarnable = isReferenceOp || (isUsingDefaultEquals && isUsingDefaultScalaOp)
+ // Looking for comparisons performed with ==/!= in combination with either an
+ // equals method inherited from Object or a case class synthetic equals (for
+ // which we know the logic.)
+ def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
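// Illustrative sketch (not from the patch): the kind of comparison the widened
// check can now flag. A case class gets a synthetic equals whose logic the
// compiler knows, so comparing it with == against an unrelated type is as
// warnable as a comparison relying on Object.equals.
object EqualsWarningSketch {
  case class Meters(n: Int)
  def demo(): Boolean = Meters(1) == "1 meter"   // always false, and now warnable
}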
@@ -1092,7 +1108,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- if (nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
if (actual isSubClass receiver) ()
else if (receiver isSubClass actual) ()
// warn only if they have no common supertype below Object
@@ -1153,12 +1169,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
def createStaticModuleAccessor() = atPhase(phase.next) {
val method = (
- sym.owner.newMethod(sym.pos, sym.name.toTermName)
- setFlag (sym.flags | STABLE)
- resetFlag MODULE
- setInfo NullaryMethodType(sym.moduleClass.tpe)
+ sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
+ setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
)
- sym.owner.info.decls enter method
localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
}
def createInnerModuleAccessor(vdef: Tree) = List(
@@ -1230,11 +1243,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
/* Check whether argument types conform to bounds of type parameters */
- private def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit =
- try typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "")
+ private def checkBounds(tree0: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type]): Unit =
+ try typer.infer.checkBounds(tree0, pre, owner, tparams, argtps, "")
catch {
case ex: TypeError =>
- unit.error(pos, ex.getMessage());
+ unit.error(tree0.pos, ex.getMessage())
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
(argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
@@ -1334,7 +1347,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// types of the value parameters
- member.paramss.flatten foreach (p => checkAccessibilityOfType(p.tpe))
+ mapParamss(member)(p => checkAccessibilityOfType(p.tpe))
// upper bounds of type parameters
member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
@@ -1364,22 +1377,22 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
false
}
- private def checkTypeRef(tp: Type, pos: Position) = tp match {
+ private def checkTypeRef(tp: Type, tree: Tree) = tp match {
case TypeRef(pre, sym, args) =>
- checkDeprecated(sym, pos)
+ checkDeprecated(sym, tree.pos)
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
if (!tp.isHigherKinded)
- checkBounds(pre, sym.owner, sym.typeParams, args, pos)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
case _ =>
}
- private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach (tp => checkTypeRef(tp, tree))
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
- checkAnnotations(annots map (_.atp), tree.pos)
+ checkAnnotations(annots map (_.atp), tree)
transformTrees(annots flatMap (_.args))
}
@@ -1394,7 +1407,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case tpt@TypeTree() =>
if(tpt.original != null) {
tpt.original foreach {
- case dc@TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416
+ case dc@TypeTreeWithDeferredRefCheck() =>
+ applyRefchecksToAnnotations(dc.check()) // #2416
case _ =>
}
}
@@ -1440,7 +1454,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
unit.error(tree.pos, "too many dimensions for array creation")
Literal(Constant(null))
} else {
- localTyper.getManifestTree(tree.pos, etpe, false)
+ localTyper.getManifestTree(tree, etpe, false)
}
}
val newResult = localTyper.typedPos(tree.pos) {
@@ -1480,7 +1494,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
def checkSuper(mix: Name) =
// term should have been eliminated by super accessors
- assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY))
+ assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY), (qual.symbol, sym, mix))
transformCaseApply(tree,
qual match {
@@ -1549,7 +1563,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
checkOverloadedRestrictions(currentOwner)
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
-
if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
else tree
@@ -1569,13 +1582,13 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
case ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
case t: TypeRef =>
- checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree.pos)
+ checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree)
case _ =>
}
tree
case TypeApply(fn, args) =>
- checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
+ checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe))
transformCaseApply(tree, ())
case x @ Apply(_, _) =>
@@ -1632,7 +1645,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
result
} catch {
case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace();
+ if (settings.debug.value) ex.printStackTrace()
unit.error(tree.pos, ex.getMessage())
tree
} finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index c9991614e4..0ab09b4fec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -43,7 +43,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
private def transformArgs(params: List[Symbol], args: List[Tree]) = {
- treeInfo.zipMethodParamsAndArgs(params, args) map { case (param, arg) =>
+ treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) =>
if (isByNameParamType(param.tpe))
withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
@@ -51,12 +51,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
+ def typeError(typer: analyzer.Typer, pos: Position, found: Type, req: Type) {
+ if (!found.isErroneous && !req.isErroneous) {
+ val msg = analyzer.ErrorUtils.typeErrorMsg(found, req, typer.infer.isPossiblyMissingArgs(found, req))
+ typer.context.error(pos, analyzer.withAddendum(pos)(msg))
+ if (settings.explaintypes.value)
+ explainTypes(found, req)
+ }
+ }
+
if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
val packed = localTyper.packedType(tree, NoSymbol)
if (!(packed <:< pt)) {
val errorContext = localTyper.context.make(localTyper.context.tree)
- errorContext.reportGeneralErrors = true
- analyzer.newTyper(errorContext).infer.typeError(tree.pos, packed, pt)
+ errorContext.setReportErrors()
+ typeError(analyzer.newTyper(errorContext), tree.pos, packed, pt)
}
}
tree
@@ -97,18 +106,13 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
var superAcc = clazz.info.decl(supername).suchThat(_.alias == sym)
if (superAcc == NoSymbol) {
debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- superAcc =
- clazz.newMethod(tree.pos, supername)
- .setFlag(SUPERACCESSOR | PRIVATE)
- .setAlias(sym)
+ superAcc = clazz.newMethod(supername, tree.pos, SUPERACCESSOR | PRIVATE) setAlias sym
var superAccTpe = clazz.thisType.memberType(sym)
if (sym.isModule && !sym.isMethod) {
// the super accessor always needs to be a method. See #231
superAccTpe = NullaryMethodType(superAccTpe)
}
- superAcc.setInfo(superAccTpe.cloneInfo(superAcc))
- //println("creating super acc "+superAcc+":"+superAcc.tpe)//DEBUG
- clazz.info.decls enter superAcc
+ superAcc setInfoAndEnter (superAccTpe cloneInfo superAcc)
storeAccessorDefinition(clazz, DefDef(superAcc, EmptyTree))
}
atPos(sup.pos) {
@@ -312,31 +316,29 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
memberType.cloneInfo(protAcc).asSeenFrom(qual.tpe, sym.owner))
}
- var protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s))
- if (protAcc == NoSymbol) {
- protAcc = clazz.newMethod(tree.pos, nme.protName(sym.originalName))
- protAcc.setInfo(accType(protAcc))
- clazz.info.decls.enter(protAcc);
+ val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
+ val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+ newAcc setInfoAndEnter accType(newAcc)
- val code = DefDef(protAcc, {
- val (receiver :: _) :: tail = protAcc.paramss
+ val code = DefDef(newAcc, {
+ val (receiver :: _) :: tail = newAcc.paramss
val base: Tree = Select(Ident(receiver), sym)
- val allParamTypes = sym.tpe.paramss map (xs => xs map (_.tpe))
-
- (tail zip allParamTypes).foldLeft(base) {
- case (fn, (params, tpes)) =>
- Apply(fn, params zip tpes map { case (p, tp) => makeArg(p, receiver, tp) })
- }
+ val allParamTypes = mapParamss(sym)(_.tpe)
+ val args = map2(tail, allParamTypes)((params, tpes) => map2(params, tpes)(makeArg(_, receiver, _)))
+ args.foldLeft(base)(Apply(_, _))
})
debuglog("" + code)
storeAccessorDefinition(clazz, code)
+ newAcc
}
- var res: Tree = atPos(tree.pos) {
- if (targs.head == EmptyTree)
- Apply(Select(This(clazz), protAcc), List(qual))
- else
- Apply(TypeApply(Select(This(clazz), protAcc), targs), List(qual))
+ val selection = Select(This(clazz), protAcc)
+ def mkApply(fn: Tree) = Apply(fn, qual :: Nil)
+ val res = atPos(tree.pos) {
+ targs.head match {
+ case EmptyTree => mkApply(selection)
+ case _ => mkApply(TypeApply(selection, targs))
+ }
}
debuglog("Replaced " + tree + " with " + res)
if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
@@ -374,25 +376,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = hostForAccessorOf(field, currentOwner.enclClass)
assert(clazz != NoSymbol, field)
debuglog("Decided for host class: " + clazz)
+
val accName = nme.protSetterName(field.originalName)
- var protAcc = clazz.info.decl(accName)
- if (protAcc == NoSymbol) {
- protAcc = clazz.newMethod(field.pos, nme.protSetterName(field.originalName))
- protAcc.setInfo(MethodType(protAcc.newSyntheticValueParams(List(clazz.typeOfThis, field.tpe)),
- UnitClass.tpe))
- clazz.info.decls.enter(protAcc)
- val code = DefDef(protAcc, {
- val obj :: value :: Nil = protAcc.paramss.head
- atPos(tree.pos) {
- Assign(
- Select(Ident(obj), field.name),
- Ident(value))
- }
- })
- debuglog("" + code)
- storeAccessorDefinition(clazz, code)
+ val protectedAccessor = clazz.info decl accName orElse {
+ val protAcc = clazz.newMethod(accName, field.pos)
+ val paramTypes = List(clazz.typeOfThis, field.tpe)
+ val params = protAcc newSyntheticValueParams paramTypes
+ val accessorType = MethodType(params, UnitClass.tpe)
+
+ protAcc setInfoAndEnter accessorType
+ val obj :: value :: Nil = params
+ storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value))))
+
+ protAcc
}
- atPos(tree.pos)(Select(This(clazz), protAcc))
+ atPos(tree.pos)(Select(This(clazz), protectedAccessor))
}
/** Does `sym` need an accessor when accessed from `currentOwner`?
@@ -455,7 +453,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
if (referencingClass.isSubClass(sym.owner.enclClass)
|| referencingClass.thisSym.isSubClass(sym.owner.enclClass)
|| referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) {
- assert(referencingClass.isClass)
+ assert(referencingClass.isClass, referencingClass)
referencingClass
} else if(referencingClass.owner.enclClass != NoSymbol)
hostForAccessorOf(sym, referencingClass.owner.enclClass)
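// Illustrative sketch (not from the patch, names approximate): the shape of the
// accessors this file synthesizes. When a protected member has to be reached
// from code the JVM would reject the direct access for (e.g. from an inner class
// or a trait body), a protected$... accessor is added to a host class that may
// access it, and the original reference is rerouted through that accessor.
class ProtectedAccessSketch(protected var counter: Int) {
  protected def bump(n: Int): Int = { counter += n; counter }
  // generated accessor shape: receiver first, then the original arguments
  def protected$bump(receiver: ProtectedAccessSketch, n: Int): Int =
    receiver.bump(n)
  // generated setter accessor shape: (receiver, value) => Unit
  def protected$setCounter(receiver: ProtectedAccessSketch, value: Int): Unit =
    receiver.counter = value
}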
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index cf8c0c596c..cf90577959 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -36,162 +36,13 @@ trait SyntheticMethods extends ast.TreeDSL {
import definitions._
import CODE._
- private object util {
- private type CM[T] = ClassManifest[T]
-
- lazy val IteratorModule = getModule("scala.collection.Iterator")
- lazy val Iterator_apply = getMember(IteratorModule, nme.apply)
- def iteratorOfType(tp: Type) = appliedType(IteratorClass.typeConstructor, List(tp))
-
- def ValOrDefDef(sym: Symbol, body: Tree) =
- if (sym.isLazy) ValDef(sym, body)
- else DefDef(sym, body)
-
- /** To avoid unchecked warnings on polymorphic classes.
- */
- def clazzTypeToTest(clazz: Symbol) = clazz.tpe.normalize match {
- case TypeRef(_, sym, args) if args.nonEmpty => ExistentialType(sym.typeParams, clazz.tpe)
- case tp => tp
- }
-
- def makeMethodPublic(method: Symbol): Symbol = (
- method setPrivateWithin NoSymbol resetFlag AccessFlags
- )
-
- def methodArg(method: Symbol, idx: Int): Tree = Ident(method.paramss.head(idx))
-
- private def applyTypeInternal(manifests: List[CM[_]]): Type = {
- val symbols = manifests map manifestToSymbol
- val container :: args = symbols
- val tparams = container.typeConstructor.typeParams
-
- // Overly conservative at present - if manifests were more usable
- // this could do a lot more.
- require(symbols forall (_ ne NoSymbol), "Must find all manifests: " + symbols)
- require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
- require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
- require(args forall (_.typeConstructor.typeParams.isEmpty), "Arguments must be unparameterized: " + args)
-
- typeRef(container.typeConstructor.prefix, container, args map (_.tpe))
- }
-
- def manifestToSymbol(m: CM[_]): Symbol = m match {
- case x: scala.reflect.AnyValManifest[_] => definitions.getClass("scala." + x)
- case _ => getClassIfDefined(m.erasure.getName)
- }
- def companionType[T](implicit m: CM[T]) =
- getModule(m.erasure.getName).tpe
-
- // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
- def applyType[M](implicit m1: CM[M]): Type =
- applyTypeInternal(List(m1))
-
- def applyType[M[X1], X1](implicit m1: CM[M[_]], m2: CM[X1]): Type =
- applyTypeInternal(List(m1, m2))
-
- def applyType[M[X1, X2], X1, X2](implicit m1: CM[M[_,_]], m2: CM[X1], m3: CM[X2]): Type =
- applyTypeInternal(List(m1, m2, m3))
-
- def applyType[M[X1, X2, X3], X1, X2, X3](implicit m1: CM[M[_,_,_]], m2: CM[X1], m3: CM[X2], m4: CM[X3]): Type =
- applyTypeInternal(List(m1, m2, m3, m4))
- }
- import util._
-
- class MethodSynthesis(val clazz: Symbol, localTyper: Typer) {
- private def isOverride(method: Symbol) =
- clazzMember(method.name).alternatives exists (sym => (sym != method) && !sym.isDeferred)
-
- private def setMethodFlags(method: Symbol): Symbol = {
- val overrideFlag = if (isOverride(method)) OVERRIDE else 0L
-
- method setFlag (overrideFlag | SYNTHETIC) resetFlag DEFERRED
- }
-
- private def finishMethod(method: Symbol, f: Symbol => Tree): Tree = {
- setMethodFlags(method)
- clazz.info.decls enter method
- logResult("finishMethod")(localTyper typed ValOrDefDef(method, f(method)))
- }
-
- private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
- val m = clazz.newMethod(clazz.pos.focus, name.toTermName)
- m setInfo info
- finishMethod(m, f)
- }
- private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
- val m = clazz.newMethod(clazz.pos.focus, name.toTermName)
- m setInfo infoFn(m)
- finishMethod(m, f)
- }
- private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
- val m = original.cloneSymbol(clazz) setPos clazz.pos.focus
- m.name = name
- finishMethod(m, f)
- }
-
- private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
- cloneInternal(original, f, original.name)
-
- def clazzMember(name: Name) = clazz.info nonPrivateMember name match {
- case NoSymbol => log("In " + clazz + ", " + name + " not found: " + clazz.info) ; NoSymbol
- case sym => sym
- }
- def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
-
- /** Function argument takes the newly created method symbol of
- * the same type as `name` in clazz, and returns the tree to be
- * added to the template.
- */
- def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
- overrideMethod(clazzMember(name))(f)
-
- def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
- cloneInternal(original, sym => f(sym setFlag OVERRIDE))
-
- def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
- cloneInternal(original, f, nameFn(original.name))
-
- def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree =
- createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType))
-
- def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree =
- createInternal(name, f, NullaryMethodType(returnType))
-
- def createMethod(original: Symbol)(f: Symbol => Tree): Tree =
- createInternal(original.name, f, original.info)
-
- def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree =
- createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
-
- def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
- createMethod(name, List(IntClass.tpe), returnType) { m =>
- val arg0 = methodArg(m, 0)
- val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
- val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
-
- Match(arg0, cases)
- }
- }
-
- // def foo() = constant
- def constantMethod(name: Name, value: Any): Tree = {
- val constant = Constant(value)
- createMethod(name, Nil, constant.tpe)(_ => Literal(constant))
- }
- // def foo = constant
- def constantNullary(name: Name, value: Any): Tree = {
- val constant = Constant(value)
- createMethod(name, constant.tpe)(_ => Literal(constant))
- }
- }
-
/** Add the synthetic methods to case classes.
*/
def addSyntheticMethods(templ: Template, clazz0: Symbol, context: Context): Template = {
if (phase.erasedTypes)
return templ
- val synthesizer = new MethodSynthesis(
+ val synthesizer = new ClassMethodSynthesis(
clazz0,
newTyper( if (reporter.hasErrors) context makeSilent false else context )
)
@@ -202,7 +53,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// in the original order.
def accessors = clazz.caseFieldAccessors sortBy { acc =>
originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name + "$").toTermName)
+ (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
}
}
val arity = accessors.size
@@ -216,16 +67,17 @@ trait SyntheticMethods extends ast.TreeDSL {
// like Manifests and Arrays which are not robust and infer things
// which they shouldn't.
val accessorLub = (
- if (opt.experimental)
+ if (opt.experimental) {
global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
}
+ }
else AnyClass.tpe
)
def forwardToRuntime(method: Symbol): Tree =
- forwardMethod(method, getMember(ScalaRunTimeModule, "_" + method.name toTermName))(This(clazz) :: _)
+ forwardMethod(method, getMember(ScalaRunTimeModule, method.name prepend "_"))(This(clazz) :: _)
// Any member, including private
def hasConcreteImpl(name: Name) =
@@ -238,14 +90,14 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
- clazzMember(name.toTermName).info match {
+ clazzMember(newTermName(name)).info match {
case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
case _ => default
}
}
def productIteratorMethod = {
createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
- gen.mkMethodCall(ScalaRunTimeModule, "typedProductIterator", List(accessorLub), List(This(clazz)))
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(This(clazz)))
)
}
def projectionMethod(accessor: Symbol, num: Int) = {
@@ -262,11 +114,10 @@ trait SyntheticMethods extends ast.TreeDSL {
/** The canEqual method for case classes.
* def canEqual(that: Any) = that.isInstanceOf[This]
*/
- def canEqualMethod: Tree = {
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
- methodArg(m, 0) IS_OBJ clazzTypeToTest(clazz)
- )
- }
+ def canEqualMethod: Tree = (
+ createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
+ )
/** The equality method for case classes.
* 0 args:
@@ -280,13 +131,13 @@ trait SyntheticMethods extends ast.TreeDSL {
* }
*/
def equalsClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
- val arg0 = methodArg(m, 0)
- val thatTest = gen.mkIsInstanceOf(arg0, clazzTypeToTest(clazz), true, false)
+ val arg0 = Ident(m.firstParam)
+ val thatTest = gen.mkIsInstanceOf(arg0, classExistentialType(clazz), true, false)
val thatCast = gen.mkCast(arg0, clazz.tpe)
def argsBody: Tree = {
val otherName = context.unit.freshTermName(clazz.name + "$")
- val otherSym = m.newValue(m.pos, otherName) setInfo clazz.tpe setFlag SYNTHETIC
+ val otherSym = m.newValue(otherName, m.pos, SYNTHETIC) setInfo clazz.tpe
val pairwise = accessors map (acc => fn(Select(This(clazz), acc), acc.tpe member nme.EQ, Select(Ident(otherSym), acc)))
val canEq = gen.mkMethodCall(otherSym, nme.canEqual_, Nil, List(This(clazz)))
def block = Block(ValDef(otherSym, thatCast), AND(pairwise :+ canEq: _*))
@@ -335,7 +186,7 @@ trait SyntheticMethods extends ast.TreeDSL {
Object_hashCode -> (() => constantMethod(nme.hashCode_, clazz.name.decode.hashCode)),
Object_toString -> (() => constantMethod(nme.toString_, clazz.name.decode))
// Not needed, as reference equality is the default.
- // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ methodArg(m, 0)))
+ // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam)))
)
/** If you serialize a singleton and then deserialize it twice,
@@ -385,7 +236,7 @@ trait SyntheticMethods extends ast.TreeDSL {
for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
val original = ddef.symbol
val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
- makeMethodPublic(newAcc)
+ newAcc.makePublic
newAcc resetFlag (ACCESSOR | PARAMACCESSOR)
ddef.rhs.duplicate
}
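// Illustrative sketch (not from the patch): hand-written versions of the
// canEqual and equals members that SyntheticMethods derives for a case class
// like `case class Pt(x: Int, y: Int)` -- an instance test, a reference-equality
// fast path, pairwise field comparison, and a canEqual call on the other side.
class Pt(val x: Int, val y: Int) {
  def canEqual(that: Any): Boolean = that.isInstanceOf[Pt]
  override def equals(that: Any): Boolean = that match {
    case other: Pt =>
      (this eq other) || (x == other.x && y == other.y && other.canEqual(this))
    case _ => false
  }
}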
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index b0500776fe..ed263cbbef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -142,11 +142,11 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
- val unit0 = currentRun.currentUnit
+ val unit0 = currentUnit
currentRun.currentUnit = unit
body
currentRun.advanceUnit
- assertFn(currentRun.currentUnit == unit, "currentUnit is " + currentRun.currentUnit + ", but unit is " + unit)
+ assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
currentRun.currentUnit = unit0
}
def check(unit: CompilationUnit) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 080a802272..4f4087a953 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -39,8 +39,6 @@ trait TypeDiagnostics {
import definitions._
import global.typer.{ infer, context }
- private def currentUnit = currentRun.currentUnit
-
/** The common situation of making sure nothing is erroneous could be
* nicer if Symbols, Types, and Trees all implemented some common interface
* in which isErroneous and similar would be placed.
@@ -93,37 +91,6 @@ trait TypeDiagnostics {
}
}
- def notAMemberMessage(pos: Position, qual: Tree, name: Name) = {
- val owner = qual.tpe.typeSymbol
- val target = qual.tpe.widen
- def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
- def nameString = decodeWithKind(name, owner)
- /** Illuminating some common situations and errors a bit further. */
- def addendum = {
- val companion = {
- if (name.isTermName && owner.isPackageClass) {
- target.member(name.toTypeName) match {
- case NoSymbol => ""
- case sym => "\nNote: %s exists, but it has no companion object.".format(sym)
- }
- }
- else ""
- }
- val semicolon = (
- if (posPrecedes(qual.pos, pos))
- "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?"
- else
- ""
- )
- companion + semicolon
- }
-
- withAddendum(qual.pos)(
- if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
- else nameString + " is not a member of " + targetKindString + target + addendum
- )
- }
-
/** An explanatory note to be added to error messages
* when there's a problem with abstract var defs */
def abstractVarMessage(sym: Symbol): String =
@@ -131,9 +98,6 @@ trait TypeDiagnostics {
"\n(Note that variables need to be initialized to be defined)"
else ""
- def notAMemberError(pos: Position, qual: Tree, name: Name) =
- context.error(pos, notAMemberMessage(pos, qual, name))
-
/** Only prints the parameter names if they're not synthetic,
* since "x$1: Int" does not offer any more information than "Int".
*/
@@ -154,21 +118,6 @@ trait TypeDiagnostics {
def alternativesString(tree: Tree) =
alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " <and>\n", "\n")
- def missingParameterTypeMsg(fun: Tree, vparam: ValDef, pt: Type) = {
- def anonMessage = (
- "\nThe argument types of an anonymous function must be fully known. (SLS 8.5)" +
- "\nExpected type was: " + pt.toLongString
- )
- val suffix =
- if (!vparam.mods.isSynthetic) ""
- else " for expanded function" + (fun match {
- case Function(_, Match(_, _)) => anonMessage
- case _ => " " + fun
- })
-
- "missing parameter type" + suffix
- }
-
/** The symbol which the given accessor represents (possibly in part).
* This is used for error messages, where we want to speak in terms
* of the actual declaration or definition, not in terms of the generated setters
@@ -181,10 +130,7 @@ trait TypeDiagnostics {
val getter = if (member.isSetter) member.getter(member.owner) else member
val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
- ( getter.owner.newValue(getter.pos, getter.name.toTermName)
- setInfo getter.tpe.resultType
- setFlag flags
- )
+ getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
}
def treeSymTypeMsg(tree: Tree): String = {
@@ -205,34 +151,6 @@ trait TypeDiagnostics {
else defaultMessage
}
- def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]): String = {
- val suffix = {
- if (missing.isEmpty) ""
- else {
- val keep = missing take 3 map (_.name)
- ".\nUnspecified value parameter%s %s".format(
- if (missing.tail.isEmpty) "" else "s",
- if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ")
- else keep.mkString("", ", ", ".")
- )
- }
- }
-
- "not enough arguments for " + treeSymTypeMsg(fun) + suffix
- }
-
- def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
- def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
-
- def resType = if (pt isWildcard) "" else " with expected result type " + pt
- def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
- def locals = alternatives(tree) flatMap (_.typeParams)
-
- withDisambiguation(locals, allTypes: _*) {
- treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType
- }
- }
-
def disambiguate(ss: List[String]) = ss match {
case Nil => Nil
case s :: ss => s :: (ss map { case `s` => "(some other)"+s ; case x => x })
@@ -449,8 +367,8 @@ trait TypeDiagnostics {
trait TyperDiagnostics {
self: Typer =>
- private def contextError(pos: Position, msg: String) = context.error(pos, msg)
- private def contextError(pos: Position, err: Throwable) = context.error(pos, err)
+ private def contextError(context0: Analyzer#Context, pos: Position, msg: String) = context0.error(pos, msg)
+ private def contextError(context0: Analyzer#Context, pos: Position, err: Throwable) = context0.error(pos, err)
private def contextWarning(pos: Position, msg: String) = context.unit.warning(pos, msg)
def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
@@ -469,14 +387,8 @@ trait TypeDiagnostics {
// Error suppression will squash some of these warnings unless we circumvent it.
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
- if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK) {
- val saved = context.reportGeneralErrors
- try {
- context.reportGeneralErrors = true
- context.warning(tree.pos, "dead code following this construct")
- }
- finally context.reportGeneralErrors = saved
- }
+ if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
+ context.warning(tree.pos, "dead code following this construct", true)
tree
}
@@ -488,8 +400,8 @@ trait TypeDiagnostics {
}
}
- def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
- def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive"
+ private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
+ private def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive"
/** Returns Some(msg) if the given tree is untyped apparently due
* to a cyclic reference, and None otherwise.
@@ -497,26 +409,38 @@ trait TypeDiagnostics {
def cyclicReferenceMessage(sym: Symbol, tree: Tree) = condOpt(tree) {
case ValDef(_, _, tpt, _) if tpt.tpe == null => "recursive "+sym+" needs type"
case DefDef(_, _, _, _, tpt, _) if tpt.tpe == null => List(cyclicAdjective(sym), sym, "needs result type") mkString " "
+ case Import(expr, selectors) =>
+ ( "encountered unrecoverable cycle resolving import." +
+ "\nNote: this is often due in part to a class depending on a definition nested within its companion." +
+ "\nIf applicable, you may wish to try moving some members into another object."
+ )
}
-
+
/** Report a type error.
*
* @param pos0 The position where to report the error
* @param ex The exception that caused the error
*/
- def reportTypeError(pos: Position, ex: TypeError) {
+ def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
if (ex.pos == NoPosition) ex.pos = pos
- if (!context.reportGeneralErrors) throw ex
+ // TODO: should be replaced by throwErrors
+ // but it seems that throwErrors excludes some of the errors that should actually be
+ // buffered, causing TypeErrors to fly around again. This needs some more investigation.
+ if (!context0.reportErrors) throw ex
if (settings.debug.value) ex.printStackTrace()
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
- contextError(ex.pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+ val pos = info.tree match {
+ case Import(expr, _) => expr.pos
+ case _ => ex.pos
+ }
+ contextError(context0, pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
if (sym == ObjectClass)
throw new FatalError("cannot redefine root "+sym)
case _ =>
- contextError(ex.pos, ex)
+ contextError(context0, ex.pos, ex)
}
}
}
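
reportTypeError now takes the reporting context explicitly. A hedged sketch of a caller (hypothetical call site, written in the style of the typer code in Typers.scala below), showing that the context that buffers or reports the error is passed in rather than picked up implicitly:

    try typed(tree, mode, pt)
    catch {
      case ex: TypeError =>
        reportTypeError(context, tree.pos, ex)   // report through the caller's own context
        setError(tree)                           // and mark the tree erroneous
    }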
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index f6f783516c..ef69e1525e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -12,14 +12,12 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
import scala.tools.nsc.util.BatchSourceFile
import mutable.ListBuffer
import symtab.Flags._
import util.Statistics
import util.Statistics._
-import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
-import scala.tools.util.EditDistance.similarString
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -60,7 +58,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
super.traverse(tree)
}
}
-/* needed for experimental version where eraly types can be type arguments
+/* needed for experimental version where early types can be type arguments
class EarlyMap(clazz: Symbol) extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
@@ -71,6 +69,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
*/
+ sealed abstract class SilentResult[+T]
+ case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
+ case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
+
def newTyper(context: Context): Typer = new NormalTyper(context)
private class NormalTyper(context : Context) extends Typer(context)
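
The hunk above introduces the SilentResult ADT that `silent` now returns instead of `Any`. A minimal sketch of the new calling convention (the call site itself is hypothetical, written as if inside a Typer like the methods further down this diff):

    silent(_.typed(tree, mode, pt)) match {
      case SilentResultValue(result) =>
        result                    // typing succeeded; keep the typed tree
      case SilentTypeError(err)      =>
        context.issue(err)        // surface the buffered AbsTypeError
        setError(tree)            // and mark the original tree erroneous
    }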
@@ -80,9 +82,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
private def isPastTyper = phase.id > currentRun.typerPhase.id
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation {
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine }
+ import TyperErrorGen._
val infer = new Inferencer(context0) {
override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
@@ -97,20 +100,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case MethodType(params, _) =>
val argResultsBuff = new ListBuffer[SearchResult]()
val argBuff = new ListBuffer[Tree]()
+ var paramFailed = false
def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
var mkArg: (Tree, Name) => Tree = mkPositionalArg
- def errorMessage(paramName: Name, paramTp: Type) =
- paramTp.typeSymbol match {
- case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
- case _ =>
- "could not find implicit value for "+
- (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
- else "parameter "+paramName+": ")+paramTp
- }
-
// DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
//
// apply the substitutions (undet type param -> type) that were determined
@@ -120,15 +115,27 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = inferImplicit(fun, paramTp, true, false, context)
+ val res = if (paramFailed) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
argResultsBuff += res
if (res != SearchFailure) {
argBuff += mkArg(res.tree, param.name)
} else {
mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
- if (!param.hasDefault)
- context.error(fun.pos, errorMessage(param.name, param.tpe))
+ if (!param.hasDefault && !paramFailed) {
+ context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
+ case Some(divergentImplicit) =>
+ // DivergentImplicit error has higher priority than "no implicit found"
+ // no need to issue the problem again if we are still in silent mode
+ if (context.reportErrors) {
+ context.issue(divergentImplicit)
+ context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ }
+ case None =>
+ NoImplicitFoundError(fun, param)
+ }
+ paramFailed = true
+ }
/* else {
TODO: alternative (to expose implicit search failure more) -->
resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg
@@ -149,6 +156,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ErrorType =>
fun
}
+
+ def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
+ inferView(tree, from, to, reportAmbiguous, true)
/** Infer an implicit conversion (``view'') between two types.
* @param tree The tree which needs to be converted.
@@ -157,8 +167,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* @param reportAmbiguous Should ambiguous implicit errors be reported?
* False iff we search for a view to find out
* whether one type is coercible to another.
+ * @param saveErrors Should ambiguous and divergent implicit errors that were buffered
+ * during the inference of a view be put into the original buffer?
+ * False iff we don't care about them.
*/
- def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = {
+ def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
debuglog("infer view from "+from+" to "+to)//debug
if (isPastTyper) EmptyTree
else from match {
@@ -167,7 +180,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case PolyType(_, _) => EmptyTree
case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context)
+ val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context, saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) result.subst traverse tree
result.tree
}
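
A hedged sketch of the two inferView arities added above: the first line mirrors the call that adaptToMember makes later in this diff, while the second, probe-only call site is hypothetical.

    val coercion = inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) // keeps buffered errors when saveErrors = true
    val probe    = inferView(tree, tree.tpe, pt, false, false)                            // probe only; ambiguity/divergence stays out of the buffer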
@@ -205,22 +218,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* @return ...
*/
def checkStable(tree: Tree): Tree =
- if (treeInfo.isExprSafeToInline(tree)) tree
- else errorTree(
- tree,
- "stable identifier required, but "+tree+" found."+
- (if (isStableExceptVolatile(tree)) {
- val tpe = tree.symbol.tpe match {
- case PolyType(_, rtpe) => rtpe
- case t => t
- }
- "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile."
- } else ""))
+ if (treeInfo.isExprSafeToInline(tree)) tree else UnstableTreeError(tree)
/** Would tree be a stable (i.e. a pure expression) if the type
* of its symbol was not volatile?
*/
- private def isStableExceptVolatile(tree: Tree) = {
+ protected def isStableExceptVolatile(tree: Tree) = {
tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
{ val savedTpe = tree.symbol.info
val savedSTABLE = tree.symbol getFlag STABLE
@@ -234,21 +237,24 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
/** Check that `tpt` refers to a non-refinement class type */
- def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean) {
- def errorNotClass(found: AnyRef) = error(tpt.pos, "class type required but "+found+" found")
- def check(tpe: Type): Unit = tpe.normalize match {
+ def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean): Boolean = {
+ def errorNotClass(found: AnyRef) = { ClassTypeRequiredError(tpt, found); false }
+ def check(tpe: Type): Boolean = tpe.normalize match {
case TypeRef(pre, sym, _) if sym.isClass && !sym.isRefinementClass =>
- if (stablePrefix && !isPastTyper) {
- if (!pre.isStable)
- error(tpt.pos, "type "+pre+" is not a stable prefix")
- // A type projection like X#Y can get by the stable check if the
- // prefix is singleton-bounded, so peek at the tree too.
- else tpt match {
+ if (stablePrefix && !isPastTyper)
+ if (!pre.isStable) {
+ TypeNotAStablePrefixError(tpt, pre)
+ false
+ } else
+ // A type projection like X#Y can get by the stable check if the
+ // prefix is singleton-bounded, so peek at the tree too.
+ tpt match {
case SelectFromTypeTree(qual, _) if !isSingleType(qual.tpe) => errorNotClass(tpt)
- case _ => ;
- }
- }
- case ErrorType => ;
+ case _ => true
+ }
+ else
+ true
+ case ErrorType => true
case PolyType(_, restpe) => check(restpe)
case ExistentialType(_, restpe) if existentialOK => check(restpe)
case AnnotatedType(_, underlying, _) => check(underlying)
@@ -264,17 +270,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* @return <code>true</code> if <code>tp</code> is not a subtype of itself.
*/
def checkNonCyclic(pos: Position, tp: Type): Boolean = {
- def checkNotLocked(sym: Symbol): Boolean = {
+ def checkNotLocked(sym: Symbol) = {
sym.initialize
- sym.lockOK || {error(pos, "cyclic aliasing or subtyping involving "+sym); false}
+ sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
}
tp match {
case TypeRef(pre, sym, args) =>
- (checkNotLocked(sym)) && (
- !sym.isNonClassType ||
- checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym) // @M! info for a type ref to a type parameter now returns a polytype
- // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
- )
+ checkNotLocked(sym) &&
+ ((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym))
+ // @M! info for a type ref to a type parameter now returns a polytype
+ // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
+
case SingleType(pre, sym) =>
checkNotLocked(sym)
/*
@@ -293,10 +299,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
- lockedSym.lock {
- throw new TypeError("illegal cyclic reference involving " + lockedSym)
- }
- checkNonCyclic(pos, tp)
+ if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+ else checkNonCyclic(pos, tp)
} finally {
lockedSym.unlock()
}
@@ -312,26 +316,24 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def checkParamsConvertible(pos: Position, tpe: Type) {
- tpe match {
- case MethodType(formals, restpe) =>
- /*
- if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
- error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters")
- if (formals exists (isRepeatedParamType(_)))
- error(pos, "methods with `*`-parameters cannot be converted to function values");
- */
- if (restpe.isDependent)
- error(pos, "method with dependent type "+tpe+" cannot be converted to function value")
- checkParamsConvertible(pos, restpe)
- case _ =>
- }
+ def checkParamsConvertible(tree: Tree, tpe0: Type) {
+ def checkParamsConvertible0(tpe: Type) =
+ tpe match {
+ case MethodType(formals, restpe) =>
+ /*
+ if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
+ error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters")
+ if (formals exists (isRepeatedParamType(_)))
+ error(pos, "methods with `*`-parameters cannot be converted to function values");
+ */
+ if (restpe.isDependent)
+ DependentMethodTpeConversionToFunctionError(tree, tpe)
+ checkParamsConvertible(tree, restpe)
+ case _ =>
+ }
+ checkParamsConvertible0(tpe0)
}
- def checkStarPatOK(pos: Position, mode: Int) =
- if ((mode & STARmode) == 0 && !isPastTyper)
- error(pos, "star patterns must correspond with varargs parameters")
-
/** Check that type of given tree does not contain local or private
* components.
*/
@@ -362,13 +364,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
check(NoSymbol, scope, pt, tree)
- def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
+ private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
this.owner = owner
this.scope = scope
hiddenSymbols = List()
val tp1 = apply(tree.tpe)
if (hiddenSymbols.isEmpty) tree setType tp1
- else if (hiddenSymbols exists (_.isErroneous)) setError(tree)
+ else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree)
else if (isFullyDefined(pt)) tree setType pt
else if (tp1.typeSymbol.isAnonymousClass)
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
@@ -376,10 +378,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
tree setType packSymbols(hiddenSymbols.reverse, tp1)
else if (!phase.erasedTypes) { // privates
val badSymbol = hiddenSymbols.head
- error(tree.pos,
- (if (badSymbol.isPrivate) "private " else "") + badSymbol +
- " escapes its defining scope as part of type "+tree.tpe)
- setError(tree)
+ SymbolEscapesScopeError(tree, badSymbol)
} else tree
}
@@ -441,28 +440,66 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
/** The qualifying class
* of a this or super with prefix <code>qual</code>.
+ * When packageOK is false, a package class is not accepted as the qualifying class.
*/
- def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean): Symbol =
+ def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean = false): Option[Symbol] =
context.enclClass.owner.ownerChain.find(o => qual.isEmpty || o.isClass && o.name == qual) match {
case Some(c) if packageOK || !c.isPackageClass =>
- c
+ Some(c)
case _ =>
- error(
- tree.pos,
- if (qual.isEmpty) tree+" can be used only in a class, object, or template"
- else qual+" is not an enclosing class")
- NoSymbol
+ QualifyingClassError(tree, qual)
+ None
}
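
Because qualifyingClass now issues QualifyingClassError itself and answers with an Option[Symbol], call sites can pattern match instead of comparing against NoSymbol. A hypothetical helper sketching that shape (not part of the patch):

    def typedQualifyingClass(tree: Tree, qual: Name): Tree =
      qualifyingClass(tree, qual) match {
        case Some(clazz) => tree setSymbol clazz setType clazz.thisType
        case None        => setError(tree)   // error already issued by qualifyingClass
      }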
/** The typer for an expression, depending on where we are. If we are before a superclass
* call, this is a typer over a constructor context; otherwise it is the current typer.
*/
- def constrTyperIf(inConstr: Boolean): Typer =
+ @inline
+ final def constrTyperIf(inConstr: Boolean): Typer =
if (inConstr) {
- assert(context.undetparams.isEmpty)
+ assert(context.undetparams.isEmpty, context.undetparams)
newTyper(context.makeConstructorContext)
} else this
+ @inline
+ final def withCondConstrTyper[T](inConstr: Boolean)(f: Typer => T): T =
+ if (inConstr) {
+ assert(context.undetparams.isEmpty, context.undetparams)
+ val c = context.makeConstructorContext
+ typerWithLocalContext(c)(f)
+ } else {
+ f(this)
+ }
+
+ @inline
+ final def typerWithCondLocalContext[T](c: => Context)(cond: Boolean)(f: Typer => T): T =
+ if (cond) typerWithLocalContext(c)(f) else f(this)
+
+ @inline
+ final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = {
+ val res = f(newTyper(c))
+ if (c.hasErrors)
+ context.updateBuffer(c.flushAndReturnBuffer())
+ res
+ }
+
+ @inline
+ final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
+ val res = f(newTyper(c))
+ if (c.hasErrors)
+ context.issue(c.errBuffer.head)
+ res
+ }
+
+ @inline
+ final def withSavedContext[T](c: Context)(f: => T) = {
+ val savedErrors = c.flushAndReturnBuffer()
+ val res = f
+ c.updateBuffer(savedErrors)
+ res
+ }
+
+
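
A hedged illustration of what typerWithLocalContext buys its callers (the usage is hypothetical; makeNewScope, hasErrors, updateBuffer and flushAndReturnBuffer are the same members used elsewhere in this diff):

    val c      = context.makeNewScope(tree, context.owner)
    val result = typerWithLocalContext(c)(_.typed(tree))
    // roughly equivalent to:
    //   val result = newTyper(c).typed(tree)
    //   if (c.hasErrors) context.updateBuffer(c.flushAndReturnBuffer())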
/** The typer for a label definition. If this is part of a template we
* first have to enter the label definition.
*/
@@ -573,9 +610,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
inferExprAlternative(tree, pt)
val sym = tree.symbol
- def fail() = errorTree(tree, sym.kindString + " " + sym.fullName + " is not a value")
+ def fail() = NotAValueError(tree, sym)
- if (tree.tpe.isError) tree
+ if (tree.isErrorTyped) tree
else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
if (sym.isValue) checkStable(tree)
else fail()
@@ -629,15 +666,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def silent[T](op: Typer => T,
- reportAmbiguousErrors: Boolean = context.reportAmbiguousErrors,
- newtree: Tree = context.tree): Any /* in fact, TypeError or T */ = {
+ reportAmbiguousErrors: Boolean = context.ambiguousErrors,
+ newtree: Tree = context.tree): SilentResult[T] = {
val rawTypeStart = startCounter(rawTypeFailed)
val findMemberStart = startCounter(findMemberFailed)
val subtypeStart = startCounter(subtypeFailed)
val failedSilentStart = startTimer(failedSilentNanos)
try {
- if (context.reportGeneralErrors ||
- reportAmbiguousErrors != context.reportAmbiguousErrors ||
+ if (context.reportErrors ||
+ reportAmbiguousErrors != context.ambiguousErrors ||
newtree != context.tree) {
val context1 = context.makeSilent(reportAmbiguousErrors, newtree)
context1.undetparams = context.undetparams
@@ -648,40 +685,29 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- result
+ if (context1.hasErrors) SilentTypeError(context1.errBuffer.head)
+ else SilentResultValue(result)
} else {
- op(this)
+ assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
+ withSavedContext(context){
+ val res = op(this)
+ val errorsToReport = context.flushAndReturnBuffer()
+ if (errorsToReport.isEmpty) SilentResultValue(res) else SilentTypeError(errorsToReport.head)
+ }
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError =>
+ // fallback in case TypeError is still thrown
+ // @H this happens for example in cps annotation checker
stopCounter(rawTypeFailed, rawTypeStart)
stopCounter(findMemberFailed, findMemberStart)
stopCounter(subtypeFailed, subtypeStart)
stopTimer(failedSilentNanos, failedSilentStart)
- ex
+ SilentTypeError(TypeErrorWrapper(ex))
}
}
- /** Utility method: Try op1 on tree. If that gives an error try op2 instead.
- */
- def tryBoth(tree: Tree)(op1: (Typer, Tree) => Tree)(op2: (Typer, Tree) => Tree): Tree =
- silent(op1(_, tree)) match {
- case result1: Tree =>
- result1
- case ex1: TypeError =>
- silent(op2(_, resetAllAttrs(tree))) match {
- case result2: Tree =>
-// println("snd succeeded: "+result2)
- result2
- case ex2: TypeError =>
- reportTypeError(tree.pos, ex1)
- setError(tree)
- }
- }
-
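
The removed tryBoth helper has no direct replacement in this hunk; if a caller still needs the try-then-fallback shape, it can be re-expressed with the new SilentResult API along these lines (a sketch, not code from the patch):

    def tryBoth(tree: Tree)(op1: (Typer, Tree) => Tree)(op2: (Typer, Tree) => Tree): Tree =
      silent(op1(_, tree)) match {
        case SilentResultValue(result1) => result1
        case SilentTypeError(err1)      =>
          silent(op2(_, resetAllAttrs(tree))) match {
            case SilentResultValue(result2) => result2
            case SilentTypeError(_)         =>
              context.issue(err1)   // report the first failure, as the old helper did via reportTypeError
              setError(tree)
          }
      }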
- def isCodeType(tpe: Type) = tpe.typeSymbol isNonBottomSubClass CodeClass
-
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -724,8 +750,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
// dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
// needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
- context.undetparams =
- inferExprInstance(tree, context.extractUndetparams(), pt,
+ context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
// approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
mt.approximate,
// if we are looking for a manifest, instantiate type to Nothing anyway,
@@ -736,18 +761,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
useWeaklyCompatible = true) // #3808
}
- val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree))
- if (original != EmptyTree && pt != WildcardType)
- typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match {
- case result: Tree => result
- case ex: TypeError =>
- debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
- val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
- }
- else
- typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ // avoid throwing spurious DivergentImplicit errors
+ if (context.hasErrors)
+ return setError(tree)
+
+ withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
+ if (original != EmptyTree && pt != WildcardType)
+ typer1.silent(tpr => {
+ val withImplicitArgs = tpr.applyImplicitArgs(tree)
+ if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ else tpr.typed(withImplicitArgs, mode, pt)
+ }) match {
+ case SilentResultValue(result) =>
+ result
+ case _ =>
+ debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
+ val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
+ tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
+ }
+ else
+ typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ }
}
def instantiateToMethodType(mt: MethodType): Tree = {
@@ -758,11 +793,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (!meth.isConstructor && !meth.isMacro && isFunctionType(pt)) { // (4.2)
debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
- checkParamsConvertible(tree.pos, tree.tpe)
+ checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree)
// println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
- if (meth.typeParams.nonEmpty) {
+ if (context.undetparams.nonEmpty) {
// #2624: need to infer type arguments for eta expansion of a polymorphic method
// context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
// need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
@@ -775,9 +810,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
} else if (context.implicitsEnabled) {
- errorTree(tree, "missing arguments for " + meth + meth.locationString +
- (if (meth.isConstructor) ""
- else ";\nfollow this method with `_' if you want to treat it as a partially applied function"))
+ MissingArgsForMethodTpeError(tree, meth)
} else {
setError(tree)
}
@@ -792,8 +825,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
- errorTree(tree, tree.symbol + " takes type parameters")
- tree setType tree.tpe
+ MissingTypeParametersError(tree)
} else if ( // (7.1) @M: check kind-arity
// @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
(inHKMode(mode)) &&
@@ -808,9 +840,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// Note that we treat Any and Nothing as kind-polymorphic.
// We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
// (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
- errorTree(tree, tree.tpe + " takes " + countElementsAsString(tree.tpe.typeParams.length, "type parameter") +
- ", expected: " + countAsString(pt.typeParams.length))
- tree setType tree.tpe
+ KindArityMismatchError(tree, pt)
} else tree match { // (6)
case TypeTree() => tree
case _ => TypeTree(tree.tpe) setOriginal (tree) setPos (tree.pos)
@@ -836,12 +866,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
tree
}
} else {
- errorTree(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ CaseClassConstructorError(tree)
}
}
def insertApply(): Tree = {
- assert(!inHKMode(mode)) //@M
+ assert(!inHKMode(mode), modeString(mode)) //@M
val qual = adaptToName(tree, nme.apply) match {
case id @ Ident(_) =>
val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
@@ -911,7 +941,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (tree.isType)
adaptType()
- else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
+ else if (inExprModeButNot(mode, FUNmode) && tree.symbol != null && tree.symbol.isMacro && !tree.isDef) {
+ val tree1 = expandMacro(tree)
+ if (tree1.isErroneous) tree1 else typed(tree1, mode, pt)
+ } else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
adaptConstrPattern()
else if (inAllModes(mode, EXPRmode | FUNmode) &&
!tree.tpe.isInstanceOf[MethodType] &&
@@ -919,7 +952,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
applyPossible)
insertApply()
else if (!context.undetparams.isEmpty && !inPolyMode(mode)) { // (9)
- assert(!inHKMode(mode)) //@M
+ assert(!inHKMode(mode), modeString(mode)) //@M
if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
instantiateExpectingUnit(tree, mode)
else
@@ -957,7 +990,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (!context.undetparams.isEmpty) {
return instantiate(tree, mode, pt)
}
- if (context.implicitsEnabled && !tree.tpe.isError && !pt.isError) {
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
// (14); the condition prevents chains of views
debuglog("inferring view from " + tree.tpe + " to " + pt)
val coercion = inferView(tree, tree.tpe, pt, true)
@@ -969,9 +1002,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
return typed(Select(tree, meth), mode, pt)
}
if (coercion != EmptyTree) {
- debuglog("inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe)
- return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed(
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv.value)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
}
}
}
@@ -979,31 +1018,34 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
log("error tree = " + tree)
if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
}
- try {
- typeErrorTree(tree, tree.tpe, pt)
- } catch {
- case ex: TypeError =>
- if (isPastTyper && pt.existentialSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt))
- } else
- throw ex
+
+ val found = tree.tpe
+ val req = pt
+ if (!found.isErroneous && !req.isErroneous) {
+ if (!context.reportErrors && isPastTyper && req.existentialSkolems.nonEmpty) {
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+ // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
+ adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt))
+ } else {
+ // create an actual error
+ AdaptTypeError(tree, found, req)
+ }
}
+ setError(tree)
}
}
}
@@ -1020,7 +1062,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
val savedUndetparams = context.undetparams
silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
- case t: Tree => t
+ case SilentResultValue(t) => t
case _ =>
context.undetparams = savedUndetparams
val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
@@ -1046,17 +1088,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// Note: implicit arguments are still inferred (this kind of "chaining" is allowed)
)
}
+
+ def adaptToMember(qual: Tree, searchTemplate: Type): Tree =
+ adaptToMember(qual, searchTemplate, true, true)
+ def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree =
+ adaptToMember(qual, searchTemplate, reportAmbiguous, true)
- def adaptToMember(qual: Tree, searchTemplate: Type): Tree = {
+ def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
if (isAdaptableWithView(qual)) {
qual.tpe.widen.normalize match {
case et: ExistentialType =>
qual setType et.skolemizeExistential(context.owner, qual) // open the existential
case _ =>
}
- inferView(qual, qual.tpe, searchTemplate, true) match {
+ inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
case EmptyTree => qual
- case coercion => typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
+ case coercion =>
+ if (settings.logImplicitConv.value)
+ unit.echo(qual.pos,
+ "applied implicit conversion from %s to %s = %s".format(
+ qual.tpe, searchTemplate, coercion.symbol.defString))
+
+ typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
}
}
else qual
@@ -1070,13 +1123,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* If no conversion is found, return `qual` unchanged.
*
*/
- def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type): Tree = {
+ def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
- adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe))
+ adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
if (pt != WildcardType) {
silent(_ => doAdapt(pt)) match {
- case result: Tree if result != qual =>
+ case SilentResultValue(result) if result != qual =>
result
case _ =>
debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
@@ -1086,30 +1139,32 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
doAdapt(pt)
}
- /** Try o apply an implicit conversion to `qual` to that it contains
- * a method `name`. If that's ambiguous try taking arguments into account using `adaptToArguments`.
+ /** Try to apply an implicit conversion to `qual` so that it contains
+ * a method `name`. If that's ambiguous try taking arguments into
+ * account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int): Tree = {
- try {
- adaptToMember(qual, HasMember(name))
- } catch {
- case ex: TypeError =>
- // this happens if implicits are ambiguous; try again with more context info.
- // println("last ditch effort: "+qual+" . "+name)
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def onError(reportError: => Tree): Tree = {
context.tree match {
- case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => // try handling the arguments
- // println("typing args: "+args)
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
silent(_.typedArgs(args, mode)) match {
- case args: List[_] =>
- adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType)
- case _ =>
- throw ex
+ case SilentResultValue(xs) =>
+ val args = xs.asInstanceOf[List[Tree]]
+ if (args exists (_.isErrorTyped))
+ reportError
+ else
+ adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
+ case _ =>
+ reportError
}
case _ =>
- // println("not in an apply: "+context.tree+"/"+tree)
- throw ex
+ reportError
}
}
+ silent(_.adaptToMember(qual, HasMember(name), false)) match {
+ case SilentResultValue(res) => res
+ case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
+ }
}
/** Try to apply an implicit conversion to `qual` to that it contains a
@@ -1149,7 +1204,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// If first parent is a trait, make it first mixin and add its superclass as first parent
while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
val supertpt1 = typedType(supertpt)
- if (!supertpt1.tpe.isError) {
+ if (!supertpt1.isErrorTyped) {
mixins = supertpt1 :: mixins
supertpt = TypeTree(supertpt1.tpe.parents.head) setPos supertpt.pos.focus
}
@@ -1188,29 +1243,33 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
})
val outercontext = context.outer
- assert(clazz != NoSymbol)
+ assert(clazz != NoSymbol, templ)
val cscope = outercontext.makeNewScope(constr, outercontext.owner)
val cbody2 = newTyper(cscope) // called both during completion AND typing.
.typePrimaryConstrBody(clazz,
cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
+
superCall match {
case Apply(_, _) =>
val sarg = treeInfo.firstArgument(superCall)
if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- error(sarg.pos, firstParent+" is a trait; does not take constructor arguments")
- if (!supertparams.isEmpty) supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ ConstrArgsInTraitParentTpeError(sarg, firstParent)
+ if (!supertparams.isEmpty)
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
case _ =>
- if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
val preSuperVals = treeInfo.preSuperFields(templ.body)
if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
else
- (preSuperStats, preSuperVals).zipped map { case (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe }
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
case _ =>
- if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
/* experimental: early types as type arguments
val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
@@ -1243,8 +1302,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
catch {
case ex: TypeError =>
- templ.tpe = null
- reportTypeError(templ.pos, ex)
+ // fallback in case of cyclic errors
+ // @H none of the tests enter here but I couldn't rule it out
+ ParentTypesError(templ, ex)
List(TypeTree(AnyRefClass.tpe))
}
@@ -1263,30 +1323,29 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* </ul>
*/
def validateParentClasses(parents: List[Tree], selfType: Type) {
+ val pending = ListBuffer[AbsTypeError]()
def validateParentClass(parent: Tree, superclazz: Symbol) {
- if (!parent.tpe.isError) {
+ if (!parent.isErrorTyped) {
val psym = parent.tpe.typeSymbol.initialize
checkClassType(parent, false, true)
if (psym != superclazz) {
if (psym.isTrait) {
val ps = psym.info.parents
if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol))
- error(parent.pos, "illegal inheritance; super"+superclazz+
- "\n is not a subclass of the super"+ps.head.typeSymbol+
- "\n of the mixin " + psym)
+ pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym)
} else {
- error(parent.pos, psym+" needs to be a trait to be mixed in")
+ pending += ParentNotATraitMixinError(parent, psym)
}
}
- if (psym.isFinal) {
- error(parent.pos, "illegal inheritance from final "+psym)
- }
+ if (psym.isFinal)
+ pending += ParentFinalInheritanceError(parent, psym)
+
if (psym.isSealed && !phase.erasedTypes) {
// AnyVal is sealed, but we have to let the value classes through manually
if (context.unit.source.file == psym.sourceFile || isValueClass(context.owner))
psym addChild context.owner
else
- error(parent.pos, "illegal inheritance from sealed "+psym+": " + context.unit.source.file.canonicalPath + " != " + psym.sourceFile.canonicalPath)
+ pending += ParentSealedInheritanceError(parent, psym)
}
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
@@ -1298,17 +1357,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
//Console.println(context.owner);//DEBUG
//Console.println(context.owner.unsafeTypeParams);//DEBUG
//Console.println(List.fromArray(context.owner.info.closure));//DEBUG
- error(parent.pos, "illegal inheritance;\n self-type "+
- selfType+" does not conform to "+parent +
- "'s selftype "+parent.tpe.typeOfThis)
+ pending += ParentSelfTypeConformanceError(parent, selfType)
if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
}
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
- error(parent.pos, psym+" is inherited twice")
+ pending += ParentInheritedTwiceError(parent, psym)
}
}
-
- if (!parents.isEmpty && !parents.head.tpe.isError)
+ if (!parents.isEmpty && parents.forall(!_.isErrorTyped))
for (p <- parents) validateParentClass(p, parents.head.tpe.typeSymbol)
/*
@@ -1318,13 +1374,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
*/
+ pending.foreach(ErrorUtils.issueTypeError)
}
def checkFinitary(classinfo: ClassInfoType) {
val clazz = classinfo.typeSymbol
+
for (tparam <- clazz.typeParams) {
if (classinfo.expansiveRefs(tparam) contains tparam) {
- error(tparam.pos, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive")
val newinfo = ClassInfoType(
classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
classinfo.decls,
@@ -1335,6 +1392,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case _ => newinfo
}
}
+ FinitaryError(tparam)
}
}
}
@@ -1346,12 +1404,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedClassDef(cdef: ClassDef): Tree = {
// attributes(cdef)
val clazz = cdef.symbol
- val typedMods = removeAnnotations(cdef.mods)
- assert(clazz != NoSymbol)
+ val typedMods = typedModifiers(cdef.mods)
+ assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = newTyper(context.make(cdef.impl, clazz, new Scope))
- .typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
+ _.typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ }
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if ((clazz != ClassfileAnnotationClass) &&
(clazz isNonBottomSubClass ClassfileAnnotationClass))
@@ -1382,19 +1441,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize)
val clazz = mdef.symbol.moduleClass
- val typedMods = removeAnnotations(mdef.mods)
+ val typedMods = typedModifiers(mdef.mods)
assert(clazz != NoSymbol, mdef)
-
- val typer0 = newTyper(context.make(mdef.impl, clazz, new Scope))
- val impl1 = typer0.typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
- if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil
- else {
- clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
- }
- )
- })
+ val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
+ _.typedTemplate(mdef.impl, {
+ parentTypes(mdef.impl) ++ (
+ if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+ }
val impl2 = finishMethodSynthesis(impl1, clazz, context)
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
@@ -1482,8 +1541,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
/** Remove definition annotations from modifiers (they have been saved
* into the symbol's ``annotations'' in the type completer / namer)
+ *
+ * However reification does need annotation definitions to proceed.
+ * Unfortunately, AnnotationInfo doesn't provide enough info to reify it in the general case.
+ * The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
+ * that involve locally defined annotations. See more about that in Reifiers.scala.
+ *
+ * That's why the original tree gets saved into the ``original'' field of AnnotationInfo (happens elsewhere).
+ * The field doesn't get pickled/unpickled and exists only during a single compilation run.
+ * This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
- def removeAnnotations(mods: Modifiers): Modifiers =
+ def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
/**
@@ -1494,7 +1562,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// attributes(vdef)
val sym = vdef.symbol.initialize
val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
- val typedMods = removeAnnotations(vdef.mods)
+ val typedMods = typedModifiers(vdef.mods)
// complete lazy annotations
val annots = sym.annotations
@@ -1503,14 +1571,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (sym.hasAnnotation(definitions.VolatileAttr)) {
if (!sym.isMutable)
- error(vdef.pos, "values cannot be volatile")
+ VolatileValueError(vdef)
else if (sym.isFinal)
- error(vdef.pos, "final vars cannot be volatile")
+ FinalVolatileVarError(vdef)
}
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && !isPastTyper)
- error(vdef.pos, "local variables must be initialized")
+ LocalVarUninitializedError(vdef)
vdef.rhs
} else {
val tpt2 = if (sym.hasDefault) {
@@ -1556,18 +1624,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(call, List())
}
val (superConstr, superArgs) = decompose(rhs)
- assert(superConstr.symbol ne null)//debug
+ assert(superConstr.symbol ne null, superConstr)//debug
+ val pending = ListBuffer[AbsTypeError]()
// an object cannot be allowed to pass a reference to itself to a superconstructor
// because of initialization issues; bug #473
for (arg <- superArgs ; tree <- arg) {
val sym = tree.symbol
if (sym != null && (sym.info.baseClasses contains clazz)) {
if (sym.isModule)
- error(tree.pos, "super constructor cannot be passed a self reference unless parameter is declared by-name")
+ pending += SuperConstrReferenceError(tree)
tree match {
case This(qual) =>
- error(tree.pos, "super constructor arguments cannot reference unconstructed `this`")
+ pending += SuperConstrArgsThisReferenceError(tree)
case _ => ()
}
}
@@ -1600,6 +1669,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
}
+ pending.foreach(ErrorUtils.issueTypeError)
}
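
The pending buffer introduced above follows the same pattern as validateParentClasses earlier in this diff: accumulate AbsTypeErrors while traversing and issue them in one pass instead of stopping at the first offending argument. A compressed, hypothetical restatement (referencesUnconstructedThis stands in for the checks above):

    val pending = ListBuffer[AbsTypeError]()
    for (arg <- superArgs; tree <- arg)
      if (referencesUnconstructedThis(tree))      // hypothetical predicate
        pending += SuperConstrArgsThisReferenceError(tree)
    pending.foreach(ErrorUtils.issueTypeError)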
/** Check if a structurally defined method violates implementation restrictions.
@@ -1648,8 +1718,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
lookupVariable(name.toString.substring(1), enclClass) match {
case Some(repl) =>
silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
- case tpt: Tree =>
- val alias = enclClass.newAliasType(useCase.pos, name.toTypeName)
+ case SilentResultValue(tpt) =>
+ val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
alias setInfo typeFun(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
context.scope.enter(alias)
@@ -1698,19 +1768,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
- error(vparam1.pos, "*-parameter must come last")
+ StarParamNotLastError(vparam1)
var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
- val typedMods = removeAnnotations(ddef.mods)
+ val typedMods = typedModifiers(ddef.mods)
var rhs1 =
if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors
if (!meth.isPrimaryConstructor &&
(!meth.owner.isClass ||
meth.owner.isModuleClass ||
meth.owner.isAnonOrRefinementClass))
- error(ddef.pos, "constructor definition not allowed here")
+ InvalidConstructorDefError(ddef)
typed(ddef.rhs)
} else if (meth.isMacro) {
EmptyTree
@@ -1725,30 +1795,26 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (!isPastTyper && meth.owner.isClass &&
meth.paramss.exists(ps => ps.exists(_.hasDefaultFlag) && isRepeatedParamType(ps.last.tpe)))
- error(meth.pos, "a parameter section with a `*'-parameter is not allowed to have default arguments")
+ StarWithDefaultError(meth)
if (!isPastTyper) {
val allParams = meth.paramss.flatten
for (p <- allParams) {
for (n <- p.deprecatedParamName) {
if (allParams.exists(p1 => p1.name == n || (p != p1 && p1.deprecatedParamName.exists(_ == n))))
- error(p.pos, "deprecated parameter name "+ n +" has to be distinct from any other parameter name (deprecated or not).")
+ DeprecatedParamNameError(p, n)
}
}
}
if (meth.isStructuralRefinementMember)
checkMethodStructuralCompatible(meth)
-
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
}
- def typedTypeDef(tdef: TypeDef): TypeDef = {
- def typeDefTyper = {
- if(tdef.tparams isEmpty) Typer.this
- else newTyper(context.makeNewScope(tdef, tdef.symbol))
+ def typedTypeDef(tdef: TypeDef): TypeDef =
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
+ _.typedTypeDef0(tdef)
}
- typeDefTyper.typedTypeDef0(tdef)
- }
// call typedTypeDef instead
// a TypeDef with type parameters must always be type checked in a new scope
@@ -1756,7 +1822,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
- val typedMods = removeAnnotations(tdef.mods)
+ val typedMods = typedModifiers(tdef.mods)
// complete lazy annotations
val annots = tdef.symbol.annotations
@@ -1770,10 +1836,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
checkNonCyclic(tdef.symbol)
if (tdef.symbol.owner.isType)
rhs1.tpe match {
- case TypeBounds(lo1, hi1) =>
- if (!(lo1 <:< hi1))
- error(tdef.pos, "lower bound "+lo1+" does not conform to upper bound "+hi1)
- case _ =>
+ case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1)
+ case _ => ()
}
treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
}
@@ -1783,7 +1847,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
- context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), UnitClass.tpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
case _ =>
}
}
@@ -1804,7 +1868,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
- context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), restpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
ldef.params foreach (param => param.tpe = param.symbol.tpe)
treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs2) setSymbol sym2 setType restpe
@@ -1893,9 +1957,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
// verify no _* except in last position
for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
- error(x.pos, "_* may only come last")
+ StarPositionInPatternError(x)
- val pat1: Tree = typedPattern(cdef.pat, pattpe)
+ val pat1 = typedPattern(cdef.pat, pattpe)
// When case classes have more than two parameter lists, the pattern ends
// up typed as a method. We only pattern match on the first parameter
// list, so substitute the final result type of the method, i.e. the type
@@ -1936,10 +2000,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
*/
def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
val numVparams = fun.vparams.length
- val codeExpected = !forMSIL && (pt.typeSymbol isNonBottomSubClass CodeClass)
-
if (numVparams > definitions.MaxFunctionArity)
- return errorTree(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters")
+ return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
if ((isFunctionType(pt)
@@ -1954,20 +2016,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
- val (clazz, argpts, respt) = decompose(if (codeExpected) pt.normalize.typeArgs.head else pt)
-
+ val (clazz, argpts, respt) = decompose(pt)
if (argpts.lengthCompare(numVparams) != 0)
- errorTree(fun, "wrong number of parameters; expected = " + argpts.length)
+ WrongNumberOfParametersError(fun, argpts)
else {
- val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) =>
+ val vparamSyms = map2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
else {
fun match {
- case etaExpansion(vparams, fn, args) if !codeExpected =>
+ case etaExpansion(vparams, fn, args) =>
silent(_.typed(fn, forFunMode(mode), pt)) match {
- case fn1: Tree if context.undetparams.isEmpty =>
+ case SilentResultValue(fn1) if context.undetparams.isEmpty =>
// if context,undetparams is not empty, the function was polymorphic,
// so we need the missing arguments to infer its type. See #871
//println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
@@ -1978,7 +2039,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
case _ =>
}
- error(vparam.pos, missingParameterTypeMsg(fun, vparam, pt))
+ MissingParameterTypeError(fun, vparam, pt)
ErrorType
}
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
@@ -1992,18 +2053,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// for (vparam <- vparams) {
// checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
// }
- var body = typed(fun.body, respt)
+ val body1 = typed(fun.body, respt)
val formals = vparamSyms map (_.tpe)
- val restpe = packedType(body, fun.symbol).deconst.resultType
+ val restpe = packedType(body1, fun.symbol).deconst.resultType
val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
// body = checkNoEscaping.locals(context.scope, restpe, body)
- val fun1 = treeCopy.Function(fun, vparams, body).setType(funtpe)
- if (codeExpected) lifted(fun1) else fun1
- }
+ treeCopy.Function(fun, vparams, body1).setType(funtpe)
}
-
- def lifted(tree: Tree): Tree = typedPos(tree.pos) {
- Apply(Select(Ident(CodeModule), nme.lift_), List(tree))
}
def typedRefinement(stats: List[Tree]) {
@@ -2012,7 +2068,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
unit.toCheck += { () =>
// go to next outer context which is not silent, see #3614
var c = context
- while (!c.reportGeneralErrors) c = c.outer
+ while (c.bufferErrors) c = c.outer
val stats1 = newTyper(c).typedStats(stats, NoSymbol)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
@@ -2028,11 +2084,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Some(imp1: Import) => imp1
case None => log("unhandled import: "+imp+" in "+unit); imp
}
-
private def isWarnablePureExpression(tree: Tree) = tree match {
case EmptyTree | Literal(Constant(())) => false
case _ =>
- (treeInfo isExprSafeToInline tree) && {
+ !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
val sym = tree.symbol
(sym == null) || !(sym.isModule || sym.isLazy) || {
debuglog("'Pure' but side-effecting expression in statement position: " + tree)
@@ -2046,9 +2101,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def includesTargetPos(tree: Tree) =
tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
val localTarget = stats exists includesTargetPos
+ val statsErrors = scala.collection.mutable.LinkedHashSet[AbsTypeError]()
def typedStat(stat: Tree): Tree = {
if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat))
- errorTree(stat, "only declarations allowed here")
+ OnlyDeclarationsError(stat)
else
stat match {
case imp @ Import(_, _) =>
@@ -2061,20 +2117,25 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// the target position
stat
} else {
- val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
- else newTyper(context.make(stat, exprOwner))
+ val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) {
+ context.flushBuffer()
+ this
+ } else newTyper(context.make(stat, exprOwner))
// XXX this creates a spurious dead code warning if an exception is thrown
// in a constructor, even if it is the only thing in the constructor.
val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
- error(stat.pos, "called constructor's definition must precede calling constructor's definition")
+ ConstructorsOrderError(stat)
}
+
if (isWarnablePureExpression(result)) context.warning(stat.pos,
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
+ statsErrors ++= localTyper.context.errBuffer
result
}
}
@@ -2108,9 +2169,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// error for this is issued in RefChecks.checkDefaultsInOverloaded
if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
!e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
- error(e.sym.pos, e1.sym+" is defined twice"+
- {if(!settings.debug.value) "" else " in "+unit.toString}+
- {if (e.sym.isMacro && e1.sym.isMacro) " \n(note that macros cannot be overloaded)" else ""})
+ DefDefinedTwiceError(e.sym, e1.sym)
scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
}
e1 = scope.lookupNextEntry(e1)
@@ -2155,14 +2214,20 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}) ::: newStats.toList
}
}
- val result = stats mapConserve typedStat
- if (phase.erasedTypes) result
- else checkNoDoubleDefsAndAddSynthetics(result)
+
+ val stats1 = withSavedContext(context) {
+ val result = stats mapConserve typedStat
+ context.flushBuffer()
+ result
+ }
+ context.updateBuffer(statsErrors)
+ if (phase.erasedTypes) stats1
+ else checkNoDoubleDefsAndAddSynthetics(stats1)
}
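A hedged sketch of the buffer-and-restore discipline this hunk gives typedStats: errors raised while typing one statement are remembered in `statsErrors`, the shared buffer is flushed so they do not leak into the next statement, and the collected set is written back once all statements are typed. `TypeErr` and `BufferingContext` below are stand-ins for the compiler's AbsTypeError and Context, not its API:

    import scala.collection.mutable

    final case class TypeErr(msg: String)

    final class BufferingContext {
      val errBuffer = mutable.LinkedHashSet[TypeErr]()
      def issue(e: TypeErr): Unit = errBuffer += e
      def flushBuffer(): Unit = errBuffer.clear()
      def updateBuffer(errs: Iterable[TypeErr]): Unit = errBuffer ++= errs
    }

    object TypedStatsDemo extends App {
      val context = new BufferingContext
      val statsErrors = mutable.LinkedHashSet[TypeErr]()

      def typedStat(stat: String): String = {
        if (stat contains "bad") context.issue(TypeErr("error in " + stat))
        statsErrors ++= context.errBuffer   // keep this statement's errors
        context.flushBuffer()               // ...but do not leak them into the next one
        stat.trim
      }

      val stats1 = List("val x = 1", "bad stat", "val y = 2") map typedStat
      context.updateBuffer(statsErrors)     // re-install everything that was collected
      println(stats1 + " / buffered: " + context.errBuffer)
    }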
def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
val typedMode = onlyStickyModes(mode) | newmode
- val t = constrTyperIf((mode & SCCmode) != 0).typed(arg, typedMode, pt)
+ val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
@@ -2195,15 +2260,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = {
def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass
- (formals, args).zipped exists {
+ exists2(formals, args) {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
- (formalargs, vparams).zipped.exists ((formalarg, vparam) =>
- vparam.tpt.isEmpty && (tparams exists (formalarg contains))) &&
- (tparams forall isLowerBounded)
+ ( exists2(formalargs, vparams)((formal, vparam) =>
+ vparam.tpt.isEmpty && (tparams exists formal.contains))
+ && (tparams forall isLowerBounded)
+ )
case _ =>
false
})
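Throughout the patch, `(xs, ys).zipped map f` becomes `map2(xs, ys)(f)` and `(xs, ys).zipped exists p` becomes `exists2(xs, ys)(p)`, using the compiler's own map2/exists2/foreach2 helpers. The versions below are illustrative, non-optimized equivalents, assuming equal-length lists:

    object CollectionsSketch {
      def map2[A, B, C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] =
        (xs zip ys) map { case (a, b) => f(a, b) }

      def exists2[A, B](xs: List[A], ys: List[B])(p: (A, B) => Boolean): Boolean =
        (xs zip ys) exists { case (a, b) => p(a, b) }

      def foreach2[A, B](xs: List[A], ys: List[B])(f: (A, B) => Unit): Unit =
        (xs zip ys) foreach { case (a, b) => f(a, b) }
    }

    object CollectionsDemo extends App {
      import CollectionsSketch._
      println(map2(List(1, 2, 3), List(10, 20, 30))(_ + _))   // List(11, 22, 33)
      println(exists2(List(1, 2), List(2, 1))(_ == _))        // false
    }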
@@ -2230,8 +2296,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
- def errTree = setError(treeCopy.Apply(tree, fun0, args))
- def errorTree(msg: String) = { error(tree.pos, msg); errTree }
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
var fun = fun0
if (fun.hasSymbol && fun.symbol.isOverloaded) {
@@ -2294,8 +2360,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
arg1
}
context.undetparams = undetparams
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ if (context.hasErrors)
+ setError(tree)
+ else {
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
+ doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ }
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
@@ -2315,7 +2385,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case t: Tree =>
+ case SilentResultValue(t) =>
// Depending on user options, may warn or error here if
// a Unit or tuple was inserted.
Some(t) filter (tupledTree =>
@@ -2323,7 +2393,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
|| tupledTree.symbol == null
|| checkValidAdaptation(tupledTree, args)
)
- case ex =>
+ case _ =>
context.undetparams = savedUndetparams
None
}
@@ -2338,21 +2408,21 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def tryNamesDefaults: Tree = {
val lencmp = compareLengths(args, formals)
- if (mt.isErroneous) errTree
- else if (inPatternMode(mode))
+ if (mt.isErroneous) duplErrTree
+ else if (inPatternMode(mode)) {
// #2064
- errorTree("wrong number of arguments for "+ treeSymTypeMsg(fun))
- else if (lencmp > 0) {
- tryTupleApply getOrElse errorTree("too many arguments for "+treeSymTypeMsg(fun))
+ duplErrorTree(WrongNumberOfArgsError(tree, fun))
+ } else if (lencmp > 0) {
+ tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
} else if (lencmp == 0) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
- errTree
+ duplErrTree
} else if (!isIdentity(argPos) && !sameLength(formals, params))
// !isIdentity indicates that named arguments are used to re-order arguments
- errorTree("when using named arguments, the vararg parameter has to be specified exactly once")
+ duplErrorTree(MultipleVarargError(tree))
else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
@@ -2366,7 +2436,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// calls to the default getters. Example:
// foo[Int](a)() ==> foo[Int](a)(b = foo$qual.foo$default$2[Int](a))
val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x)
- if (fun1.isErroneous) errTree
+ if (fun1.isErroneous) duplErrTree
else {
assert(isNamedApplyBlock(fun1), fun1)
val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2
@@ -2383,17 +2453,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val lencmp2 = compareLengths(allArgs, formals)
if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) {
- errorTree("module extending its companion class cannot use default constructor arguments")
+ duplErrorTree(ModuleUsingCompanionClassDefaultArgsErrror(tree))
} else if (lencmp2 > 0) {
removeNames(Typer.this)(allArgs, params) // #3818
- errTree
+ duplErrTree
} else if (lencmp2 == 0) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
val note = "Error occurred in an application involving default arguments."
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
- tryTupleApply getOrElse errorTree(notEnoughArgumentsMsg(fun, missing))
+ tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
}
}
}
@@ -2452,15 +2522,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
atPos(tree.pos)(gen.mkNil setType restpe)
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
-
} else if (needsInstantiation(tparams, formals, args)) {
//println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
inferExprInstance(fun, tparams)
doTypedApply(tree, fun, args, mode, pt)
} else {
- assert(!inPatternMode(mode)) // this case cannot arise for patterns
+ assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
+ val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
var remainingParams = paramTypes
def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
@@ -2474,11 +2543,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (!argtparams.isEmpty) {
val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
- }
- arg1
+ arg1
+ } else arg1
}
- val args1 = (args, formals).zipped map typedArgToPoly
- if (args1 exists (_.tpe.isError)) errTree
+ val args1 = map2(args, formals)(typedArgToPoly)
+ if (args1 exists {_.isErrorTyped}) duplErrTree
else {
debuglog("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug
// define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
@@ -2495,12 +2564,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
doTypedApply(tree, fun setType fun.tpe.widen, args, mode, pt)
case ErrorType =>
- setError(treeCopy.Apply(tree, fun, args))
+ if (!tree.isErrorTyped) setError(tree) else tree
+ // @H change to setError(treeCopy.Apply(tree, fun, args))
/* --- begin unapply --- */
case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
if (args.length > MaxTupleArity)
- error(fun.pos, "too many arguments for unapply pattern, maximum = "+MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
//
def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
@@ -2508,17 +2578,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(Nil, param.tpe)
case PolyType(tparams, restpe) =>
createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
+ // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
case OverloadedType(_, _) =>
- error(fun.pos, "cannot resolve overloaded unapply")
+ OverloadedUnapplyError(fun)
(Nil, ErrorType)
case _ =>
- error(fun.pos, "an unapply method must accept a single argument.")
+ UnapplyWithSingleArgError(fun)
(Nil, ErrorType)
}
val unapp = unapplyMember(otpe)
val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(fun.pos, nme.SELECTOR_DUMMY) setFlag SYNTHETIC setInfo pt
+ val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
val arg = Ident(argDummy) setType pt
if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
@@ -2528,10 +2599,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
freeVars foreach unapplyContext.scope.enter
val typer1 = newTyper(unapplyContext)
- val pattp = typer1.infer.inferTypedPattern(tree.pos, unappFormal, arg.tpe)
+ val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe)
// turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => newExistentialSkolem(fv, unapplyContext.owner, fv))
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
arg.tpe = pattp.substSym(freeVars, skolems)
argDummy setInfo arg.tpe
}
@@ -2539,8 +2610,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// setType null is necessary so that ref will be stabilized; see bug 881
val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
- if (fun1.tpe.isErroneous) errTree
- else {
+ if (fun1.tpe.isErroneous) {
+ duplErrTree
+ } else {
val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
val formals1 = formalTypes(formals0, args.length)
if (sameLength(formals1, args)) {
@@ -2553,15 +2625,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val itype = glb(List(pt1, arg.tpe))
arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
UnApply(fun1, args1) setPos tree.pos setType itype
- }
- else {
- errorTree("wrong number of arguments for "+treeSymTypeMsg(fun))
- }
+ } else
+ duplErrorTree(WrongNumberArgsPatternError(tree, fun))
}
/* --- end unapply --- */
case _ =>
- errorTree(fun.tpe+" does not take parameters")
+ duplErrorTree(ApplyWithoutArgsError(tree, fun))
}
}
@@ -2573,8 +2643,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
var hasError: Boolean = false
- def error(pos: Position, msg: String) = {
- context.error(pos, msg)
+ val pending = ListBuffer[AbsTypeError]()
+
+ def reportAnnotationError(err: AbsTypeError) = {
+ pending += err
hasError = true
annotationError
}
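A sketch of the collect-then-issue pattern typedAnnotation switches to here: argument errors go into a `pending` buffer via `reportAnnotationError` and are only issued, at the end of the method (see the `if (hasError)` hunk further below), when the annotation as a whole turns out to be erroneous. `TypeErr` and `constArg` are hypothetical stand-ins for AbsTypeError and tree2ConstArg:

    import scala.collection.mutable.ListBuffer

    object AnnotationErrorDemo extends App {
      final case class TypeErr(msg: String)

      val pending = ListBuffer[TypeErr]()
      var hasError = false

      def reportAnnotationError(err: TypeErr): Option[Nothing] = {
        pending += err
        hasError = true
        None
      }

      // stand-in for tree2ConstArg: accept only literal integers
      def constArg(s: String): Option[Int] =
        if (s.nonEmpty && s.forall(_.isDigit)) Some(s.toInt)
        else reportAnnotationError(TypeErr("annotation argument needs to be a constant: " + s))

      val args = List("1", "x", "3") map constArg
      if (hasError) pending foreach (e => println("issued: " + e))  // issued only on failure
      else println("annotation args: " + args.flatten)
    }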
@@ -2590,13 +2662,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case tpe => null
}
}
- def fail(msg: String) = { error(tr.pos, msg) ; None }
- if (const == null)
- fail("annotation argument needs to be a constant; found: " + tr)
- else if (const.value == null)
- fail("annotation argument cannot be null")
- else
+ if (const == null) {
+ reportAnnotationError(AnnotationNotAConstantError(tr)); None
+ } else if (const.value == null) {
+ reportAnnotationError(AnnotationArgNullError(tr)); None
+ } else
Some(LiteralAnnotArg(const))
}
@@ -2605,16 +2676,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
- error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method")
- None
+ reportAnnotationError(ArrayConstantsError(tree)); None
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
- if (annInfo.atp.isErroneous) {
- // recursive typedAnnotation call already printed an error, so don't call "error"
- hasError = true
- None
- } else Some(NestedAnnotArg(annInfo))
+ if (annInfo.atp.isErroneous) { hasError = true; None }
+ else Some(NestedAnnotArg(annInfo))
// use of Array.apply[T: ClassManifest](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
@@ -2629,13 +2696,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// BT = Int, .., String, Class[_], JavaAnnotClass
// T = BT | Array[BT]
// So an array literal as argument can only be valid if pt is Array[_]
- error(tree.pos, "found array constant, expected argument of type "+ pt)
+ reportAnnotationError(ArrayConstantsTypeMismatchError(tree, pt))
None
}
- else
- tryConst(tree, pt)
+ else tryConst(tree, pt)
- case Typed(t, _) => tree2ConstArg(t, pt)
+ case Typed(t, _) =>
+ tree2ConstArg(t, pt)
case tree =>
tryConst(tree, pt)
@@ -2655,13 +2722,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Select(New(tpt), nme.CONSTRUCTOR) =>
(fun, outerArgss)
case _ =>
- error(fun.pos, "unexpected tree in annotation: "+ fun)
+ reportAnnotationError(UnexpectedTreeAnnotation(fun))
(setError(fun), outerArgss)
}
extract(ann, List())
}
- if (fun.isErroneous) annotationError
+ val res = if (fun.isErroneous) annotationError
else {
val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
val annType = tpt.tpe
@@ -2671,9 +2738,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// annotation to be saved as java classfile annotation
val isJava = typedFun.symbol.owner.isJavaDefined
if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
- error(tpt.pos, "expected annotation of type "+ annClass.tpe +", found "+ annType)
+ reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
} else if (argss.length > 1) {
- error(ann.pos, "multiple argument lists on classfile annotation")
+ reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
} else {
val args =
if (argss.head.length == 1 && !isNamed(argss.head.head))
@@ -2689,10 +2756,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val sym = if (isJava) annScope.lookup(name)
else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol)
if (sym == NoSymbol) {
- error(arg.pos, "unknown annotation argument name: " + name)
+ reportAnnotationError(UnknownAnnotationNameError(arg, name))
(nme.ERROR, None)
} else if (!names.contains(sym)) {
- error(arg.pos, "duplicate value for annotation argument " + name)
+ reportAnnotationError(DuplicateValueAnnotationError(arg, name))
(nme.ERROR, None)
} else {
names -= sym
@@ -2701,21 +2768,21 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(sym.name, annArg)
}
case arg =>
- error(arg.pos, "classfile annotation arguments have to be supplied as named arguments")
+ reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg))
(nme.ERROR, None)
}
for (sym <- names) {
// make sure the flags are up to date before erroring (jvm/t3415 fails otherwise)
sym.initialize
if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefaultFlag)
- error(ann.pos, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name)
+ reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setPos(ann.pos)
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(ann).setPos(ann.pos)
}
} else if (requireJava) {
- error(ann.pos, "nested classfile annotations must be defined in java; found: "+ annType)
+ reportAnnotationError(NestedAnnotationError(ann, annType))
} else {
val typedAnn = if (selfsym == NoSymbol) {
typed(ann, mode, annClass.tpe)
@@ -2752,7 +2819,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- AnnotationInfo(annType, args, List()).setPos(t.pos)
+ AnnotationInfo(annType, args, List()).setOriginal(ann).setPos(t.pos)
case Block(stats, expr) =>
context.warning(t.pos, "Usage of named or default arguments transformed this annotation\n"+
@@ -2767,7 +2834,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
annInfo(fun)
case _ =>
- error(t.pos, "unexpected tree after typing annotation: "+ typedAnn)
+ reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn))
}
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
@@ -2777,11 +2844,43 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else annInfo(typedAnn)
}
}
+
+ if (hasError) {
+ pending.foreach(ErrorUtils.issueTypeError)
+ annotationError
+ } else res
}
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+ (hidden map { s =>
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ (s, s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }).toMap
+ }
+
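A toy model of the `safeBound` recursion above, with symbols as plain strings and each hidden symbol mapped to the type symbol of its upper bound (hypothetical data); the only point is that the recursion keeps following bounds until it reaches something outside `hidden`:

    object SafeBoundDemo extends App {
      // upper-bound type symbol for each symbol (illustrative, not compiler API)
      val upperBound = Map("A" -> "B", "B" -> "Seq", "C" -> "String")

      def safeBound(hidden: Set[String])(t: String): String =
        if (hidden contains t) safeBound(hidden)(upperBound(t)) else t

      val hidden = Set("A", "B")
      val bounds = hidden.map(s => s -> safeBound(hidden)(upperBound(s))).toMap
      println(bounds)   // Map(A -> Seq, B -> Seq): no bound mentions a hidden symbol
    }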
/** Given a set `rawSyms` of term- and type-symbols, and a type
* `tp`, produce a set of fresh type parameters and a type so that
* it can be abstracted to an existential type. Every type symbol
@@ -2799,14 +2898,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* only the type of the Ident is changed.
*/
protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
val typeParams: List[Symbol] = rawSyms map { sym =>
val name = sym.name match {
case x: TypeName => x
- case x => newTypeName(x + ".type")
+ case x => nme.singletonName(x)
}
- val bound = sym.existentialBound
+ val bound = allBounds(sym)
val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(sym.pos, name)
+ val quantified = sowner.newExistential(name, sym.pos)
quantified setInfo bound.cloneInfo(quantified)
}
@@ -2824,9 +2924,22 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def packSymbols(hidden: List[Symbol], tp: Type): Type =
if (hidden.isEmpty) tp
else existentialTransform(hidden, tp)(existentialAbstraction)
+
+ def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
+ ctx.owner.isTerm &&
+ (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
+ {
+ var ctx1 = ctx.outer
+ while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
+ (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
+ }
def isCapturedExistential(sym: Symbol) =
- sym hasAllFlags (EXISTENTIAL | CAPTURED) // todo refine this
+ (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
+ val start = startTimer(isReferencedNanos)
+ try !isReferencedFrom(context, sym)
+ finally stopTimer(isReferencedNanos, start)
+ }
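isCapturedExistential now refuses to treat an EXISTENTIAL | CAPTURED symbol as captured while some enclosing context still refers to it. A toy model of the outer-context walk that `isReferencedFrom` performs, with contexts as a chain of scopes holding referenced names (purely illustrative):

    object IsReferencedDemo extends App {
      final case class Ctx(scope: Set[String], outer: Option[Ctx])

      def isReferencedFrom(ctx: Ctx, sym: String): Boolean =
        ctx.scope.contains(sym) || ctx.outer.exists(isReferencedFrom(_, sym))

      val chain = Ctx(Set("x"), Some(Ctx(Set("y", "ev$1"), None)))
      println(isReferencedFrom(chain, "ev$1"))  // true  -> keep the symbol visible
      println(isReferencedFrom(chain, "z"))     // false -> safe to treat as captured
    }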
def packCaptured(tpe: Type): Type = {
val captured = mutable.Set[Symbol]()
@@ -2836,7 +2949,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
existentialAbstraction(captured.toList, tpe)
}
- /** convert skolems to existentials */
+ /** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
def defines(tree: Tree, sym: Symbol) =
sym.isExistentialSkolem && sym.unpackLocation == tree ||
@@ -2863,7 +2976,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize)
else {
if (pre.isVolatile)
- context.error(tree.pos, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre)
+ InferTypeWithVolatileTypeSelectionError(tree, pre)
mapOver(tp)
}
case _ =>
@@ -2880,8 +2993,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
localSyms += sym
remainingSyms += sym
} else {
- unit.error(tree.pos,
- "can't existentially abstract over parameterized type " + tp)
+ AbstractExistentiallyOverParamerizedTpeError(tree, tp)
}
}
}
@@ -2926,7 +3038,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
override val typeParams = tparams map (_.symbol)
val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
// Replace the symbols
- def substitute() = (tparams, typeSkolems).zipped map (_ setSymbol _)
+ def substitute() = map2(tparams, typeSkolems)(_ setSymbol _)
override def complete(sym: Symbol) {
// The info of a skolem is the skolemized info of the
// actual type parameter of the skolem
@@ -2950,10 +3062,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
new DeSkolemizeMap mapOver tp
}
- def typedClassOf(tree: Tree, tpt: Tree) = {
- checkClassType(tpt, true, false)
- atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
- }
+ def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
+ if (!checkClassType(tpt, true, false) && noGen) tpt
+ else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
for (wc <- tree.whereClauses)
@@ -2962,10 +3073,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
if (vd.symbol.tpe.isVolatile)
- error(vd.pos, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
+ AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) =>
- TypeTree(ExistentialType(tparams, tp)) setOriginal tree
+ TypeTree(newExistentialType(tparams, tp)) setOriginal tree
)
}
@@ -2985,7 +3096,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
// inferPolyAlternatives loops... -- I have no idea why :-(
// ...actually this was looping anyway, see bug #278.
- return errorTree(fun, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ return TypedApplyWrongNumberOfTpeParametersError(fun, fun)
typedTypeApply(tree, mode, fun, args1)
case SingleType(_, _) =>
@@ -2993,12 +3104,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
val targs = args map (_.tpe)
- checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "")
+ checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (fun.symbol == Predef_classOf)
- typedClassOf(tree, args.head)
+ typedClassOf(tree, args.head, true)
else {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && !targs.isEmpty)
- checkCheckable(tree.pos, targs.head, "")
+ checkCheckable(tree, targs.head, "")
val resultpe = restpe.instantiateTypeParams(tparams, targs)
//@M substitution in instantiateParams needs to be careful!
//@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int]
@@ -3009,12 +3120,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
treeCopy.TypeApply(tree, fun, args) setType resultpe
}
} else {
- errorTree(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ TypedApplyWrongNumberOfTpeParametersError(tree, fun)
}
case ErrorType =>
setError(treeCopy.TypeApply(tree, fun, args))
case _ =>
- errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
+ TypedApplyDoesNotTakeTpeParametersError(tree, fun)
}
@inline final def deindentTyping() = context.typingIndentLevel -= 2
@@ -3068,7 +3179,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* */
( context.owner
newLocalDummy (ann.pos)
- newValue (ann.pos, nme.self)
+ newValue (nme.self, ann.pos)
setInfo (arg1.tpe.withoutAnnotations)
)
}
@@ -3083,6 +3194,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// this annotation did not need it
if (ainfo.isErroneous)
+ // Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
ann.tpe = atype
@@ -3109,9 +3221,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (vble == NoSymbol)
vble =
if (isFullyDefined(pt))
- context.owner.newAliasType(tree.pos, name) setInfo pt
+ context.owner.newAliasType(name, tree.pos) setInfo pt
else
- context.owner.newAbstractType(tree.pos, name) setInfo TypeBounds.empty
+ context.owner.newAbstractType(name, tree.pos) setInfo TypeBounds.empty
val rawInfo = vble.rawInfo
vble = if (vble.name == tpnme.WILDCARD) context.scope.enter(vble)
else namer.enterInScope(vble)
@@ -3119,10 +3231,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def typedBindTerm(name: TermName) = {
if (vble == NoSymbol)
- vble = context.owner.newValue(tree.pos, name)
+ vble = context.owner.newValue(name, tree.pos)
if (vble.name.toTermName != nme.WILDCARD) {
if ((mode & ALTmode) != 0)
- error(tree.pos, "illegal variable in pattern alternative")
+ VariableInPatternAlternativeError(tree)
vble = namer.enterInScope(vble)
}
val body1 = typed(body, mode, pt)
@@ -3149,18 +3261,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
val varsym = lhs1.symbol
- def failMsg =
- if (varsym != null && varsym.isValue) "reassignment to val"
- else "assignment to non variable"
- def fail = {
- if (!lhs1.tpe.isError)
- error(tree.pos, failMsg)
+ // see #2494 for double error message example
+ def fail() =
+ if (lhs1.isErrorTyped) lhs1
+ else AssignmentError(tree, varsym)
- setError(tree)
- }
if (varsym == null)
- return fail
+ return fail()
if (treeInfo.mayBeVarGetter(varsym)) {
treeInfo.methPart(lhs1) match {
@@ -3176,7 +3284,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
}
- else fail
+ else fail()
}
def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = {
@@ -3223,7 +3331,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val owntype = elimAnonymousClass(owntype0)
if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype))
- (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
+ (MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
case Block(vd :: Nil, tree@Match(selector, cases)) =>
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
@@ -3246,12 +3354,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
enclMethod.owner.isConstructor ||
context.enclClass.enclMethod == enclMethod // i.e., we are in a constructor of a local class
) {
- errorTree(tree, "return outside method definition")
+ ReturnOutsideOfDefError(tree)
} else {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
- if (restpt.tpe eq null)
- errorTree(tree, enclMethod.owner + " has return statement; needs result type")
- else {
+ if (restpt.tpe eq null) {
+ ReturnWithoutTypeError(tree, enclMethod.owner)
+ } else {
context.enclMethod.returnsSeen = true
val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
// Warn about returning a value if no value can be returned.
@@ -3270,12 +3378,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedNew(tpt: Tree) = {
val tpt1 = {
val tpt0 = typedTypeConstructor(tpt)
- checkClassType(tpt0, false, true)
- if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
- context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
- TypeTree().setOriginal(tpt0)
- .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
- } else tpt0
+ if (checkClassType(tpt0, false, true))
+ if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
+ TypeTree().setOriginal(tpt0)
+ .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
+ } else tpt0
+ else tpt0
}
/** If current tree <tree> appears in <val x(: T)? = <tree>>
@@ -3294,17 +3403,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val tp = tpt1.tpe
val sym = tp.typeSymbol.initialize
if (sym.isAbstractType || sym.hasAbstractFlag)
- error(tree.pos, sym + " is abstract; cannot be instantiated")
+ IsAbstractError(tree, sym)
else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
)) {
- error(tree.pos, sym +
- " cannot be instantiated because it does not conform to its self-type "+
- tp.typeOfThis)
- }
- treeCopy.New(tree, tpt1).setType(tp)
+ DoesNotConformToSelfTypeError(tree, sym, tp.typeOfThis)
+ } else
+ treeCopy.New(tree, tpt1).setType(tp)
}
def typedEta(expr1: Tree): Tree = expr1.tpe match {
@@ -3342,22 +3449,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ErrorType =>
expr1
case _ =>
- errorTree(expr1, "_ must follow method; cannot follow " + expr1.tpe)
+ UnderscoreEtaError(expr1)
}
/**
* @param args ...
* @return ...
*/
- def tryTypedArgs(args: List[Tree], mode: Int, other: TypeError): List[Tree] = {
+ def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
val c = context.makeSilent(false)
c.retyping = true
try {
- newTyper(c).typedArgs(args, mode)
+ val res = newTyper(c).typedArgs(args, mode)
+ if (c.hasErrors) None else Some(res)
} catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError =>
- null
+ case ex: CyclicReference =>
+ throw ex
+ case te: TypeError =>
+ // @H some typer errors can still leak,
+ // for instance in continuations
+ None
+ } finally {
+ c.flushBuffer()
}
}
@@ -3366,10 +3479,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
val start = startTimer(failedApplyNanos)
- silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
- case t: Tree =>
- t
- case ex: TypeError =>
+
+ def onError(typeError: AbsTypeError): Tree = {
stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
@@ -3392,27 +3503,39 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
case _ => Nil
})
- def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
- val retry = fun :: tree :: args exists errorInResult
+ def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos)
+
+ val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult)
printTyping {
val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
if (retry) "second try: " + funStr
- else "no second try: " + funStr + " because error not in result: " + ex.pos+"!="+tree.pos
+ else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos
}
if (retry) {
val Select(qual, name) = fun
- val args1 = tryTypedArgs(args, forArgMode(fun, mode), ex)
- val qual1 =
- if ((args1 ne null) && !pt.isError) adaptToArguments(qual, name, args1, pt)
- else qual
- if (qual1 ne qual) {
- val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
- return typed1(tree1, mode | SNDTRYmode, pt)
+ tryTypedArgs(args, forArgMode(fun, mode)) match {
+ case Some(args1) =>
+ assert((args1.length == 0) || !args1.head.tpe.isErroneous, "try typed args is ok")
+ val qual1 =
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
+ else qual
+ if (qual1 ne qual) {
+ val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
+ return typed1(tree1, mode | SNDTRYmode, pt)
+ }
+ case _ => ()
}
}
- reportTypeError(tree.pos, ex)
+ issue(typeError)
setError(treeCopy.Apply(tree, fun, args))
}
+
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
+ case SilentResultValue(t) =>
+ t
+ case SilentTypeError(err) =>
+ onError(err)
+ }
}
def typedApply(fun: Tree, args: List[Tree]) = {
@@ -3424,10 +3547,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val funpt = if (isPatternMode) pt else WildcardType
val appStart = startTimer(failedApplyNanos)
val opeqStart = startTimer(failedOpEqNanos)
+
+ def onError(reportError: => Tree): Tree = {
+ fun match {
+ case Select(qual, name)
+ if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ } else {
+ stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ case _ =>
+ stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ }
silent(_.typed(fun, forFunMode(mode), funpt),
- if ((mode & EXPRmode) != 0) false else context.reportAmbiguousErrors,
+ if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
if ((mode & EXPRmode) != 0) tree else context.tree) match {
- case fun1: Tree =>
+ case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
@@ -3454,40 +3595,21 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
//if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
// But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
// by calling ArrayClass.info here (or some other place before specialize).
- if (fun2.symbol == Array_apply) {
+ if (fun2.symbol == Array_apply && !res.isErrorTyped) {
val checked = gen.mkCheckInit(res)
// this check is needed to avoid infinite recursion in Duplicators
// (calling typed1 more than once for the same tree)
if (checked ne res) typed { atPos(tree.pos)(checked) }
else res
- } else if ((mode & FUNmode) == 0 && fun2.hasSymbol && fun2.symbol.isMacro)
- typed1(macroExpand(res), mode, pt)
- else
+ } else
res
- case ex: TypeError =>
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(name.decode) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args, ex)
- }
- else {
- stopTimer(failedApplyNanos, appStart)
- reportTypeError(fun.pos, ex)
- setError(tree)
- }
- case _ =>
- stopTimer(failedApplyNanos, appStart)
- reportTypeError(fun.pos, ex)
- setError(tree)
- }
+ case SilentTypeError(err) =>
+ onError({issue(err); setError(tree)})
}
}
}
- def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree], ex: TypeError): Tree = {
+ def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
val prefix = name.subName(0, name.length - nme.EQL.length)
def mkAssign(vble: Tree): Tree =
Assign(
@@ -3528,25 +3650,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Apply(fn, indices) =>
treeInfo.methPart(fn) match {
case Select(table, nme.apply) => mkUpdate(table, indices)
- case _ => errorTree(qual, "Unexpected tree during assignment conversion.")
+ case _ => UnexpectedTreeAssignmentConversionError(qual)
}
}
typed1(tree1, mode, pt)
-/*
- debuglog("retry assign: "+tree1)
- silent(_.typed1(tree1, mode, pt)) match {
- case t: Tree =>
- t
- case _ =>
- reportTypeError(tree.pos, ex)
- setError(tree)
- }
-*/
}
- def qualifyingClassSym(qual: Name): Symbol =
- if (tree.symbol != NoSymbol) tree.symbol else qualifyingClass(tree, qual, false)
-
def typedSuper(qual: Tree, mix: TypeName) = {
val qual1 = typed(qual)
@@ -3571,12 +3680,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// println(mix)
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
+ ErrorType
} else {
- error(tree.pos, mix+" does not name a parent class of "+clazz)
+ MixinMissingParentClassNameError(tree, mix, clazz)
+ ErrorType
}
- ErrorType
} else if (!ps.tail.isEmpty) {
- error(tree.pos, "ambiguous parent class qualifier")
+ AmbiguousParentClassError(tree)
ErrorType
} else {
ps.head
@@ -3593,16 +3703,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
findMixinSuper(clazz.tpe)
}
- treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
- }
+ treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
+ }
def typedThis(qual: Name) = {
- val clazz = qualifyingClassSym(qual)
- if (clazz == NoSymbol) setError(tree)
- else {
- tree setSymbol clazz setType clazz.thisType.underlying
- if (isStableContext(tree, mode, pt)) tree setType clazz.thisType
- tree
+ val qualifyingClassSym = if (tree.symbol != NoSymbol) Some(tree.symbol) else qualifyingClass(tree, qual)
+ qualifyingClassSym match {
+ case Some(clazz) =>
+ tree setSymbol clazz setType clazz.thisType.underlying
+ if (isStableContext(tree, mode, pt)) tree setType clazz.thisType
+ tree
+ case None => tree
}
}
@@ -3634,12 +3745,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (sym == NoSymbol && name != nme.CONSTRUCTOR && (mode & EXPRmode) != 0) {
val qual1 =
if (member(qual, name) != NoSymbol) qual
- else adaptToMemberWithArgs(tree, qual, name, mode)
- if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
- }
+ else adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if (qual1 ne qual)
+ return typed(treeCopy.Select(tree, qual1, name), mode, pt)
+ }
if (!reallyExists(sym)) {
- if (context.owner.toplevelClass.isJavaDefined && name.isTypeName) {
+ if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
@@ -3666,7 +3778,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
)
}
- def makeErrorTree = {
+ def makeInteractiveErrorTree = {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
@@ -3675,24 +3787,31 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
if (name == nme.ERROR && forInteractive)
- return makeErrorTree
+ return makeInteractiveErrorTree
if (!qual.tpe.widen.isErroneous) {
if ((mode & QUALmode) != 0) {
val lastTry = missingHook(qual.tpe.typeSymbol, name)
if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
}
- notAMemberError(tree.pos, qual, name)
+ NotAMemberError(tree, qual, name)
}
- if (forInteractive) makeErrorTree else setError(tree)
+ if (forInteractive) makeInteractiveErrorTree else setError(tree)
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
- val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual)
- val result = stabilize(tree2, pre2, mode, pt)
+ val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+ case SilentTypeError(err) =>
+ if (err.kind != ErrorKinds.Access) {
+ context issue err
+ return setError(tree)
+ } else (tree1, Some(err))
+ case SilentResultValue(treeAndPre) =>
+ (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
+ }
def isPotentialNullDeference() = {
!isPastTyper &&
@@ -3711,16 +3830,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
result,
(TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
// will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
- checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
+ checkBounds(qual, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
qual // you only get to see the wrapped tree after running this check :-p
}) setType qual.tpe setPos qual.pos,
name)
- case accErr: Inferencer#AccessError =>
- val qual1 =
- try adaptToMemberWithArgs(tree, qual, name, mode)
- catch { case _: TypeError => qual }
- if (qual1 ne qual) typed(Select(qual1, name) setPos tree.pos, mode, pt)
- else accErr.emit()
+ case _ if accessibleError.isDefined =>
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+ if (!qual1.isErrorTyped && (qual1 ne qual))
+ typed(Select(qual1, name) setPos tree.pos, mode, pt)
+ else {
+ issue(accessibleError.get)
+ setError(tree)
+ }
case _ =>
result
}
@@ -3728,7 +3849,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// getClass, we have to catch it immediately so expressions
// like x.getClass().newInstance() are typed with the type of x.
val isRefinableGetClass = (
- selection.symbol.name == nme.getClass_
+ !selection.isErrorTyped
+ && selection.symbol.name == nme.getClass_
&& selection.tpe.params.isEmpty
// TODO: If the type of the qualifier is inaccessible, we can cause private types
// to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
@@ -3736,7 +3858,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
&& qual.tpe.typeSymbol.isPublic
)
if (isRefinableGetClass)
- selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe))
+ selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe))
else
selection
}
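What the special-cased `getClass` typing buys at the use site, shown in plain user-level Scala rather than compiler code: the static result type is refined from `Class[_]` to `Class[_ <: X]`, so expressions like `x.getClass().newInstance()` keep the receiver's type, as the comment above notes:

    object GetClassDemo extends App {
      val c: Class[_ <: String] = "abc".getClass   // refined element type, not just Class[_]
      println(c.getName)                           // java.lang.String
    }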
@@ -3750,8 +3872,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
* (2) Change imported symbols to selections
*/
def typedIdent(name: Name): Tree = {
- def ambiguousError(msg: String) =
- error(tree.pos, "reference to " + name + " is ambiguous;\n" + msg)
+ var errorContainer: AbsTypeError = null
+ @inline
+ def ambiguousError(msg: String) = {
+ assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
+ errorContainer = AmbiguousIdentError(tree, name, msg)
+ }
+ @inline
+ def identError(tree: AbsTypeError) = {
+ assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
+ errorContainer = tree
+ }
var defSym: Symbol = tree.symbol // the directly found symbol
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
@@ -3789,11 +3920,31 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
var cx = startingIdentContext
while (defSym == NoSymbol && cx != NoContext) {
+ // !!! Shouldn't the argument to compileSourceFor be cx, not context?
+ // I can't tell because those methods do nothing in the standard compiler,
+ // presumably they are overridden in the IDE.
currentRun.compileSourceFor(context.asInstanceOf[analyzer.Context], name)
pre = cx.enclClass.prefix
defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
- defSym = defEntry.sym
+ // Right here is where SI-1987, overloading in package objects, can be
+ // seen to go wrong. There is an overloaded symbol, but when referring
+ // to the unqualified identifier from elsewhere in the package, only
+ // the last definition is visible. So overloading mis-resolves and is
+ // definition-order dependent, bad things. See run/t1987.scala.
+ //
+ // I assume the actual problem involves how/where these symbols are entered
+ // into the scope. But since I didn't figure out how to fix it that way, I
+ // catch it here by looking up package-object-defined symbols in the prefix.
+ if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
+ defSym = pre.member(defEntry.sym.name)
+ if (defSym ne defEntry.sym) {
+ log("!!! Overloaded package object member resolved incorrectly.\n Discarded: " +
+ defEntry.sym.defString + "\n Using: " + defSym.defString)
+ }
+ }
+ else
+ defSym = defEntry.sym
}
else {
cx = cx.enclClass
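A toy illustration of the SI-1987 workaround described in the comment above: the local scope entry only remembers the most recently entered definition of a name, while looking the name up through the package-object prefix sees every alternative, so the patch re-resolves through the prefix whenever the scope entry came from a package object. The data below is hypothetical and the strings stand in for symbols:

    object PackageObjectOverloadDemo extends App {
      // what cx.scope.lookupEntry(name) effectively hands back: only the last definition
      val scopeEntry = Map("max" -> "def max(x: Long, y: Long): Long")
      // what pre.member(name) hands back: the full overloaded alternative set
      val prefixMember = Map("max" -> List("def max(x: Int, y: Int): Int",
                                           "def max(x: Long, y: Long): Long"))

      def resolve(name: String, definedInPackageObject: Boolean): List[String] =
        if (definedInPackageObject) prefixMember(name)
        else List(scopeEntry(name))

      println(resolve("max", definedInPackageObject = true))   // both overloads survive
      println(resolve("max", definedInPackageObject = false))  // only the last definition
    }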
@@ -3851,7 +4002,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
ambiguousError(
"it is imported twice in the same scope by\n"+imports.head + "\nand "+imports1.head)
}
- while (!imports1.isEmpty &&
+ while (errorContainer == null && !imports1.isEmpty &&
(!imports.head.isExplicitImport(name) ||
imports1.head.depth == imports.head.depth)) {
var impSym1 = imports1.head.importedSymbol(name)
@@ -3885,66 +4036,49 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (inaccessibleSym eq NoSymbol) {
// Avoiding some spurious error messages: see SI-2388.
if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
- else {
- val similar = (
- // name length check to limit unhelpful suggestions for e.g. "x" and "b1"
- if (name.length > 2) {
- val allowed = (
- startingIdentContext.enclosingContextChain
- flatMap (ctx => ctx.scope.toList ++ ctx.imports.flatMap(_.allImportedSymbols))
- filter (sym => sym.isTerm == name.isTermName)
- filterNot (sym => sym.isPackage || sym.isSynthetic || sym.hasMeaninglessName)
- )
- val allowedStrings = (
- allowed.map("" + _.name).distinct.sorted
- filterNot (s => (s contains '$') || (s contains ' '))
- )
- similarString("" + name, allowedStrings)
- }
- else ""
- )
- error(tree.pos, "not found: "+decodeWithKind(name, context.owner) + similar)
- }
- }
- else new AccessError(
- tree, inaccessibleSym, context.enclClass.owner.thisType,
- inaccessibleExplanation
- ).emit()
+ else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
+ } else
+ identError(InferErrorGen.AccessError(
+ tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
+ inaccessibleExplanation
+ ))
defSym = context.owner.newErrorSymbol(name)
}
}
}
- if (defSym.owner.isPackageClass)
- pre = defSym.owner.thisType
+ if (errorContainer != null) {
+ ErrorUtils.issueTypeError(errorContainer)
+ setError(tree)
+ } else {
+ if (defSym.owner.isPackageClass)
+ pre = defSym.owner.thisType
- // Inferring classOf type parameter from expected type.
- if (defSym.isThisSym) {
- typed1(This(defSym.owner) setPos tree.pos, mode, pt)
- }
- // Inferring classOf type parameter from expected type. Otherwise an
- // actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
- typedClassOf(tree, TypeTree(pt.typeArgs.head))
- else {
- val tree1 = (
- if (qual == EmptyTree) tree
- // atPos necessary because qualifier might come from startContext
- else atPos(tree.pos)(Select(qual, name))
- )
- val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- stabilize(tree2, pre2, mode, pt) match {
- case accErr: Inferencer#AccessError => accErr.emit()
- case result => result
+ // Inferring classOf type parameter from expected type.
+ if (defSym.isThisSym) {
+ typed1(This(defSym.owner) setPos tree.pos, mode, pt)
+ }
+ // Inferring classOf type parameter from expected type. Otherwise an
+ // actual call to the stubbed classOf method is generated, returning null.
+ else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ typedClassOf(tree, TypeTree(pt.typeArgs.head))
+ else {
+ val tree1 = (
+ if (qual == EmptyTree) tree
+ // atPos necessary because qualifier might come from startContext
+ else atPos(tree.pos)(Select(qual, name))
+ )
+ val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
+ // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
+ stabilize(tree2, pre2, mode, pt)
}
}
}
def typedCompoundTypeTree(templ: Template) = {
val parents1 = templ.parents mapConserve (typedType(_, mode))
- if (parents1 exists (_.tpe.isError)) tree setType ErrorType
+ if (parents1 exists (_.isErrorTyped)) tree setType ErrorType
else {
- val decls = new Scope
+ val decls = newScope
//Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ.body)
@@ -3954,10 +4088,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedAppliedTypeTree(tpt: Tree, args: List[Tree]) = {
val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
- if (tpt1.tpe.isError) {
- setError(tree)
+ if (tpt1.isErrorTyped) {
+ tpt1
} else if (!tpt1.hasSymbol) {
- errorTree(tree, tpt1.tpe+" does not take type parameters")
+ AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
if (sameLength(tparams, args)) {
@@ -3972,7 +4106,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
val argtypes = args1 map (_.tpe)
- (args, tparams).zipped foreach { (arg, tparam) => arg match {
+ foreach2(args, tparams)((arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3981,7 +4115,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
- }}
+ })
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
@@ -3989,16 +4123,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contains types
// (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
- checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
+ checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
result // you only get to see the wrapped tree after running this check :-p
} setType (result.tpe) setPos(result.pos)
else result
} else if (tparams.isEmpty) {
- errorTree(tree, tpt1.tpe+" does not take type parameters")
+ AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
//Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
- errorTree(tree, "wrong number of type arguments for "+tpt1.tpe+", should be "+tparams.length)
+ AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
}
}
}
@@ -4045,8 +4179,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
for (useCase <- comment.useCases) {
typer1.silent(_.typedUseCase(useCase)) match {
- case ex: TypeError =>
- unit.warning(useCase.pos, ex.msg)
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
case _ =>
}
for (useCaseSym <- useCase.defined) {
@@ -4061,17 +4195,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
typedAnnotated(constr, typed(arg, mode, pt))
case tree @ Block(_, _) =>
- newTyper(context.makeNewScope(tree, context.owner))
- .typedBlock(tree, mode, pt)
+ typerWithLocalContext(context.makeNewScope(tree, context.owner)){
+ _.typedBlock(tree, mode, pt)
+ }
case Alternative(alts) =>
val alts1 = alts mapConserve (alt => typed(alt, mode | ALTmode, pt))
treeCopy.Alternative(tree, alts1) setType pt
case Star(elem) =>
- checkStarPatOK(tree.pos, mode)
- val elem1 = typed(elem, mode, pt)
- treeCopy.Star(tree, elem1) setType makeFullyDefined(pt)
+ if ((mode & STARmode) == 0 && !isPastTyper)
+ StarPatternWithVarargParametersError(tree)
+ treeCopy.Star(tree, typed(elem, mode, pt)) setType makeFullyDefined(pt)
case Bind(name, body) =>
typedBind(name, body)
@@ -4079,7 +4214,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case UnApply(fun, args) =>
val fun1 = typed(fun)
val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length)
- val args1 = (args, tpes).zipped map typedPattern
+ val args1 = map2(args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
case ArrayValue(elemtpt, elems) =>
@@ -4087,9 +4222,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case tree @ Function(_, _) =>
if (tree.symbol == NoSymbol)
- tree.symbol = context.owner.newValue(tree.pos, nme.ANON_FUN_NAME)
- .setFlag(SYNTHETIC).setInfo(NoType)
- newTyper(context.makeNewScope(tree, tree.symbol)).typedFunction(tree, mode, pt)
+ tree.symbol = context.owner.newAnonymousFunctionValue(tree.pos)
+ typerWithLocalContext(context.makeNewScope(tree, tree.symbol))(_.typedFunction(tree, mode, pt))
case Assign(lhs, rhs) =>
typedAssign(lhs, rhs)
@@ -4128,17 +4262,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Typed(expr, Function(List(), EmptyTree)) =>
typedEta(checkDead(typed1(expr, mode, pt)))
- case Typed(expr, tpt @ Ident(tpnme.WILDCARD_STAR)) =>
- val expr0 = typed(expr, onlyStickyModes(mode), WildcardType)
+ case Typed(expr0, tpt @ Ident(tpnme.WILDCARD_STAR)) =>
+ val expr = typed(expr0, onlyStickyModes(mode), WildcardType)
def subArrayType(pt: Type) =
if (isValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
else {
val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- ExistentialType(List(tparam), arrayType(tparam.tpe))
+ newExistentialType(List(tparam), arrayType(tparam.tpe))
}
- val (expr1, baseClass) = expr0.tpe.typeSymbol match {
- case ArrayClass => (adapt(expr0, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(expr0, onlyStickyModes(mode), seqType(pt)), SeqClass)
+
+ val (expr1, baseClass) = expr.tpe.typeSymbol match {
+ case ArrayClass => (adapt(expr, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
+ case _ => (adapt(expr, onlyStickyModes(mode), seqType(pt)), SeqClass)
}
expr1.tpe.baseType(baseClass) match {
case TypeRef(_, _, List(elemtp)) =>
@@ -4150,11 +4285,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case Typed(expr, tpt) =>
val tpt1 = typedType(tpt, mode)
val expr1 = typed(expr, onlyStickyModes(mode), tpt1.tpe.deconst)
- val owntype =
- if (isPatternMode) inferTypedPattern(tpt1.pos, tpt1.tpe, pt)
- else tpt1.tpe
- //Console.println(typed pattern: "+tree+":"+", tp = "+tpt1.tpe+", pt = "+pt+" ==> "+owntype)//DEBUG
- treeCopy.Typed(tree, expr1, tpt1) setType owntype
+ val ownType = if (isPatternMode) inferTypedPattern(tpt1, tpt1.tpe, pt) else tpt1.tpe
+ treeCopy.Typed(tree, expr1, tpt1) setType ownType
case TypeApply(fun, args) =>
// @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
@@ -4206,12 +4338,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// convert new Array^N[T](len) for N > 1 to evidence[ClassManifest[T]].newArrayN(len)
val Some((level, manifType)) = erasure.GenericArray.unapply(tpt.tpe)
if (level > MaxArrayDims)
- error(tree.pos, "cannot create a generic multi-dimensional array of more than "+MaxArrayDims+" dimensions")
- val newArrayApp = atPos(tree.pos) {
- val manif = getManifestTree(tree.pos, manifType, false)
- new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
+ MultiDimensionalArrayError(tree)
+ else {
+ val newArrayApp = atPos(tree.pos) {
+ val manif = getManifestTree(tree, manifType, false)
+ new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
+ }
+ typed(newArrayApp, mode, pt)
}
- typed(newArrayApp, mode, pt)
case tree1 =>
tree1
}
@@ -4242,18 +4376,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val tree1 = // temporarily use `filter` and an alternative for `withFilter`
if (name == nme.withFilter)
silent(_ => typedSelect(qual1, name)) match {
- case result1: Tree =>
- result1
- case ex1: TypeError =>
+ case SilentResultValue(result) =>
+ result
+ case _ =>
silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case result2: Tree =>
+ case SilentResultValue(result2) =>
unit.deprecationWarning(
tree.pos, "`withFilter' method does not yet exist on "+qual1.tpe.widen+
", using `filter' method instead")
result2
- case ex2: TypeError =>
- reportTypeError(tree.pos, ex1)
- setError(tree)
+ case SilentTypeError(err) =>
+ WithFilterError(tree, err)
}
}
else
@@ -4270,6 +4403,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
typedIdent(name)
+ case ReferenceToBoxed(idt @ Ident(_)) =>
+ val id1 = typed1(idt, mode, pt) match { case id: Ident => id }
+ treeCopy.ReferenceToBoxed(tree, id1) setType AnyRefClass.tpe
+
case Literal(value) =>
tree setType (
if (value.tag == UnitTag) UnitClass.tpe
@@ -4282,8 +4419,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case SelectFromTypeTree(qual, selector) =>
val qual1 = typedType(qual, mode)
- if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe)
- typedSelect(qual1, selector)
+ if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual)
+ else typedSelect(qual1, selector)
case CompoundTypeTree(templ) =>
typedCompoundTypeTree(templ)
@@ -4297,7 +4434,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
case etpt @ ExistentialTypeTree(_, _) =>
- newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
+ typerWithLocalContext(context.makeNewScope(tree, context.owner)){
+ _.typedExistentialTypeTree(etpt, mode)
+ }
case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
case tpt @ TypeTree() =>
@@ -4309,7 +4448,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// whatever type to tree; we just have to survive until a real error message is issued.
tree setType AnyClass.tpe
case Import(expr, selectors) =>
- assert(forInteractive) // should not happen in normal circumstances.
+ assert(forInteractive, "!forInteractive") // should not happen in normal circumstances.
tree setType tree.symbol.tpe
case _ =>
abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
@@ -4347,7 +4486,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
- "silent" -> !context.reportGeneralErrors,
+ "silent" -> context.bufferErrors,
"context.owner" -> context.owner
)
)
@@ -4367,16 +4506,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
tree1, tree1.tpe.widen, pt, context.undetparamsString)
) //DEBUG
}
-
-// for (t <- tree1.tpe) assert(t != WildcardType)
-// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
result
} catch {
case ex: TypeError =>
tree.tpe = null
+ // The only problematic cases are (recoverable) cyclic reference errors, which can pop up almost anywhere.
printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
- reportTypeError(tree.pos, ex)
+
+ reportTypeError(context, tree.pos, ex)
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
@@ -4398,6 +4536,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
+ def expandMacro(tree: Tree): Tree =
+ macroExpand(tree, context) match {
+ case Some(t: Tree) => t
+ case Some(t) => MacroExpandError(tree, t)
+ case None => setError(tree) // error already reported
+ }
+
def atOwner(owner: Symbol): Typer =
newTyper(context.make(context.tree, owner))
@@ -4481,22 +4626,22 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// to see are those in the signatures. These do not need a unique object as a prefix.
// The situation is different for new's and super's, but scalac does not look deep
// enough to see those. See #3938
- error(tree.pos, restpe.prefix+" is not a legal prefix for a constructor")
- }
-
- //@M fix for #2208
- // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef
- if(result.tpe.typeArgs.isEmpty) {
- // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) {
- // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not
- // designed to deal with the cycles in the scala package (ScalaObject extends
- // AnyRef, but the AnyRef type alias is entered after the scala package is
- // loaded and completed, so that ScalaObject is unpickled while AnyRef is not
- // yet defined )
- result setType(restpe)
- } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
- // during uncurry (after refchecks), all types are normalized
- result
+ ConstructorPrefixError(tree, restpe)
+ } else {
+ //@M fix for #2208
+ // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef
+ if (result.tpe.typeArgs.isEmpty) {
+ // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) {
+ // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not
+ // designed to deal with the cycles in the scala package (ScalaObject extends
+ // AnyRef, but the AnyRef type alias is entered after the scala package is
+ // loaded and completed, so that ScalaObject is unpickled while AnyRef is not
+ // yet defined )
+ result setType(restpe)
+ } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
+ // during uncurry (after refchecks), all types are normalized
+ result
+ }
}
}
@@ -4520,11 +4665,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
true, false, context)
}
- def getManifestTree(pos: Position, tp: Type, full: Boolean): Tree = {
+ def getManifestTree(tree: Tree, tp: Type, full: Boolean): Tree = {
val manifestOpt = findManifest(tp, full)
if (manifestOpt.tree.isEmpty) {
- error(pos, "cannot find "+(if (full) "" else "class ")+"manifest for element type "+tp)
- Literal(Constant(null))
+ MissingManifestError(tree, full, tp)
} else {
manifestOpt.tree
}
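For reference, a minimal self-contained sketch of the silent-typing protocol the hunks above switch to. The names SilentResultValue and SilentTypeError below are stand-ins, not the compiler's own definitions: a silent run returns either a value or the buffered error, and the caller pattern-matches to choose a fallback instead of catching TypeError.

sealed trait SilentResult[+T]
final case class SilentResultValue[+T](value: T) extends SilentResult[T]
final case class SilentTypeError(errMsg: String) extends SilentResult[Nothing]

object SilentSketch {
  // Run an operation "silently": turn a thrown error into a value.
  def silent[T](op: => T): SilentResult[T] =
    try SilentResultValue(op)
    catch { case e: RuntimeException => SilentTypeError(e.getMessage) }

  // Mirrors the withFilter-then-filter fallback above: try the preferred
  // path, fall back to the second one, and only surface the error if both fail.
  def withFallback[T](first: => T)(second: => T): Either[String, T] =
    silent(first) match {
      case SilentResultValue(v) => Right(v)
      case SilentTypeError(_) =>
        silent(second) match {
          case SilentResultValue(v)   => Right(v)
          case SilentTypeError(err)   => Left(err)
        }
    }

  def main(args: Array[String]): Unit =
    println(withFallback[String](sys.error("no withFilter"))("filter ok")) // Right(filter ok)
}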
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 4f7e6225e1..19b8632ed7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -21,9 +21,11 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
+ private val unapplyParamName = nme.x_0
+
/** returns type list for return type of the extraction */
def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
- assert(ufn.isMethod)
+ assert(ufn.isMethod, ufn)
//Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
ufn.name match {
case nme.unapply => unapplyTypeListFromReturnType(ufntpe)
@@ -112,7 +114,7 @@ trait Unapplies extends ast.TreeDSL
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
- vparamss map (_ map copyUntyped[ValDef])
+ mmap(vparamss)(copyUntyped[ValDef])
}
/** The return value of an unapply method of a case class C[Ts]
@@ -165,7 +167,7 @@ trait Unapplies extends ast.TreeDSL
val cparamss = constrParamss(cdef)
atPos(cdef.pos.focus)(
DefDef(caseMods, nme.apply, tparams, cparamss, classType(cdef, tparams),
- New(classType(cdef, tparams), cparamss map (_ map gen.paramToArg)))
+ New(classType(cdef, tparams), mmap(cparamss)(gen.paramToArg)))
)
}
@@ -173,14 +175,13 @@ trait Unapplies extends ast.TreeDSL
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
- val paramName = newTermName("x$0")
val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), paramName, classType(cdef, tparams), EmptyTree))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(paramName))
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
@@ -201,12 +202,12 @@ trait Unapplies extends ast.TreeDSL
def paramWithDefault(vd: ValDef) =
treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
- val paramss = cparamss map (_ map paramWithDefault)
+ val paramss = mmap(cparamss)(paramWithDefault)
val classTpe = classType(cdef, tparams)
Some(atPos(cdef.pos.focus)(
DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, classTpe,
- New(classTpe, paramss map (_ map toIdent)))
+ New(classTpe, mmap(paramss)(toIdent)))
))
}
}
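As a rough, hand-written approximation of what the synthesis above produces for a case class companion: the extractor parameter follows unapplyParamName (x$0) and the body is null-safe. The class and fields below are invented for illustration only.

class Person(val name: String, val age: Int)

object Person {
  def apply(name: String, age: Int): Person = new Person(name, age)
  // Null-safe extractor, roughly the shape built by the code above.
  def unapply(x$0: Person): Option[(String, Int)] =
    if (x$0 == null) None else Some((x$0.name, x$0.age))
}

object UnapplyDemo {
  def main(args: Array[String]): Unit =
    Person("Ada", 36) match {
      case Person(n, a) => println(n + " is " + a)
    }
}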
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 1db6c38b4d..24c9926ad8 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -71,13 +71,35 @@ object DocStrings {
* Every section starts with a `@` and extends to the next `@`, or
* to the end of the comment string, but excluding the final two
* characters which terminate the comment.
+ *
+ * Also take usecases into account: they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own.
*/
def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
case List() => List()
- case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ case idxs => {
+ val idxs2 = mergeUsecaseSections(str, idxs)
+ idxs2 zip (idxs2.tail ::: List(str.length - 2))
+ }
}
-
+
+ /**
+ * Merge sections following a usecase into the usecase comment, so they
+ * can override the parent symbol's sections.
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.find(str.substring(_).startsWith("@usecase")) match {
+ case Some(firstUC) =>
+ val commentSections = idxs.take(idxs.indexOf(firstUC))
+ val usecaseSections = idxs.drop(idxs.indexOf(firstUC)).filter(str.substring(_).startsWith("@usecase"))
+ commentSections ::: usecaseSections
+ case None =>
+ idxs
+ }
+ }
+
/** Does interval `iv` start with given `tag`?
*/
def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
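A self-contained sketch of the bookkeeping described in the comments above: each tag start is paired with the start of the next tag, and once a @usecase appears only later @usecase starts survive, so the usecase comment absorbs the sections following it. The helper below is simplified (real Scaladoc comments end in */, hence the length - 2 in tagIndex).

object DocSectionsSketch {
  def tagStarts(str: String): List[Int] =
    (0 until str.length).filter(str(_) == '@').toList

  def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] =
    idxs.find(i => str.startsWith("@usecase", i)) match {
      case Some(firstUC) =>
        val (before, after) = idxs.span(_ != firstUC)
        before ::: after.filter(i => str.startsWith("@usecase", i))
      case None => idxs
    }

  def main(args: Array[String]): Unit = {
    val cmt  = "@param x a value @usecase def f(x: Int) @return the result"
    val idxs = mergeUsecaseSections(cmt, tagStarts(cmt))
    // Pair each section start with the start of the next one (or end of string).
    val sections = idxs.zip(idxs.tail :+ cmt.length)
    sections.foreach { case (a, b) => println(cmt.substring(a, b)) }
  }
}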
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index a1ec90ed3f..53c767be20 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -9,7 +9,30 @@ package util
object Position {
val tabInc = 8
+
+ /** Prints the message with the given position indication. */
+ def formatMessage(posIn: Position, msg: String, shortenFile: Boolean): String = {
+ val pos = (
+ if (posIn eq null) NoPosition
+ else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
+ else posIn
+ )
+ def file = pos.source.file
+ def prefix = if (shortenFile) file.name else file.path
+
+ pos match {
+ case FakePos(fmsg) => fmsg+" "+msg
+ case NoPosition => msg
+ case _ =>
+ List(
+ "%s:%s: %s".format(prefix, pos.line, msg),
+ pos.lineContent.stripLineEnd,
+ " " * (pos.column - 1) + "^"
+ ) mkString "\n"
+ }
+ }
}
+
/** The Position class and its subclasses represent positions of ASTs and symbols.
* Except for NoPosition and FakePos, every position refers to a SourceFile
* and to an offset in the sourcefile (its `point`). For batch compilation,
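A minimal sketch of the layout formatMessage produces for a defined position: the prefixed message, the offending source line, and a caret under the reported column. Tab handling and the FakePos/NoPosition cases are omitted; the inputs below are made up.

object FormatMessageSketch {
  def format(prefix: String, line: Int, column: Int,
             lineContent: String, msg: String): String =
    List(
      "%s:%s: %s".format(prefix, line, msg),
      lineContent.stripLineEnd,
      " " * (column - 1) + "^"   // caret under the reported column (1-based)
    ).mkString("\n")

  def main(args: Array[String]): Unit =
    println(format("Foo.scala", 3, 11, "  val x = yy + 1", "not found: value yy"))
}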
diff --git a/src/compiler/scala/tools/nsc/util/ProxyReport.scala b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
index 86cf2006bb..2f4f029308 100644
--- a/src/compiler/scala/tools/nsc/util/ProxyReport.scala
+++ b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
@@ -13,7 +13,7 @@ import scala.collection.{ mutable, immutable, generic }
trait ProxyReport {
val global: Global
import global._
- import definitions.{ getClass => gc, _ }
+ import definitions._
private object classes {
def isIgnorable(sym: Symbol) = sym :: sym.allOverriddenSymbols exists { s =>
@@ -26,13 +26,13 @@ trait ProxyReport {
methods foreach (m => m.initialize.info.paramss.flatten foreach (_.initialize))
methods
}
- lazy val GlobalClass = gc(classOf[Global].getName)
- lazy val GenericClass = getModule("scala.collection.generic").moduleClass
- lazy val CollectionClass = getModule("scala.collection").moduleClass
+ lazy val GlobalClass = getRequiredClass(classOf[Global].getName)
+ lazy val GenericClass = getRequiredModule("scala.collection.generic").moduleClass
+ lazy val CollectionClass = getRequiredModule("scala.collection").moduleClass
- def getType(name: String) = getMember(GlobalClass, name.toTypeName)
- def getColl(name: String) = getMember(CollectionClass, name.toTypeName)
- def getGeneric(name: String) = getMember(GenericClass, name.toTypeName)
+ def getType(name: String) = getMember(GlobalClass, newTypeName(name))
+ def getColl(name: String) = getMember(CollectionClass, newTypeName(name))
+ def getGeneric(name: String) = getMember(GenericClass, newTypeName(name))
// the following operations + those in RewrappingTypeProxy are all operations
// in class Type that are overridden in some subclass
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index 4405b3457b..e1ae96da8c 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -34,7 +34,7 @@ abstract class SourceFile {
* For regular source files, simply return the argument.
*/
def positionInUltimateSource(position: Position) = position
- override def toString(): String = file.name /* + ":" + content.length */
+ override def toString() = file.name
def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
def path = file.path
@@ -61,7 +61,7 @@ object NoSourceFile extends SourceFile {
def length = -1
def offsetToLine(offset: Int) = -1
def lineToOffset(index : Int) = -1
- override def toString = "NoSourceFile"
+ override def toString = "<no source file>"
}
object NoFile extends VirtualFile("<no file>", "<no file>")
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index 27239b9b9f..f7c27dceb5 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -20,7 +20,7 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val typedSelectCount = new Counter
val typerNanos = new Timer
val classReadNanos = new Timer
-
+
val failedApplyNanos = new Timer
val failedOpEqNanos = new Timer
val failedSilentNanos = new Timer
@@ -48,6 +48,7 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val subtypeImprovCount = new SubCounter(subtypeCount)
val subtypeETNanos = new Timer
val matchesPtNanos = new Timer
+ val isReferencedNanos = new Timer
val ctr1 = new Counter
val ctr2 = new Counter
val ctr3 = new Counter
@@ -137,6 +138,7 @@ abstract class StatisticsInfo {
inform("time spent in failed : "+showRelTyper(failedSilentNanos))
inform(" failed apply : "+showRelTyper(failedApplyNanos))
inform(" failed op= : "+showRelTyper(failedOpEqNanos))
+ inform("time spent ref scanning : "+showRelTyper(isReferencedNanos))
inform("micros by tree node : "+showCounts(microsByType))
inform("#visits by tree node : "+showCounts(visitsByType))
val average = new ClassCounts
diff --git a/src/compiler/scala/tools/reflect/Mock.scala b/src/compiler/scala/tools/reflect/Mock.scala
index 5301816b4b..52c052b8a2 100644
--- a/src/compiler/scala/tools/reflect/Mock.scala
+++ b/src/compiler/scala/tools/reflect/Mock.scala
@@ -25,7 +25,8 @@ trait Mock extends (Invoked => AnyRef) {
def newInvocationHandler() = new InvocationHandler {
def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]) =
- mock(Invoked(proxy, method, args))
+ try { mock(Invoked(proxy, method, args)) }
+ catch { case _: NoClassDefFoundError => sys.exit(1) }
}
}
diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala
index b705a1eac4..5f152ecabb 100644
--- a/src/compiler/scala/tools/util/EditDistance.scala
+++ b/src/compiler/scala/tools/util/EditDistance.scala
@@ -7,6 +7,8 @@ package scala.tools
package util
object EditDistance {
+ import java.lang.Character.{ toLowerCase => lower }
+
def similarString(name: String, allowed: TraversableOnce[String]): String = {
val suggested = suggestions(name, allowed.toSeq, maxDistance = 1, maxSuggestions = 2)
if (suggested.isEmpty) ""
@@ -30,23 +32,37 @@ object EditDistance {
if (m == 0) return n
val d = Array.ofDim[Int](n + 1, m + 1)
- 0 to n foreach (x => d(x)(0) = x)
- 0 to m foreach (x => d(0)(x) = x)
+ var i = 0
+ val max = math.max(m, n)
+ while (i <= max) {
+ if (i <= n)
+ d(i)(0) = i
+ if (i <= m)
+ d(0)(i) = i
+ i += 1
+ }
+ i = 1
- for (i <- 1 to n ; s_i = s(i - 1) ; j <- 1 to m) {
- val t_j = t(j - 1)
- val cost = if (s_i == t_j) 0 else 1
+ while (i <= n) {
+ val s_i = s(i - 1)
+ var j = 1
+ while (j <= m) {
+ val t_j = t(j - 1)
+ val cost = if (lower(s_i) == lower(t_j)) 0 else 1
- val c1 = d(i - 1)(j) + 1
- val c2 = d(i)(j - 1) + 1
- val c3 = d(i - 1)(j - 1) + cost
+ val c1 = d(i - 1)(j) + 1
+ val c2 = d(i)(j - 1) + 1
+ val c3 = d(i - 1)(j - 1) + cost
- d(i)(j) = c1 min c2 min c3
+ d(i)(j) = c1 min c2 min c3
- if (transpositions) {
- if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
- d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
+ if (transpositions) {
+ if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
+ d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
+ }
+ j += 1
}
+ i += 1
}
d(n)(m)
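The same distance computation in a compact standalone form, mirroring the patched loop: a Levenshtein DP over an (n+1) x (m+1) table, case-insensitive substitution cost, and an optional adjacent-transposition improvement. For illustration only.

object EditDistanceSketch {
  import java.lang.Character.{ toLowerCase => lower }

  def distance(s: String, t: String, transpositions: Boolean = true): Int = {
    val n = s.length; val m = t.length
    if (n == 0) return m
    if (m == 0) return n
    val d = Array.ofDim[Int](n + 1, m + 1)
    var i = 0
    while (i <= n) { d(i)(0) = i; i += 1 }   // deleting all of s
    var j = 0
    while (j <= m) { d(0)(j) = j; j += 1 }   // inserting all of t
    i = 1
    while (i <= n) {
      j = 1
      while (j <= m) {
        val cost = if (lower(s(i - 1)) == lower(t(j - 1))) 0 else 1
        d(i)(j) = (d(i - 1)(j) + 1) min (d(i)(j - 1) + 1) min (d(i - 1)(j - 1) + cost)
        if (transpositions && i > 1 && j > 1 &&
            s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
          d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
        j += 1
      }
      i += 1
    }
    d(n)(m)
  }

  def main(args: Array[String]): Unit = {
    println(distance("kitten", "sitting"))   // 3
    println(distance("recieve", "receive"))  // 1 thanks to the transposition case
  }
}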
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 0c359a2619..6d5988d1dd 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -36,17 +36,21 @@ class JavapClass(
lazy val parser = new JpOptions
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val EnvCtr = EnvClass.getConstructor(List[Class[_]](): _*)
-
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
+ private def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = (
+ if (failed) null
+ else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
+ )
def findBytes(path: String): Array[Byte] =
tryFile(path) getOrElse tryClass(path)
def apply(args: Seq[String]): List[JpResult] = {
- args.toList filterNot (_ startsWith "-") map { path =>
+ if (failed) Nil
+ else args.toList filterNot (_ startsWith "-") map { path =>
val bytes = findBytes(path)
if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
@@ -54,12 +58,14 @@ class JavapClass(
}
def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- PrinterCtr.newInstance(in, printWriter, env)
+ if (failed) null
+ else PrinterCtr.newInstance(in, printWriter, env)
def newEnv(opts: Seq[String]): FakeEnvironment = {
- val env: FakeEnvironment = EnvClass.newInstance()
+ lazy val env: FakeEnvironment = EnvClass.newInstance()
- parser(opts) foreach { case (name, value) =>
+ if (failed) null
+ else parser(opts) foreach { case (name, value) =>
val field = EnvClass getDeclaredField name
field setAccessible true
field.set(env, value.asInstanceOf[AnyRef])
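A sketch of the guarded-reflection idea introduced above: resolve the class once, treat a missing class as a soft failure, and short-circuit later calls instead of dereferencing null. The class name com.example.FakePrinter is a placeholder, not the real javap entry point.

object GuardedReflectionSketch {
  def tryToInitializeClass(name: String): Option[Class[_]] =
    try Some(Class.forName(name)) catch { case _: Throwable => None }

  val printerClass: Class[_] = tryToInitializeClass("com.example.FakePrinter").orNull
  private def failed = printerClass eq null

  def disassemble(paths: Seq[String]): List[String] =
    if (failed) Nil   // tool class not on the classpath: quietly do nothing
    else paths.toList.map(p => "would disassemble " + p)

  def main(args: Array[String]): Unit =
    println(disassemble(Seq("Foo.class"))) // List() unless FakePrinter exists
}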
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index 2773aad87f..02eb364abe 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -17,9 +17,10 @@ package util
* @version 1.0
*/
trait StringOps {
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: Seq[String], sep: String) = oempty(xs: _*) mkString sep
+ def onull(s: String) = if (s == null) "" else s
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+ def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
val ys = oempty(xs: _*)
if (ys.isEmpty) orElse else ys mkString sep
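The same null- and empty-tolerant joiners reproduced standalone, with a small usage example; the new varargs ojoin joins with a single space.

object StringOpsSketch {
  def onull(s: String): String = if (s == null) "" else s
  def oempty(xs: String*): Seq[String] = xs filterNot (x => x == null || x == "")
  def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
  def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep

  def main(args: Array[String]): Unit = {
    println(ojoin("final", null, "", "class"))   // "final class"
    println(ojoin(Seq("a", null, "b"), ", "))    // "a, b"
  }
}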
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index f4481b800e..8bbda5dd05 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -12,22 +12,37 @@ trait CPSUtils {
var cpsEnabled = true
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
def vprintln(x: =>Any): Unit = if (verbose) println(x)
+
+ object cpsNames {
+ val catches = newTermName("$catches")
+ val ex = newTermName("$ex")
+ val flatMapCatch = newTermName("flatMapCatch")
+ val getTrivialValue = newTermName("getTrivialValue")
+ val isTrivial = newTermName("isTrivial")
+ val reify = newTermName("reify")
+ val reifyR = newTermName("reifyR")
+ val shift = newTermName("shift")
+ val shiftR = newTermName("shiftR")
+ val shiftSuffix = newTermName("$shift")
+ val shiftUnit = newTermName("shiftUnit")
+ val shiftUnitR = newTermName("shiftUnitR")
+ }
- lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
- lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
- lazy val MarkerCPSSynth = definitions.getClass("scala.util.continuations.cpsSynth")
- lazy val MarkerCPSAdaptPlus = definitions.getClass("scala.util.continuations.cpsPlus")
- lazy val MarkerCPSAdaptMinus = definitions.getClass("scala.util.continuations.cpsMinus")
-
- lazy val Context = definitions.getClass("scala.util.continuations.ControlContext")
- lazy val ModCPS = definitions.getModule("scala.util.continuations")
-
- lazy val MethShiftUnit = definitions.getMember(ModCPS, "shiftUnit")
- lazy val MethShiftUnitR = definitions.getMember(ModCPS, "shiftUnitR")
- lazy val MethShift = definitions.getMember(ModCPS, "shift")
- lazy val MethShiftR = definitions.getMember(ModCPS, "shiftR")
- lazy val MethReify = definitions.getMember(ModCPS, "reify")
- lazy val MethReifyR = definitions.getMember(ModCPS, "reifyR")
+ lazy val MarkerCPSSym = definitions.getRequiredClass("scala.util.continuations.cpsSym")
+ lazy val MarkerCPSTypes = definitions.getRequiredClass("scala.util.continuations.cpsParam")
+ lazy val MarkerCPSSynth = definitions.getRequiredClass("scala.util.continuations.cpsSynth")
+ lazy val MarkerCPSAdaptPlus = definitions.getRequiredClass("scala.util.continuations.cpsPlus")
+ lazy val MarkerCPSAdaptMinus = definitions.getRequiredClass("scala.util.continuations.cpsMinus")
+
+ lazy val Context = definitions.getRequiredClass("scala.util.continuations.ControlContext")
+ lazy val ModCPS = definitions.getRequiredModule("scala.util.continuations")
+
+ lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit)
+ lazy val MethShiftUnitR = definitions.getMember(ModCPS, cpsNames.shiftUnitR)
+ lazy val MethShift = definitions.getMember(ModCPS, cpsNames.shift)
+ lazy val MethShiftR = definitions.getMember(ModCPS, cpsNames.shiftR)
+ lazy val MethReify = definitions.getMember(ModCPS, cpsNames.reify)
+ lazy val MethReifyR = definitions.getMember(ModCPS, cpsNames.reifyR)
lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index b383227243..cea558d2d3 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -215,13 +215,9 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
case ldef @ LabelDef(name, params, rhs) =>
if (hasAnswerTypeAnn(tree.tpe)) {
- val sym = currentOwner.newMethod(tree.pos, name)
- .setInfo(ldef.symbol.info)
- .setFlag(Flags.SYNTHETIC)
-
- val rhs1 = new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
- val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe))
- new ChangeOwnerTraverser(currentOwner, sym) traverse rhsVal
+ val sym = currentOwner.newMethod(name, tree.pos, Flags.SYNTHETIC) setInfo ldef.symbol.info
+ val rhs1 = new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
+ val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe)) changeOwner (currentOwner -> sym)
val stm1 = localTyper.typed(DefDef(sym, rhsVal))
val expr = localTyper.typed(Apply(Ident(sym), List()))
@@ -355,12 +351,12 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val valueTpe = removeAllCPSAnnotations(expr.tpe)
- val sym = currentOwner.newValue(tree.pos, unit.fresh.newName("tmp"))
- .setInfo(valueTpe)
- .setFlag(Flags.SYNTHETIC)
- .setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
-
- new ChangeOwnerTraverser(currentOwner, sym) traverse expr
+ val sym: Symbol = (
+ currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC)
+ setInfo valueTpe
+ setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))
+ )
+ expr.changeOwner(currentOwner -> sym)
(stms ::: List(ValDef(sym, expr) setType(NoType)),
Ident(sym) setType(valueTpe) setPos(tree.pos), linearize(spc, spcVal)(unit, tree.pos))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index f0c389bb11..b2a1546b4e 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -192,21 +192,20 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// val expr2 = if (catches.nonEmpty) {
val pos = catches.head.pos
- val argSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val argSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
val rhs = Match(Ident(argSym), catches1)
val fun = Function(List(ValDef(argSym)), rhs)
- val funSym = currentOwner.newValueParameter(pos, "$catches").setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
- val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member("flatMapCatch")), List(Ident(funSym))) })
+ val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym))) })
argSym.owner = fun.symbol
- val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
- chown.traverse(rhs)
+ rhs.changeOwner(currentOwner -> fun.symbol)
- val exSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
val catch2 = { localTyper.typedCases(tree, List(
CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
- Apply(Select(Ident(funSym), "isDefinedAt"), List(Ident(exSym))),
+ Apply(Select(Ident(funSym), nme.isDefinedAt), List(Ident(exSym))),
Apply(Ident(funSym), List(Ident(exSym))))
), ThrowableClass.tpe, targettp) }
@@ -263,8 +262,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val tpe = vd.symbol.tpe
val rhs1 = atOwner(vd.symbol) { transform(rhs) }
-
- new ChangeOwnerTraverser(vd.symbol, currentOwner).traverse(rhs1) // TODO: don't traverse twice
+ rhs1.changeOwner(vd.symbol -> currentOwner) // TODO: don't traverse twice
log("valdef symbol " + vd.symbol + " has type " + tpe)
log("right hand side " + rhs1 + " has type " + rhs1.tpe)
@@ -302,11 +300,11 @@ abstract class SelectiveCPSTransform extends PluginComponent with
}
def applyCombinatorFun(ctxR: Tree, body: Tree) = {
- val arg = currentOwner.newValueParameter(ctxR.pos, name).setInfo(tpe)
+ val arg = currentOwner.newValueParameter(name, ctxR.pos).setInfo(tpe)
val body1 = (new TreeSymSubstituter(List(vd.symbol), List(arg)))(body)
val fun = localTyper.typed(atPos(vd.symbol.pos) { Function(List(ValDef(arg)), body1) }) // types body as well
arg.owner = fun.symbol
- new ChangeOwnerTraverser(currentOwner, fun.symbol).traverse(body1)
+ body1.changeOwner(currentOwner -> fun.symbol)
// see note about multiple traversals above
@@ -317,11 +315,11 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("fun.tpe:"+fun.tpe)
log("return type of fun:"+body1.tpe)
- var methodName = "map"
+ var methodName = nme.map
if (body1.tpe != null) {
if (body1.tpe.typeSymbol == Context)
- methodName = "flatMap"
+ methodName = nme.flatMap
}
else
unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
@@ -347,14 +345,14 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// val <lhs> = ctx.getTrivialValue; ... <--- TODO: try/catch ??? don't bother for the moment...
// else
// ctx.flatMap { <lhs> => ... }
- val ctxSym = currentOwner.newValue(vd.symbol.name + "$shift").setInfo(rhs1.tpe)
+ val ctxSym = currentOwner.newValue(vd.symbol.name append cpsNames.shiftSuffix).setInfo(rhs1.tpe)
val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
def ctxRef = localTyper.typed(Ident(ctxSym))
val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
- val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member("getTrivialValue"))))
+ val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
val switchExpr = localTyper.typed(atPos(vd.symbol.pos) {
val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
- If(Select(ctxRef, ctxSym.tpe.member("isTrivial")),
+ If(Select(ctxRef, ctxSym.tpe.member(cpsNames.isTrivial)),
applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
applyCombinatorFun(ctxRef, body2))
})
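A toy illustration of the changeOwner(old -> new) helper the hunks above switch to: walk a tree and re-home every node owned by `from` onto `to`. The compiler does this for symbol owners; here a minimal mutable node type stands in.

object ChangeOwnerSketch {
  final class Node(var owner: String, val children: List[Node])

  implicit class ChangeOwnerOps(val root: Node) extends AnyVal {
    def changeOwner(pair: (String, String)): Node = {
      val (from, to) = pair
      def walk(n: Node): Unit = {
        if (n.owner == from) n.owner = to
        n.children foreach walk
      }
      walk(root)
      root
    }
  }

  def main(args: Array[String]): Unit = {
    val tree = new Node("rhs", List(new Node("rhs", Nil), new Node("other", Nil)))
    tree.changeOwner("rhs" -> "fun")
    println(tree.children.map(_.owner)) // List(fun, other)
  }
}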
diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala
index e9cd474b82..fee2c5a273 100644
--- a/src/detach/plugin/scala/tools/detach/Detach.scala
+++ b/src/detach/plugin/scala/tools/detach/Detach.scala
@@ -735,7 +735,7 @@ abstract class Detach extends PluginComponent
iface.sourceFile = clazz.sourceFile
iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface
val iparents = List(ObjectClass.tpe, RemoteClass.tpe, ScalaObjectClass.tpe)
- iface setInfo ClassInfoType(iparents, new Scope, iface)
+ iface setInfo ClassInfoType(iparents, newScope, iface)
// methods must throw RemoteException
iface addAnnotation remoteAnnotationInfo
@@ -749,7 +749,7 @@ abstract class Detach extends PluginComponent
// Variant 2: un-/exportObject
//val cparents = List(ObjectClass.tpe, iface.tpe,
// UnreferencedClass.tpe, ScalaObjectClass.tpe)
- iclaz setInfo ClassInfoType(cparents, new Scope, iclaz)
+ iclaz setInfo ClassInfoType(cparents, newScope, iclaz)
val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol])
proxies(clazz) = proxy
proxy
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
index 8b0338ed29..d4c5417260 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
@@ -596,6 +596,16 @@ public class JExtendedCode extends JCode {
double minDensity) {
assert keys.length == branches.length;
+ // The special case for empty keys. It makes sense to allow
+ // empty keys and generate LOOKUPSWITCH with defaultBranch
+ // only. This is exactly what javac does for a switch statement
+ // that has only a default case.
+ if (keys.length == 0) {
+ emitLOOKUPSWITCH(keys, branches, defaultBranch);
+ return;
+ }
+ // the rest of the code assumes that keys.length > 0
+
// sorting the tables
// FIXME use quicksort
for (int i = 1; i < keys.length; i++) {
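In source form, the degenerate case the comment above is about is a match whose only case is the default; a hedged Scala sketch follows. At the bytecode level such a switch may be emitted as a LOOKUPSWITCH with an empty key table, which the patched emitLOOKUPSWITCH now accepts instead of assuming keys.length > 0.

object DefaultOnlySwitch {
  def describe(tag: Int): String = tag match {
    case _ => "anything"   // no keys, only a default branch
  }

  def main(args: Array[String]): Unit =
    println(describe(42))
}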
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 4d32f0e2e1..93b6285cfb 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -32,7 +32,7 @@
<component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
<component name="EntryPointsManager">
<entry_points version="2.0" />
- </component>
+ </component>
<component name="InspectionProjectProfileManager">
<profiles>
<profile version="1.0" is_locked="false">
@@ -201,6 +201,7 @@
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
</modules>
diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE
new file mode 100644
index 0000000000..8ea9d0dd71
--- /dev/null
+++ b/src/intellij/scala.iml.SAMPLE
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../.." />
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt b/src/library/rootdoc.txt
index 6145429f1e..6145429f1e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt
+++ b/src/library/rootdoc.txt
diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala
index d6cb498185..47555938a0 100644
--- a/src/library/scala/AnyValCompanion.scala
+++ b/src/library/scala/AnyValCompanion.scala
@@ -18,4 +18,4 @@ package scala
* }}}
*
*/
-private[scala] trait AnyValCompanion extends SpecializableCompanion { }
+private[scala] trait AnyValCompanion extends Specializable { }
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index f68bbcc454..508ef25e81 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,18 +6,18 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Sun Jul 31 00:37:30 CEST 2011
+// genprod generated these sources at: Tue Feb 14 16:49:03 PST 2012
package scala
/** A function of 0 parameters.
- *
+ *
* In the following example, the definition of javaVersion is a
* shorthand for the anonymous class definition anonfun0:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val javaVersion = () => sys.props("java.version")
*
* val anonfun0 = new Function0[String] {
@@ -26,12 +26,18 @@ package scala
* assert(javaVersion() == anonfun0())
* }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+
*/
trait Function0[@specialized +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(): R
-
+
override def toString() = "<function0>"
}
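The Function0 example from the doc comment above, assembled into a runnable form; the values and the assertion come straight from the comment.

object Function0Demo extends App {
  // A nullary function literal and its expanded anonymous-class form.
  val javaVersion = () => sys.props("java.version")

  val anonfun0 = new Function0[String] {
    def apply(): String = sys.props("java.version")
  }
  assert(javaVersion() == anonfun0())
  println(javaVersion())
}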
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index dc8e67bbb0..06936e54cb 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 1 parameter.
- *
+ *
* In the following example, the definition of succ is a
* shorthand for the anonymous class definition anonfun1:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
* def apply(x: Int): Int = x + 1
@@ -24,14 +24,20 @@ package scala
* assert(succ(0) == anonfun1(0))
* }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
-trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends AnyRef { self =>
/** Apply the body of this function to the argument.
* @return the result of function application.
*/
def apply(v1: T1): R
-
+
/** Composes two instances of Function1 in a new Function1, with this function applied last.
*
* @tparam A the type to which function `g` can be applied
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index 6f17606afd..9e107fc53d 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -18,12 +18,10 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
}
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 7a73bd35bf..783a86ab5d 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -18,12 +18,10 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
}
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index c099c0436a..7f4dee6216 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -18,12 +18,10 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
}
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index f13db28f30..23853dde69 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -18,12 +18,10 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
}
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index d0345cc552..372f1cfafb 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -18,12 +18,10 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
}
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index 69ff039f5b..47c7309695 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -18,12 +18,10 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
}
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index d544d89303..8eea42de5b 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -18,12 +18,10 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
}
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 16c71e7ada..2d93af34f2 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -18,12 +18,10 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
}
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index dfd70c2353..ffca98c443 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -18,12 +18,10 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
}
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 63decd03ad..f661ea7707 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -18,12 +18,10 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
}
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index a4ad87fa97..1812f042e0 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -11,12 +11,12 @@ package scala
/** A function of 2 parameters.
- *
+ *
* In the following example, the definition of max is a
* shorthand for the anonymous class definition anonfun2:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
@@ -25,18 +25,22 @@ package scala
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(v1: T1, v2: T2): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2) == apply(x1, x2)`
- */
- def curried: T1 => T2 => R = {
+ */ def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
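A quick runnable companion to the Function2 doc above: the function literal and the explicit Function2 instance agree, and curried turns the two-argument function into a chain of one-argument functions.

object Function2Demo extends App {
  val max = (x: Int, y: Int) => if (x < y) y else x

  val anonfun2 = new Function2[Int, Int, Int] {
    def apply(x: Int, y: Int): Int = if (x < y) y else x
  }
  assert(max(0, 1) == anonfun2(0, 1))

  val curriedMax: Int => Int => Int = max.curried
  println(curriedMax(3)(7)) // 7
}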
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 7219c9be81..e4fb9f280c 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -18,12 +18,10 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
}
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index c7d55960db..9823386856 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -18,12 +18,10 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
}
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 196421c830..e708f7f49a 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -18,12 +18,10 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
}
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index 09a5aa5828..62a997c1b5 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -18,12 +18,10 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)`
- */
- def curried: T1 => T2 => T3 => R = {
+ */ def curried: T1 => T2 => T3 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
}
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index 00da84636a..86d2faeac8 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -18,12 +18,10 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)`
- */
- def curried: T1 => T2 => T3 => T4 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
}
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index 3915048906..bd9af77f12 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -18,12 +18,10 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 183a7332e1..4f601a468c 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -18,12 +18,10 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
}
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 10f8e9b599..6978b6545d 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -18,12 +18,10 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
}
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 8144b36101..903551d939 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -18,12 +18,10 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
}
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index ee04ed0915..0c273ba929 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -18,12 +18,10 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
- */
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
+ */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
}
diff --git a/src/library/scala/MatchingStrategy.scala b/src/library/scala/MatchingStrategy.scala
deleted file mode 100644
index d11598bad6..0000000000
--- a/src/library/scala/MatchingStrategy.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package scala
-
-abstract class MatchingStrategy[M[+x]] {
- // runs the matcher on the given input
- def runOrElse[T, U](in: T)(matcher: T => M[U]): U
-
- def zero: M[Nothing]
- def one[T](x: T): M[T]
- def guard[T](cond: Boolean, then: => T): M[T]
- def isSuccess[T, U](x: T)(f: T => M[U]): Boolean // used for isDefinedAt
-
- def caseResult[T](x: T): M[T] = one(x) // used as a marker to distinguish the RHS of a case (case pat => RHS) and intermediate successes
- // when deriving a partial function from a pattern match,
- // we need to distinguish the RHS of a case, which should not be evaluated when computing isDefinedAt,
- // from an intermediate result (which must be computed)
-}
-
-object MatchingStrategy {
- implicit object OptionMatchingStrategy extends MatchingStrategy[Option] {
- type M[+x] = Option[x]
- @inline def runOrElse[T, U](x: T)(f: T => M[U]): U = f(x) getOrElse (throw new MatchError(x))
- @inline def zero: M[Nothing] = None
- @inline def one[T](x: T): M[T] = Some(x)
- @inline def guard[T](cond: Boolean, then: => T): M[T] = if(cond) Some(then) else None
- @inline def isSuccess[T, U](x: T)(f: T => M[U]): Boolean = !f(x).isEmpty
- }
-}
\ No newline at end of file
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index bd498de847..6db4904b93 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -192,6 +192,13 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def exists(p: A => Boolean): Boolean =
!isEmpty && p(this.get)
+ /** Returns true if this option is empty '''or''' the predicate
+ * $p returns true when applied to this $option's value.
+ *
+ * @param p the predicate to test
+ */
+ @inline final def forall(p: A => Boolean): Boolean = isEmpty || p(this.get)
+
/** Apply the given procedure $f to the option's value,
* if it is nonempty. Otherwise, do nothing.
*
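
(Illustrative sketch, not part of the patch: the `forall` method added above is vacuously true on `None`, mirroring `exists` on the nonempty side.)

    val some: Option[Int] = Some(4)
    val none: Option[Int] = None
    assert(some.forall(_ % 2 == 0))   // predicate holds for the wrapped value
    assert(none.forall(_ % 2 == 0))   // empty option: trivially true
    assert(!some.forall(_ > 10))      // nonempty and predicate fails
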
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index b2910c2278..70caff0221 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -13,6 +13,36 @@ package scala
* The function `isDefinedAt` allows to test dynamically if a value is in
* the domain of the function.
*
+ * Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may
+ * still throw an exception, so the following code is legal:
+ *
+ * {{{
+ * val f: PartialFunction[Int, Any] = { case _ => 1/0 }
+ * }}}
+ *
+ * The main distinction between `PartialFunction` and [[scala.Function1]] is
+ * that the user of a `PartialFunction` may choose to do something different
+ * with input that is declared to be outside its domain. For example:
+ *
+ * {{{
+ * val sample = 1 to 10
+ * val isEven: PartialFunction[Int, String] = {
+ * case x if x % 2 == 0 => x+" is even"
+ * }
+ *
+ * // the method collect can use isDefinedAt to select which members to collect
+ * val evenNumbers = sample collect isEven
+ *
+ * val isOdd: PartialFunction[Int, String] = {
+ * case x if x % 2 == 1 => x+" is odd"
+ * }
+ *
+ * // the method orElse allows chaining another partial function to handle
+ * // input outside the declared domain
+ * val numbers = sample map (isEven orElse isOdd)
+ * }}}
+ *
+ *
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
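
(A short runnable variant of the scaladoc example added above, illustrative only: `isDefinedAt` reflects the declared domain, and `collect` consults it before applying the function.)

    val isEven: PartialFunction[Int, String] = {
      case x if x % 2 == 0 => x + " is even"
    }
    assert(isEven.isDefinedAt(4) && !isEven.isDefinedAt(3))
    assert(((1 to 6) collect isEven) == Seq("2 is even", "4 is even", "6 is even"))
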
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index b175fb9e1d..a2ee76500c 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -95,7 +95,8 @@ object Predef extends LowPriorityImplicits {
type Set[A] = immutable.Set[A]
val Map = immutable.Map
val Set = immutable.Set
- val AnyRef = new SpecializableCompanion {} // a dummy used by the specialization annotation
+ // @deprecated("Use scala.AnyRef instead", "2.10.0")
+ // def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
type ClassManifest[T] = scala.reflect.ClassManifest[T]
@@ -269,7 +270,7 @@ object Predef extends LowPriorityImplicits {
def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
def readLine(): String = Console.readLine()
- def readLine(text: String, args: Any*) = Console.readLine(text, args)
+ def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
def readBoolean() = Console.readBoolean()
def readByte() = Console.readByte()
def readShort() = Console.readShort()
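
(The `readLine` fix above is a varargs-forwarding fix; an analogous sketch with `String.format` — hypothetical helper names, not from the patch — shows why the `: _*` expansion matters.)

    // Without ": _*" the whole argument sequence is passed as one value.
    def broken(text: String, args: Any*) = text.format(args)
    def fixed(text: String, args: Any*)  = text.format(args: _*)
    assert(fixed("%s and %s", "a", "b") == "a and b")
    // broken("%s and %s", "a", "b") would throw a MissingFormatArgumentException
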
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index ab8b0a4505..0106ad34ee 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -23,7 +23,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
override def productArity = 1
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case _ => throw new IndexOutOfBoundsException(n.toString())
}
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 536fb2fed9..ca53b580c0 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -23,7 +23,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
override def productArity = 10
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 7d49eccc5e..3d5942f3fa 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -23,7 +23,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
override def productArity = 11
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 0e9c4a01a2..803193793c 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -23,7 +23,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
override def productArity = 12
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index a0629201d0..0c1d889624 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -23,7 +23,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 13
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 32dda81c3e..0222309a0a 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -23,7 +23,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 14
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 57851f9870..41be7ec504 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -23,7 +23,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 15
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index 75076f3b3c..accee3f965 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -23,7 +23,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 16
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index 9ee6072ffe..da80ae9a6b 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -23,7 +23,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 17
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 25d0839af1..ea25647762 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -23,7 +23,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 18
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 5464de7264..5d4347c1a8 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -23,7 +23,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 19
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 8097245926..4e6c70f463 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -23,7 +23,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
override def productArity = 2
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index b094e09aca..f23a0dee3a 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -23,7 +23,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 20
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index fa06cfb438..4a4fe0697f 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -23,7 +23,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 21
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 46038bf1a2..7ee01b85ae 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -23,7 +23,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
override def productArity = 22
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 3a4cd8fc5e..23563c9e23 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -23,7 +23,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
override def productArity = 3
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product3[+T1, +T2, +T3] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index a4d47457fa..4abaa9051b 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -23,7 +23,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
override def productArity = 4
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 9f25e70af0..9aa4af58b7 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -23,7 +23,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
override def productArity = 5
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 87fd318c68..2ca1d7c31e 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -23,7 +23,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
override def productArity = 6
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index d074503315..b7af2d3e32 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -23,7 +23,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
override def productArity = 7
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index bd6150c235..17b5e48512 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -23,7 +23,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
override def productArity = 8
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 1f042944cc..784e9a7029 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -23,7 +23,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
override def productArity = 9
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
@@ -33,7 +33,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
new file mode 100644
index 0000000000..811a735110
--- /dev/null
+++ b/src/library/scala/Specializable.scala
@@ -0,0 +1,29 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** A common supertype for companions of specializable types.
+ * Should not be extended in user code.
+ */
+trait Specializable extends SpecializableCompanion
+
+object Specializable {
+ // No type parameter in @specialized annotation.
+ trait SpecializedGroup { }
+
+ // Smuggle a list of types by way of a tuple upon which Group is parameterized.
+ class Group[T >: Null](value: T) extends SpecializedGroup { }
+
+ final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)
+ final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)
+ final val Bits32AndUp = new Group(Int, Long, Float, Double)
+ final val Integral = new Group(Byte, Short, Int, Long, Char)
+ final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double)
+ final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef)
+}
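
(Illustrative sketch, assuming the 2.10 `@specialized` support for these groups: a `Specializable.Group` can be passed to the annotation instead of listing the types by hand.)

    class Box[@specialized(Specializable.Primitives) T](val value: T)
    val b = new Box(42)   // picks the Int-specialized variant, avoiding boxing of 42
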
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala
index fbdf42fd0b..ec797c1f15 100644
--- a/src/library/scala/SpecializableCompanion.scala
+++ b/src/library/scala/SpecializableCompanion.scala
@@ -10,4 +10,5 @@ package scala
/** A common supertype for companion classes which specialization takes into account.
*/
+@deprecated("Use Specializable instead", "2.10.0")
private[scala] trait SpecializableCompanion
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
new file mode 100644
index 0000000000..6116547aa2
--- /dev/null
+++ b/src/library/scala/StringContext.scala
@@ -0,0 +1,191 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+import collection.mutable.ArrayBuffer
+
+/** A class to support string interpolation.
+ * This class supports string interpolation as outlined in Scala SIP-11.
+ * It needs to be fully documented once the SIP is accepted.
+ *
+ * @param parts The parts that make up the interpolated string,
+ * without the expressions that get inserted by interpolation.
+ */
+case class StringContext(parts: String*) {
+
+ import StringContext._
+
+ /** Checks that the number of arguments `args` is one less than the number
+ * of `parts` supplied to the enclosing `StringContext`.
+ * @param `args` The arguments to be checked.
+ * @throws An `IllegalArgumentException` if this is not the case.
+ */
+ def checkLengths(args: Any*): Unit =
+ if (parts.length != args.length + 1)
+ throw new IllegalArgumentException("wrong number of arguments for interpolated string")
+
+
+ /** The simple string interpolator.
+ *
+ * It inserts its arguments between corresponding parts of the string context.
+ * It also treats standard escape sequences as defined in the Scala specification.
+ * @param `args` The arguments to be inserted into the resulting string.
+ * @throws An `IllegalArgumentException`
+ * if the number of `parts` in the enclosing `StringContext` does not exceed
+ * the number of arguments `args` by exactly 1.
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ */
+ def s(args: Any*) = {
+ checkLengths(args: _*)
+ val pi = parts.iterator
+ val ai = args.iterator
+ val bldr = new java.lang.StringBuilder(treatEscapes(pi.next))
+ while (ai.hasNext) {
+ bldr append ai.next
+ bldr append treatEscapes(pi.next)
+ }
+ bldr.toString
+ }
+
+ /** The formatted string interpolator.
+ *
+ * It inserts its arguments between corresponding parts of the string context.
+ * It also treats standard escape sequences as defined in the Scala specification.
+ * Finally, if an interpolated expression is followed by a `parts` string
+ * that starts with a formatting specifier, the expression is formatted according to that
+ * specifier. All specifiers allowed in Java format strings are handled, and in the same
+ * way they are treated in Java.
+ *
+ * @param `args` The arguments to be inserted into the resulting string.
+ * @throws An `IllegalArgumentException`
+ * if the number of `parts` in the enclosing `StringContext` does not exceed
+ * the number of arguments `args` by exactly 1.
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ *
+ * Note: The `f` method works by assembling a format string from all the `parts` strings and using
+ * `java.lang.String.format` to format all arguments with that format string. The format string is
+ * obtained by concatenating all `parts` strings, and performing two transformations:
+ *
+ * 1. Let a _formatting position_ be a start of any `parts` string except the first one.
+ * If a formatting position does not refer to a `%` character (which is assumed to
+ * start a format specifier), then the string format specifier `%s` is inserted.
+ *
+ * 2. Any `%` characters not in formatting positions are left in the resulting
+ * string literally. This is achieved by replacing each such occurrence by a string
+ * format specifier `%s` and adding a corresponding argument string `"%"`.
+ */
+ def f(args: Any*) = {
+ checkLengths(args: _*)
+ val pi = parts.iterator
+ val ai = args.iterator
+ val bldr = new java.lang.StringBuilder
+ val args1 = new ArrayBuffer[Any]
+ def copyString(first: Boolean): Unit = {
+ val str = treatEscapes(pi.next)
+ var start = 0
+ var idx = 0
+ if (!first) {
+ if ((str charAt 0) != '%')
+ bldr append "%s"
+ idx = 1
+ }
+ val len = str.length
+ while (idx < len) {
+ if (str(idx) == '%') {
+ bldr append (str substring (start, idx)) append "%s"
+ args1 += "%"
+ start = idx + 1
+ }
+ idx += 1
+ }
+ bldr append (str substring (start, idx))
+ }
+ copyString(first = true)
+ while (pi.hasNext) {
+ args1 += ai.next
+ copyString(first = false)
+ }
+ bldr.toString format (args1: _*)
+ }
+}
+
+object StringContext {
+
+ /** An exception that is thrown if a string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ * @param str The offending string
+ * @param idx The index of the offending backslash character in `str`.
+ */
+ class InvalidEscapeException(str: String, idx: Int)
+ extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+
+ /** Expands standard Scala escape sequences in a string.
+ * Escape sequences are:
+ * control: `\b`, `\t`, `\n`, `\f`, `\r`
+ * escape: `\\`, `\"`, `\'`
+ * octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
+ *
+ * @param str A string that may contain escape sequences.
+ * @return The string with all escape sequences expanded.
+ */
+ def treatEscapes(str: String): String = {
+ lazy val bldr = new java.lang.StringBuilder
+ val len = str.length
+ var start = 0
+ var cur = 0
+ var idx = 0
+ def output(ch: Char) = {
+ bldr append str.substring(start, cur)
+ bldr append ch
+ start = idx
+ }
+ while (idx < len) {
+ cur = idx
+ if (str(idx) == '\\') {
+ idx += 1
+ if ('0' <= str(idx) && str(idx) <= '7') {
+ val leadch = str(idx)
+ var oct = leadch - '0'
+ idx += 1
+ if ('0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ if (leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ }
+ }
+ output(oct.toChar)
+ } else {
+ val ch = str(idx)
+ idx += 1
+ output {
+ ch match {
+ case 'b' => '\b'
+ case 't' => '\t'
+ case 'n' => '\n'
+ case 'f' => '\f'
+ case 'r' => '\r'
+ case '\"' => '\"'
+ case '\'' => '\''
+ case '\\' => '\\'
+ case _ => throw new InvalidEscapeException(str, cur)
+ }
+ }
+ }
+ } else {
+ idx += 1
+ }
+ }
+ if (start == 0) str
+ else (bldr append str.substring(start, idx)).toString
+ }
+}
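
(Illustrative sketch of how the new class is meant to be used: under SIP-11 the compiler desugars an interpolated literal into a `StringContext` application.)

    val name = "world"
    val greeting = StringContext("Hello, ", "!").s(name)   // what s"Hello, $name" expands to
    assert(greeting == "Hello, world!")
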
diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala
index 8a17ae87b0..8851f1ab91 100644
--- a/src/library/scala/Symbol.scala
+++ b/src/library/scala/Symbol.scala
@@ -31,8 +31,8 @@ final class Symbol private (val name: String) extends Serializable {
override def equals(other: Any) = this eq other.asInstanceOf[AnyRef]
}
-object Symbol extends UniquenessCache[String, Symbol]
-{
+object Symbol extends UniquenessCache[String, Symbol] {
+ override def apply(name: String): Symbol = super.apply(name)
protected def valueFromKey(name: String): Symbol = new Symbol(name)
protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name)
}
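
(Illustrative sketch, not part of the patch: the `apply` override still goes through the `UniquenessCache`, so symbols with equal names stay interned.)

    val a = Symbol("answer")
    val b = 'answer
    assert(a eq b)   // same cached instance, hence the reference-equality `equals` above
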
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 6d31d35e51..02fdd0cba5 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -19,5 +19,5 @@ case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
-
+
}
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index 10d554d467..ba2a02a8b2 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -28,5 +28,5 @@ case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")"
-
+
}
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 2065e4f017..7f51d172d4 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -29,5 +29,5 @@ case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1:
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")"
-
+
}
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index a463986752..4bbc6a0eab 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -31,5 +31,5 @@ case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")"
-
+
}
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 2bee0d69ad..77bd59bf2e 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -32,5 +32,5 @@ case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")"
-
+
}
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index 60f7c51e64..bf7a4ce016 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -33,5 +33,5 @@ case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")"
-
+
}
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index fc8e30580b..582c359bc6 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -34,5 +34,5 @@ case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")"
-
+
}
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index 80181f6648..a1e9a790ff 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -35,5 +35,5 @@ case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")"
-
+
}
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index 6236122be2..f531766c18 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -36,5 +36,5 @@ case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")"
-
+
}
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index dd6a819ac5..a96db25e4b 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -37,5 +37,5 @@ case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")"
-
+
}
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 65f0fd22cf..718280d68a 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -38,5 +38,5 @@ case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")"
-
+
}
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index dd6ac0cfd2..ad3f7df697 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -23,7 +23,7 @@ case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, D
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
-
+
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index cf3626909d..4a44c0bb89 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -39,5 +39,5 @@ case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")"
-
+
}
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 78b9c585c6..580a169e39 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -40,5 +40,5 @@ case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")"
-
+
}
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index 0993dfbbc3..fd3392ddea 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -41,5 +41,5 @@ case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 +
"," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")"
-
+
}
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index dfa0c962a2..0d5399308b 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -24,7 +24,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
+
@deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a919072c88..a859078bcf 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -22,5 +22,5 @@ case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
-
+
}
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 6a94f48ab4..1edfb673ee 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -23,5 +23,5 @@ case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T
extends Product5[T1, T2, T3, T4, T5]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")"
-
+
}
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 34f8224627..5b74937e58 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -24,5 +24,5 @@ case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4,
extends Product6[T1, T2, T3, T4, T5, T6]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")"
-
+
}
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index 6fc3477ba2..a7f572e9f0 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -25,5 +25,5 @@ case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4:
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")"
-
+
}
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 1e21b684fc..9bb427d689 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -26,5 +26,5 @@ case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")"
-
+
}
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 453cea31a1..4d50539e0c 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -27,5 +27,5 @@ case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")"
-
+
}
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 122eec2d90..c837775cf9 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -177,7 +177,28 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a function to all elements of this $coll
- * and concatenating the results.
+ * and using the elements of the resulting collections. For example:
+ *
+ * {{{
+ * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
+ * }}}
+ *
+ * The type of the resulting collection is guided by the static type of $coll. This might
+ * sometimes cause unexpected results. For example:
+ *
+ * {{{
+ * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
+ * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
+ *
+ * // lettersOf will return a Set[Char], not a Seq
+ * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
+ *
+ * // xs will be an Iterable[Int]
+ * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
+ *
+ * // ys will be a Map[Int, Int]
+ * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
+ * }}}
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
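
(A runnable check of the result types claimed in the scaladoc example above, illustrative only.)

    val xs = Map("a" -> List(11, 111), "b" -> List(22, 222)).flatMap(_._2)
    val ys = Map("a" -> List(1 -> 11), "b" -> List(2 -> 22)).flatMap(_._2)
    assert(!xs.isInstanceOf[Map[_, _]])   // element type Int: falls back to an Iterable[Int]
    assert(ys.isInstanceOf[Map[_, _]])    // element type (Int, Int): rebuilds a Map[Int, Int]
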
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 3e42e7812b..305f8d768d 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -471,7 +471,7 @@ trait GenTraversableOnce[+A] {
* $willNotTerminateInf
* @return an indexed sequence containing all elements of this $coll.
*/
- def toIndexedSeq[A1 >: A]: immutable.IndexedSeq[A1]
+ def toIndexedSeq: immutable.IndexedSeq[A]
/** Converts this $coll to a stream.
* $willNotTerminateInf
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index e2541f2a66..196e77c91b 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -104,7 +104,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
override /*IterableLike*/
def slice(from: Int, until: Int): Repr = {
val lo = math.max(from, 0)
- val hi = math.min(until, length)
+ val hi = math.min(math.max(until, 0), length)
val elems = math.max(hi - lo, 0)
val b = newBuilder
b.sizeHint(elems)
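
(Sketch of the boundary behaviour pinned down by the extra clamp above, illustrative only; `ArrayOps` extends `IndexedSeqOptimized`.)

    val a = Array(1, 2, 3, 4)
    assert(a.slice(1, 3).toList == List(2, 3))
    assert(a.slice(2, -1).isEmpty)          // negative `until` is clamped to 0: empty slice
    assert(a.slice(2, 99).toList == List(3, 4))
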
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index 94f7e9701b..d213e60112 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -48,7 +48,8 @@ package scala.collection
* @author Martin Odersky
* @since 2.8.1
*/
-object JavaConverters {
+
+trait JavaConverters {
import java.{ lang => jl, util => ju }
import java.util.{ concurrent => juc }
import JavaConversions._
@@ -536,3 +537,5 @@ object JavaConverters {
propertiesAsScalaMapConverter(p)
}
+
+object JavaConverters extends JavaConverters
\ No newline at end of file
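
(With `JavaConverters` now a trait, the conversions can also be mixed into user code; an illustrative sketch with a hypothetical object name, assuming the usual `asJava` decorator for `Seq`.)

    object MyInterop extends scala.collection.JavaConverters
    import MyInterop._
    val jlist: java.util.List[Int] = List(1, 2, 3).asJava
    assert(jlist.size == 3)
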
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 75c1edac66..ceb980ff80 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -13,6 +13,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
+import annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
@@ -69,4 +70,9 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
xs
}
}
+
+ @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
+ if (this.isEmpty) that.isEmpty
+ else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p)
+ }
}
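
(Illustrative sketch of the tail-recursive `corresponds` added above, exercised on `List`.)

    val xs = List(1, 2, 3)
    val ys = List("a", "bb", "ccc")
    assert((xs corresponds ys)((n, s) => s.length == n))                 // pairwise predicate holds
    assert(!(xs corresponds List("a", "bb"))((n, s) => s.length == n))   // length mismatch
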
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index b6b4bfb96d..02298ef096 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -151,8 +151,9 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
def next(): Repr = {
if (!hasNext)
Iterator.empty.next
-
- val result = (self.newBuilder ++= elms).result
+
+ val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
+ val result = (self.newBuilder ++= forcedElms).result
var i = idxs.length - 2
while(i >= 0 && idxs(i) >= idxs(i+1))
i -= 1
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index e2acc0b3e0..36d45c0c8a 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -535,7 +535,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
val b = newBuilder
var go = false
for (x <- this) {
- if (!p(x)) go = true
+ if (!go && !p(x)) go = true
if (go) b += x
}
b.result
@@ -709,6 +709,9 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* outer $coll containing this `WithFilter` instance that satisfy
* predicate `p` and concatenating the results.
*
+ * The type of the resulting collection will be guided by the static type
+ * of the outer $coll.
+ *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
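
(The first hunk above adds a guard in `dropWhile` so the predicate is no longer evaluated once dropping has stopped; an illustrative sketch using a hand-rolled `Traversable` so that `TraversableLike.dropWhile` itself is exercised.)

    var calls = 0
    val t = new Traversable[Int] {
      def foreach[U](f: Int => U) { List(1, 2, 3, 4) foreach f }
    }
    val rest = t.dropWhile { x => calls += 1; x < 2 }
    assert(rest.toList == List(2, 3, 4))
    assert(calls == 2)   // evaluated for 1 and 2 only; 3 and 4 are no longer tested
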
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 908c4c808d..5bb2e563f6 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -245,7 +245,7 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def toSeq: Seq[A] = toStream
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq() ++ seq
def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 15565e57c6..e7e797391e 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -77,7 +77,7 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def toList: List[A] = self.toList
override def toIterable: Iterable[A] = self.toIterable
override def toSeq: Seq[A] = self.toSeq
- override def toIndexedSeq[B >: A] = self.toIndexedSeq
+ override def toIndexedSeq: immutable.IndexedSeq[A] = self.toIndexedSeq
override def toBuffer[B >: A] = self.toBuffer
override def toStream: Stream[A] = self.toStream
override def toSet[B >: A]: immutable.Set[B] = self.toSet
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 12c1a75c7a..7333074778 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -116,7 +116,19 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
}
/** Converts this $coll of traversable collections into
- * a $coll in which all element collections are concatenated.
+ * a $coll formed by the elements of these traversable
+ * collections.
+ *
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(Set(1, 2, 3), Set(1, 2, 3)).flatten
+ * // xs == List(1, 2, 3, 1, 2, 3)
+ *
+ * val ys = Set(List(1, 2, 3), List(3, 2, 1)).flatten
+ * // ys == Set(1, 2, 3)
+ * }}}
*
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the element
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
new file mode 100644
index 0000000000..b235379575
--- /dev/null
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -0,0 +1,33 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import scala.collection.mutable.{ Builder, GrowingBuilder }
+
+/**
+ * @define Coll mutable.SortedSet
+ * @define coll mutable sorted
+ *
+ * @author Lucien Pereira
+ *
+ */
+abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] {
+
+ /**
+ * mutable.SetBuilder uses '+', which is not a primitive operation for anything extending
+ * mutable.SetLike. This causes serious performance issues, since each time 'elems = elems + x'
+ * is evaluated, elems is cloned (which is O(n)).
+ *
+ * Fortunately, GrowingBuilder comes to the rescue.
+ */
+ override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty)
+
+}
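
To illustrate the point made in the comment above (an informal sketch, not part of the patch): GrowingBuilder appends to a single mutable set with `+=` instead of rebuilding it with `+` for every element.

    import scala.collection.mutable
    val b = new mutable.GrowingBuilder[Int, mutable.Set[Int]](mutable.Set.empty[Int])
    (1 to 5) foreach (b += _)
    val s = b.result   // Set(1, 2, 3, 4, 5), built with amortized O(1) additions
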
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 6529fe4a79..3d723a1feb 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -61,7 +61,7 @@ trait TraversableForwarder[+A] extends Traversable[A] {
override def toList: List[A] = underlying.toList
override def toIterable: Iterable[A] = underlying.toIterable
override def toSeq: Seq[A] = underlying.toSeq
- override def toIndexedSeq[B >: A] = underlying.toIndexedSeq
+ override def toIndexedSeq = underlying.toIndexedSeq
override def toBuffer[B >: A] = underlying.toBuffer
override def toStream: Stream[A] = underlying.toStream
override def toSet[B >: A]: immutable.Set[B] = underlying.toSet
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 55ce8fa822..6b11371bec 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -111,7 +111,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// TODO: add HashMap2, HashMap3, ...
- class HashMap1[A,+B](private[HashMap] var key: A, private[HashMap] var hash: Int, private[collection] var value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
+ class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
override def size = 1
private[collection] def getKey = key
@@ -138,8 +138,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
- if (merger eq null) new HashMap1(key, hash, value, kv)
- else new HashMap1(key, hash, value, merger(this.kv, kv))
+ if (merger eq null) {
+ if(this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this
+ else new HashMap1(key, hash, value, kv)
+ } else new HashMap1(key, hash, value, merger(this.kv, kv))
} else {
var thatindex = (hash >>> level) & 0x1f
var thisindex = (this.hash >>> level) & 0x1f
@@ -176,13 +178,14 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
+ // this method may be called multiple times in a multithreaded environment, but that's ok
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = {
that.updated0(key, hash, level, value, kv, merger)
}
}
- private[collection] class HashMapCollision1[A, +B](private[HashMap] var hash: Int, var kvs: ListMap[A, B @uV])
+ private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV])
extends HashMap[A, B @uV] {
override def size = kvs.size
@@ -227,9 +230,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
class HashTrieMap[A, +B](
- private[HashMap] var bitmap: Int,
- private[collection] var elems: Array[HashMap[A, B @uV]],
- private[HashMap] var size0: Int
+ private[collection] val bitmap: Int,
+ private[collection] val elems: Array[HashMap[A, B @uV]],
+ private[collection] val size0: Int
) extends HashMap[A, B @uV] {
/*
@@ -270,13 +273,15 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
if ((bitmap & mask) != 0) {
- val elemsNew = new Array[HashMap[A,B1]](elems.length)
- Array.copy(elems, 0, elemsNew, 0, elems.length)
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.updated0(key, hash, level + 5, value, kv, merger)
- elemsNew(offset) = subNew
- new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ if(subNew eq sub) this else {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ }
} else {
val elemsNew = new Array[HashMap[A,B1]](elems.length + 1)
Array.copy(elems, 0, elemsNew, 0, offset)
@@ -294,7 +299,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.removed0(key, hash, level + 5)
- if (subNew.isEmpty) {
+ if (subNew eq sub) this
+ else if (subNew.isEmpty) {
val bitmapNew = bitmap ^ mask
if (bitmapNew != 0) {
val elemsNew = new Array[HashMap[A,B]](elems.length - 1)
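
The rewritten `updated0`/`removed0` paths above add structural sharing: when an operation does not actually change a subtree, the existing node is returned rather than a copy. A hedged sketch (my own example) of the observable effect:

    import scala.collection.immutable.HashMap
    val v = "payload"
    val m = HashMap(1 -> v, 2 -> v)
    val m2 = m.updated(1, v)   // same value reference, so nothing changes
    assert(m2 eq m)            // no new trie nodes are allocated
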
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 8cb19d4f31..79d2fb71cc 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -105,7 +105,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// TODO: add HashSet2, HashSet3, ...
- class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] {
+ class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] {
override def size = 1
override def get0(key: A, hash: Int, level: Int): Boolean =
@@ -131,7 +131,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def foreach[U](f: A => U): Unit = f(key)
}
- private[immutable] class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A])
+ private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A])
extends HashSet[A] {
override def size = ks.size
@@ -178,7 +178,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
- class HashTrieSet[A](private var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private var size0: Int)
+ class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int)
extends HashSet[A] {
override def size = size0
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index bbbef158af..e3939001d8 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -22,7 +22,7 @@ trait IndexedSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, IndexedSeq]
with IndexedSeqLike[A, IndexedSeq[A]] {
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
- override def toIndexedSeq[B >: A]: IndexedSeq[B] = this
+ override def toIndexedSeq: IndexedSeq[A] = this
override def seq: IndexedSeq[A] = this
}
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index c6f056bd81..5f3f9b717f 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -14,6 +14,7 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
import annotation.tailrec
+import java.io._
/** A class for immutable linked lists representing ordered collections
* of elements of type.
@@ -315,17 +316,27 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def head : B = hd
override def tail : List[B] = tl
override def isEmpty: Boolean = false
-
- import java.io._
-
+
private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = this
- while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
- out.writeObject(ListSerializeEnd)
+ out.writeObject(ListSerializeStart) // needed to differentiate from the legacy `::` serialization
+ out.writeObject(this.hd)
+ out.writeObject(this.tl)
}
-
+
private def readObject(in: ObjectInputStream) {
- hd = in.readObject.asInstanceOf[B]
+ val obj = in.readObject()
+ if (obj == ListSerializeStart) {
+ this.hd = in.readObject().asInstanceOf[B]
+ this.tl = in.readObject().asInstanceOf[List[B]]
+ } else oldReadObject(in, obj)
+ }
+
+ /* The oldReadObject method exists here for compatibility reasons.
+ * :: objects used to be serialized by serializing all the elements to
+ * the output stream directly, but this was broken (see SI-5374).
+ */
+ private def oldReadObject(in: ObjectInputStream, firstObject: AnyRef) {
+ hd = firstObject.asInstanceOf[B]
assert(hd != ListSerializeEnd)
var current: ::[B] = this
while (true) in.readObject match {
@@ -338,6 +349,13 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
current = list
}
}
+
+ private def oldWriteObject(out: ObjectOutputStream) {
+ var xs: List[B] = this
+ while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
+ out.writeObject(ListSerializeEnd)
+ }
+
}
/** $factoryInfo
@@ -602,5 +620,10 @@ object List extends SeqFactory[List] {
}
/** Only used for list serialization */
+@SerialVersionUID(0L - 8287891243975527522L)
+private[scala] case object ListSerializeStart
+
+/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
+
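
A round-trip sketch (illustrative only) of what the new `writeObject`/`readObject` pair, together with `oldReadObject` for the legacy format, has to keep working:

    import java.io._
    def roundTrip(x: AnyRef): AnyRef = {
      val buf = new ByteArrayOutputStream()
      val out = new ObjectOutputStream(buf)
      out.writeObject(x); out.close()
      new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray)).readObject()
    }
    assert(roundTrip(List(1, 2, 3)) == List(1, 2, 3))   // new format: ListSerializeStart, hd, tl
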
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index c92c0268b6..7537558f0b 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -286,18 +286,36 @@ extends collection.AbstractSeq[Int]
object Range {
private[immutable] val MAX_PRINT = 512 // some arbitrary value
- /** Counts in "Long arithmetic" so we can recognize overflow.
+ /** Counts the number of range elements.
+ * @pre step != 0
+ * If the size of the range exceeds Int.MaxValue, the
+ * result is -1 rather than the true count.
*/
- def count(start: Int, end: Int, step: Int): Int =
- count(start, end, step, false)
-
def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
- // faster path for the common counting range
- if (start >= 0 && end > start && end < scala.Int.MaxValue && step == 1)
- (end - start) + ( if (isInclusive) 1 else 0 )
- else
- NumericRange.count[Long](start, end, step, isInclusive)
+ if (step == 0)
+ throw new IllegalArgumentException("step cannot be 0.")
+
+ val isEmpty = (
+ if (start == end) !isInclusive
+ else if (start < end) step < 0
+ else step > 0
+ )
+ if (isEmpty) 0
+ else {
+ // Counts with Longs so we can recognize too-large ranges.
+ val gap: Long = end.toLong - start.toLong
+ val jumps: Long = gap / step
+ // Whether the size of this range is one larger than the
+ // number of full-sized jumps.
+ val hasStub = isInclusive || (gap % step != 0)
+ val result: Long = jumps + ( if (hasStub) 1 else 0 )
+
+ if (result > scala.Int.MaxValue) -1
+ else result.toInt
+ }
}
+ def count(start: Int, end: Int, step: Int): Int =
+ count(start, end, step, false)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
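
Worked examples (mine, not from the patch) of the counting logic above: `jumps = gap / step`, plus one extra element when the range is inclusive or when `step` does not divide `gap` evenly.

    Range.count(0, 10, 3, false)   // gap = 10, jumps = 3, stub since 10 % 3 != 0  => 4
    Range.count(0, 10, 5, false)   // gap = 10, jumps = 2, no stub                 => 2
    Range.count(0, 10, 5, true)    // inclusive always adds the stub               => 3
    Range.count(10, 0, -2, false)  // descending: gap = -10, step = -2             => 5
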
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index e6587f9615..2eb2f8eb09 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -929,13 +929,19 @@ self =>
/** A specialized, extra-lazy implementation of a stream iterator, so it can
* iterate as lazily as it traverses the tail.
*/
-final class StreamIterator[+A](self: Stream[A]) extends AbstractIterator[A] with Iterator[A] {
+final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterator[A] {
+ def this(self: Stream[A]) {
+ this()
+ these = new LazyCell(self)
+ }
+
// A call-by-need cell.
class LazyCell(st: => Stream[A]) {
lazy val v = st
}
- private var these = new LazyCell(self)
+ private var these: LazyCell = _
+
def hasNext: Boolean = these.v.nonEmpty
def next(): A =
if (isEmpty) Iterator.empty.next
diff --git a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
index 5e1325fef6..471e977134 100644
--- a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
+++ b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
@@ -48,7 +48,7 @@ trait TraversableOnceMethods[+A] {
// conversions
def toArray[B >: A : ClassManifest]: Array[B]
def toBuffer[B >: A]: mutable.Buffer[B]
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B]
+ def toIndexedSeq: immutable.IndexedSeq[A]
def toIterable: Iterable[A]
def toIterator: Iterator[A]
def toList: List[A]
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
new file mode 100644
index 0000000000..ba2af8f120
--- /dev/null
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -0,0 +1,241 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+
+/**
+ * An immutable AVL Tree implementation used by mutable.TreeSet
+ *
+ * @author Lucien Pereira
+ *
+ */
+private[mutable] sealed trait AVLTree[+A] extends Serializable {
+ def balance: Int
+
+ def depth: Int
+
+ def iterator[B >: A]: Iterator[B] = Iterator.empty
+
+ def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false
+
+ /**
+ * Returns a new tree containing the given element.
+ * Throws an IllegalArgumentException if the element is already present.
+ *
+ */
+ def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
+
+ /**
+ * Returns a new tree which does not contain the given element.
+ *
+ */
+ def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
+ throw new NoSuchElementException(String.valueOf(value))
+
+ /**
+ * Return a tuple containing the smallest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
+
+ /**
+ * Return a tuple containing the biggest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
+
+ def rebalance[B >: A]: AVLTree[B] = this
+
+ def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+}
+
+private case object Leaf extends AVLTree[Nothing] {
+ override val balance: Int = 0
+
+ override val depth: Int = -1
+}
+
+private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] {
+ override val balance: Int = right.depth - left.depth
+
+ override val depth: Int = math.max(left.depth, right.depth) + 1
+
+ override def iterator[B >: A]: Iterator[B] = new AVLIterator(this)
+
+ override def contains[B >: A](value: B, ordering: Ordering[B]) = {
+ val ord = ordering.compare(value, data)
+ if (0 == ord)
+ true
+ else if (ord < 0)
+ left.contains(value, ordering)
+ else
+ right.contains(value, ordering)
+ }
+
+ /**
+ * Returns a new tree containing the given element.
+ * Throws an IllegalArgumentException if the element is already present.
+ *
+ */
+ override def insert[B >: A](value: B, ordering: Ordering[B]) = {
+ val ord = ordering.compare(value, data)
+ if (0 == ord)
+ throw new IllegalArgumentException()
+ else if (ord < 0)
+ Node(data, left.insert(value, ordering), right).rebalance
+ else
+ Node(data, left, right.insert(value, ordering)).rebalance
+ }
+
+ /**
+ * Returns a new tree which does not contain the given element.
+ *
+ */
+ override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
+ val ord = ordering.compare(value, data)
+ if(ord == 0) {
+ if (Leaf == left) {
+ if (Leaf == right) {
+ Leaf
+ } else {
+ val (min, newRight) = right.removeMin
+ Node(min, left, newRight).rebalance
+ }
+ } else {
+ val (max, newLeft) = left.removeMax
+ Node(max, newLeft, right).rebalance
+ }
+ } else if (ord < 0) {
+ Node(data, left.remove(value, ordering), right).rebalance
+ } else {
+ Node(data, left, right.remove(value, ordering)).rebalance
+ }
+ }
+
+ /**
+ * Return a tuple containing the smallest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ override def removeMin[B >: A]: (B, AVLTree[B]) = {
+ if (Leaf == left)
+ (data, right)
+ else {
+ val (min, newLeft) = left.removeMin
+ (min, Node(data, newLeft, right).rebalance)
+ }
+ }
+
+ /**
+ * Return a tuple containing the biggest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ override def removeMax[B >: A]: (B, AVLTree[B]) = {
+ if (Leaf == right)
+ (data, left)
+ else {
+ val (max, newRight) = right.removeMax
+ (max, Node(data, left, newRight).rebalance)
+ }
+ }
+
+ override def rebalance[B >: A] = {
+ if (-2 == balance) {
+ if (1 == left.balance)
+ doubleRightRotation
+ else
+ rightRotation
+ } else if (2 == balance) {
+ if (-1 == right.balance)
+ doubleLeftRotation
+ else
+ leftRotation
+ } else {
+ this
+ }
+ }
+
+ override def leftRotation[B >: A] = {
+ if (Leaf != right) {
+ val r: Node[A] = right.asInstanceOf[Node[A]]
+ Node(r.data, Node(data, left, r.left), r.right)
+ } else sys.error("Should not happen.")
+ }
+
+ override def rightRotation[B >: A] = {
+ if (Leaf != left) {
+ val l: Node[A] = left.asInstanceOf[Node[A]]
+ Node(l.data, l.left, Node(data, l.right, right))
+ } else sys.error("Should not happen.")
+ }
+
+ override def doubleLeftRotation[B >: A] = {
+ if (Leaf != right) {
+ val r: Node[A] = right.asInstanceOf[Node[A]]
+ // Let's save an instanceOf by 'inlining' the left rotation
+ val rightRotated = r.rightRotation
+ Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right)
+ } else sys.error("Should not happen.")
+ }
+
+ override def doubleRightRotation[B >: A] = {
+ if (Leaf != left) {
+ val l: Node[A] = left.asInstanceOf[Node[A]]
+ // Let's save an instanceOf by 'inlining' the right rotation
+ val leftRotated = l.leftRotation
+ Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right))
+ } else sys.error("Should not happen.")
+ }
+}
+
+private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
+ val stack = mutable.ArrayStack[Node[A]](root)
+ diveLeft()
+
+ private def diveLeft(): Unit = {
+ if (Leaf != stack.head.left) {
+ val left: Node[A] = stack.head.left.asInstanceOf[Node[A]]
+ stack.push(left)
+ diveLeft()
+ }
+ }
+
+ private def engageRight(): Unit = {
+ if (Leaf != stack.head.right) {
+ val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
+ stack.pop
+ stack.push(right)
+ diveLeft()
+ } else
+ stack.pop
+ }
+
+ override def hasNext: Boolean = !stack.isEmpty
+
+ override def next(): A = {
+ if (stack.isEmpty)
+ throw new NoSuchElementException()
+ else {
+ val result = stack.head.data
+ // Let's maintain stack for the next invocation
+ engageRight()
+ result
+ }
+ }
+}
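
An informal sketch of how the tree behaves (AVLTree is private[mutable], so this is illustrative rather than user-facing API): every insert and remove rebalances through the rotations above, keeping each node's balance factor within [-1, 1].

    val empty: AVLTree[Int] = Leaf
    val t = (1 to 7).foldLeft(empty)((acc, i) => acc.insert(i, Ordering.Int))
    assert(t.contains(4, Ordering.Int))
    assert(math.abs(t.balance) <= 1)        // kept shallow by the rotations
    val t2 = t.remove(4, Ordering.Int)
    assert(!t2.contains(4, Ordering.Int))
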
diff --git a/src/library/scala/collection/mutable/BasicNode.java b/src/library/scala/collection/mutable/BasicNode.java
new file mode 100644
index 0000000000..c05009470a
--- /dev/null
+++ b/src/library/scala/collection/mutable/BasicNode.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+
+
+
+public abstract class BasicNode {
+
+ public abstract String string(int lev);
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/CNodeBase.java b/src/library/scala/collection/mutable/CNodeBase.java
new file mode 100644
index 0000000000..4374943b8d
--- /dev/null
+++ b/src/library/scala/collection/mutable/CNodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+
+
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+
+ public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
+
+ public volatile int csize = -1;
+
+ public boolean CAS_SIZE(int oldval, int nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_SIZE(int nval) {
+ updater.set(this, nval);
+ }
+
+ public int READ_SIZE() {
+ return updater.get(this);
+ }
+
+} \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala
new file mode 100644
index 0000000000..699b96b87c
--- /dev/null
+++ b/src/library/scala/collection/mutable/Ctrie.scala
@@ -0,0 +1,1075 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+
+
+import java.util.concurrent.atomic._
+import collection.immutable.{ ListMap => ImmutableListMap }
+import collection.parallel.mutable.ParCtrie
+import generic._
+import annotation.tailrec
+import annotation.switch
+
+
+
+private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
+ import INodeBase._
+
+ WRITE(bn)
+
+ def this(g: Gen) = this(null, g)
+
+ @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
+
+ @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
+
+ final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct)
+
+ @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = {
+ val m = /*READ*/mainnode
+ val prevval = /*READ*/m.prev
+ if (prevval eq null) m
+ else GCAS_Complete(m, ct)
+ }
+
+ @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else {
+ // complete the GCAS
+ val prev = /*READ*/m.prev
+ val ctr = ct.readRoot(true)
+
+ prev match {
+ case null =>
+ m
+ case fn: FailedNode[_, _] => // try to commit to previous value
+ if (CAS(m, fn.prev)) fn.prev
+ else GCAS_Complete(/*READ*/mainnode, ct)
+ case vn: MainNode[_, _] =>
+ // Assume that you've read the root from the generation G.
+ // Assume that the snapshot algorithm is correct.
+ // ==> you can only reach nodes in generations <= G.
+ // ==> `gen` is <= G.
+ // We know that `ctr.gen` is >= G.
+ // ==> if `ctr.gen` = `gen` then they are both equal to G.
+ // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G,
+ // or both
+ if ((ctr.gen eq gen) && ct.nonReadOnly) {
+ // try to commit
+ if (m.CAS_PREV(prev, null)) m
+ else GCAS_Complete(m, ct)
+ } else {
+ // try to abort
+ m.CAS_PREV(prev, new FailedNode(prev))
+ GCAS_Complete(/*READ*/mainnode, ct)
+ }
+ }
+ }
+
+ @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = {
+ n.WRITE_PREV(old)
+ if (CAS(old, n)) {
+ GCAS_Complete(n, ct)
+ /*READ*/n.prev eq null
+ } else false
+ }
+
+ @inline private def inode(cn: MainNode[K, V]) = {
+ val nin = new INode[K, V](gen)
+ nin.WRITE(cn)
+ nin
+ }
+
+ final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = {
+ val nin = new INode[K, V](ngen)
+ val main = GCAS_READ(ct)
+ nin.WRITE(main)
+ nin
+ }
+
+ /** Inserts a key value pair, overwriting the old pair if the keys match.
+ *
+ * @return true if successful, false otherwise
+ */
+ @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct)
+ else false
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && sn.k == k) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ GCAS(cn, nn, ct)
+ }
+ }
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ GCAS(cn, ncnode, ct)
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ false
+ case ln: LNode[K, V] => // 3) an l-node
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ }
+
+ /** Inserts a new key value pair, given that a specific condition is met.
+ *
+ * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
+ * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
+ */
+ @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] => cond match {
+ case null =>
+ if (sn.hc == hc && sn.k == k) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_ABSENT =>
+ if (sn.hc == hc && sn.k == k) Some(sn.v)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_PRESENT =>
+ if (sn.hc == hc && sn.k == k) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ case otherv: V =>
+ if (sn.hc == hc && sn.k == k && sn.v == otherv) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ }
+ }
+ } else cond match {
+ case null | INode.KEY_ABSENT =>
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ if (GCAS(cn, ncnode, ct)) None else null
+ case INode.KEY_PRESENT => None
+ case otherv: V => None
+ }
+ case sn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] => // 3) an l-node
+ @inline def insertln() = {
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ cond match {
+ case null =>
+ val optv = ln.get(k)
+ if (insertln()) optv else null
+ case INode.KEY_ABSENT =>
+ ln.get(k) match {
+ case None => if (insertln()) None else null
+ case optv => optv
+ }
+ case INode.KEY_PRESENT =>
+ ln.get(k) match {
+ case Some(v0) => if (insertln()) Some(v0) else null
+ case None => None
+ }
+ case otherv: V =>
+ ln.get(k) match {
+ case Some(v0) if v0 == otherv => if (insertln()) Some(otherv) else null
+ case _ => None
+ }
+ }
+ }
+ }
+
+ /** Looks up the value associated with the key.
+ *
+ * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
+ */
+ @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multinode
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ if ((bmp & flag) == 0) null // 1a) bitmap shows no binding
+ else { // 1b) bitmap contains a value - descend
+ val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ sub match {
+ case in: INode[K, V] =>
+ if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
+ else return RESTART // used to be throw RestartException
+ }
+ case sn: SNode[K, V] => // 2) singleton node
+ if (sn.hc == hc && sn.k == k) sn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ }
+ case tn: TNode[K, V] => // 3) non-live node
+ def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) {
+ clean(parent, ct, lev - 5)
+ RESTART // used to be throw RestartException
+ } else {
+ if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ cleanReadOnly(tn)
+ case ln: LNode[K, V] => // 5) an l-node
+ ln.get(k).asInstanceOf[Option[AnyRef]].orNull
+ }
+ }
+
+ /** Removes the entry for the given key, optionally only if it is currently bound to the given value.
+ *
+ * @param v if null, removes the key regardless of the value; otherwise removes only if the binding contains that exact key and value
+ * @return null if not successful, an Option[V] indicating the previous value otherwise
+ */
+ final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> lev) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) None
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ val res = sub match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && sn.k == k && (v == null || sn.v == v)) {
+ val ncn = cn.removedAt(pos, flag, gen).toContracted(lev)
+ if (GCAS(cn, ncn, ct)) Some(sn.v) else null
+ } else None
+ }
+
+ if (res == None || (res eq null)) res
+ else {
+ @tailrec def cleanParent(nonlive: AnyRef) {
+ val pm = parent.GCAS_READ(ct)
+ pm match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> (lev - 5)) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ if (sub eq this) nonlive match {
+ case tn: TNode[K, V] =>
+ val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5)
+ if (!parent.GCAS(cn, ncn, ct))
+ if (ct.readRoot().gen == startgen) cleanParent(nonlive)
+ }
+ }
+ case _ => // parent is no longer a cnode, we're done
+ }
+ }
+
+ if (parent ne null) { // never tomb at root
+ val n = GCAS_READ(ct)
+ if (n.isInstanceOf[TNode[_, _]])
+ cleanParent(n)
+ }
+
+ res
+ }
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] =>
+ if (v == null) {
+ val optv = ln.get(k)
+ val nn = ln.removed(k)
+ if (GCAS(ln, nn, ct)) optv else null
+ } else ln.get(k) match {
+ case optv @ Some(v0) if v0 == v =>
+ val nn = ln.removed(k)
+ if (GCAS(ln, nn, ct)) optv else null
+ case _ => None
+ }
+ }
+ }
+
+ private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) {
+ val m = nd.GCAS_READ(ct)
+ m match {
+ case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct)
+ case _ =>
+ }
+ }
+
+ final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null
+
+ final def cachedSize(ct: Ctrie[K, V]): Int = {
+ val m = GCAS_READ(ct)
+ m.cachedSize(ct)
+ }
+
+ /* this is a quiescent method! */
+ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
+ case null => "<null>"
+ case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc)
+ case cn: CNode[_, _] => cn.string(lev)
+ case ln: LNode[_, _] => ln.string(lev)
+ case x => "<elem: %s>".format(x)
+ })
+
+}
+
+
+private[mutable] object INode {
+ val KEY_PRESENT = new AnyRef
+ val KEY_ABSENT = new AnyRef
+
+ def newRootNode[K, V] = {
+ val gen = new Gen
+ val cn = new CNode[K, V](0, new Array(0), gen)
+ new INode[K, V](cn, gen)
+ }
+}
+
+
+private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
+ WRITE_PREV(p)
+
+ def string(lev: Int) = throw new UnsupportedOperationException
+
+ def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
+
+ override def toString = "FailedNode(%s)".format(p)
+}
+
+
+private[mutable] trait KVNode[K, V] {
+ def kvPair: (K, V)
+}
+
+
+private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends BasicNode with KVNode[K, V] {
+ final def copy = new SNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc)
+}
+
+
+private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends MainNode[K, V] with KVNode[K, V] {
+ final def copy = new TNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def cachedSize(ct: AnyRef): Int = 1
+ final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc)
+}
+
+
+private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+extends MainNode[K, V] {
+ def this(k: K, v: V) = this(ImmutableListMap(k -> v))
+ def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+ def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
+ def removed(k: K): MainNode[K, V] = {
+ val updmap = listmap - k
+ if (updmap.size > 1) new LNode(updmap)
+ else {
+ val (k, v) = updmap.iterator.next
+ new TNode(k, v, Ctrie.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+ }
+ }
+ def get(k: K) = listmap.get(k)
+ def cachedSize(ct: AnyRef): Int = listmap.size
+ def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", "))
+}
+
+
+private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen)
+extends CNodeBase[K, V] {
+
+ // this should only be called from within read-only snapshots
+ final def cachedSize(ct: AnyRef) = {
+ val currsz = READ_SIZE()
+ if (currsz != -1) currsz
+ else {
+ val sz = computeSize(ct.asInstanceOf[Ctrie[K, V]])
+ while (READ_SIZE() == -1) CAS_SIZE(-1, sz)
+ READ_SIZE()
+ }
+ }
+
+ // lends itself towards being parallelizable by choosing
+ // a random starting offset in the array
+ // => if there are concurrent size computations, they start
+ // at different positions, so they are more likely
+ // to be independent
+ private def computeSize(ct: Ctrie[K, V]): Int = {
+ var i = 0
+ var sz = 0
+ val offset = math.abs(util.Random.nextInt()) % array.length
+ while (i < array.length) {
+ val pos = (i + offset) % array.length
+ array(pos) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ct)
+ }
+ i += 1
+ }
+ sz
+ }
+
+ final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val narr = new Array[BasicNode](len)
+ Array.copy(array, 0, narr, 0, len)
+ narr(pos) = nn
+ new CNode[K, V](bitmap, narr, gen)
+ }
+
+ final def removedAt(pos: Int, flag: Int, gen: Gen) = {
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len - 1)
+ Array.copy(arr, 0, narr, 0, pos)
+ Array.copy(arr, pos + 1, narr, pos, len - pos - 1)
+ new CNode[K, V](bitmap ^ flag, narr, gen)
+ }
+
+ final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val bmp = bitmap
+ val narr = new Array[BasicNode](len + 1)
+ Array.copy(array, 0, narr, 0, pos)
+ narr(pos) = nn
+ Array.copy(array, pos, narr, pos + 1, len - pos)
+ new CNode[K, V](bmp | flag, narr, gen)
+ }
+
+ /** Returns a copy of this cnode such that all the i-nodes below it are copied
+ * to the specified generation `ngen`.
+ */
+ final def renewed(ngen: Gen, ct: Ctrie[K, V]) = {
+ var i = 0
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len)
+ while (i < len) {
+ arr(i) match {
+ case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct)
+ case bn: BasicNode => narr(i) = bn
+ }
+ i += 1
+ }
+ new CNode[K, V](bitmap, narr, ngen)
+ }
+
+ private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match {
+ case tn: TNode[_, _] => tn.copyUntombed
+ case _ => inode
+ }
+
+ final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
+ case sn: SNode[K, V] => sn.copyTombed
+ case _ => this
+ } else this
+
+ // - if the branching factor is 1 for this CNode, and the child
+ // is a tombed SNode, returns its tombed version
+ // - otherwise, if there is at least one non-null node below,
+ // returns the version of this node with at least some null-inodes
+ // removed (those existing when the op began)
+ // - if there are only null-i-nodes below, returns null
+ final def toCompressed(ct: Ctrie[K, V], lev: Int, gen: Gen) = {
+ var bmp = bitmap
+ var i = 0
+ val arr = array
+ val tmparray = new Array[BasicNode](arr.length)
+ while (i < arr.length) { // construct new bitmap
+ val sub = arr(i)
+ sub match {
+ case in: INode[K, V] =>
+ val inodemain = in.gcasRead(ct)
+ assert(inodemain ne null)
+ tmparray(i) = resurrect(in, inodemain)
+ case sn: SNode[K, V] =>
+ tmparray(i) = sn
+ }
+ i += 1
+ }
+
+ new CNode[K, V](bmp, tmparray, gen).toContracted(lev)
+ }
+
+ private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
+
+ /* quiescently consistent - don't call concurrently with anything involving a GCAS!! */
+ protected def collectElems: Seq[(K, V)] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair)
+ case in: INode[K, V] => in.mainnode match {
+ case tn: TNode[K, V] => Some(tn.kvPair)
+ case ln: LNode[K, V] => ln.listmap.toList
+ case cn: CNode[K, V] => cn.collectElems
+ }
+ }
+
+ protected def collectLocalElems: Seq[String] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
+ case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
+ }
+
+ override def toString = {
+ val elems = collectLocalElems
+ "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", "))
+ }
+}
+
+
+private[mutable] object CNode {
+
+ def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) {
+ val xidx = (xhc >>> lev) & 0x1f
+ val yidx = (yhc >>> lev) & 0x1f
+ val bmp = (1 << xidx) | (1 << yidx)
+ if (xidx == yidx) {
+ val subinode = new INode[K, V](gen)//(Ctrie.inodeupdater)
+ subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen)
+ new CNode(bmp, Array(subinode), gen)
+ } else {
+ if (xidx < yidx) new CNode(bmp, Array(x, y), gen)
+ else new CNode(bmp, Array(y, x), gen)
+ }
+ } else {
+ new LNode(x.k, x.v, y.k, y.v)
+ }
+
+}
+
+
+private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) {
+ @volatile var committed = false
+}
+
+
+/** A concurrent hash-trie or Ctrie is a concurrent thread-safe lock-free
+ * implementation of a hash array mapped trie. It is used to implement the
+ * concurrent map abstraction. It has particularly scalable concurrent insert
+ * and remove operations and is memory-efficient. It supports O(1), atomic,
+ * lock-free snapshots which are used to implement linearizable lock-free size,
+ * iterator and clear operations. The cost of evaluating the (lazy) snapshot is
+ * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable.
+ *
+ * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ */
+@SerialVersionUID(0L - 6402774413839597105L)
+final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef])
+extends ConcurrentMap[K, V]
+ with MapLike[K, V, Ctrie[K, V]]
+ with CustomParallelizable[(K, V), ParCtrie[K, V]]
+ with Serializable
+{
+ import Ctrie.computeHash
+
+ private var rootupdater = rtupd
+ @volatile var root = r
+
+ def this() = this(
+ INode.newRootNode,
+ AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+ )
+
+ /* internal methods */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ val it = iterator
+ while (it.hasNext) {
+ val (k, v) = it.next()
+ out.writeObject(k)
+ out.writeObject(v)
+ }
+ out.writeObject(CtrieSerializationEnd)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ root = INode.newRootNode
+ rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+
+ var obj: AnyRef = null
+ do {
+ obj = in.readObject()
+ if (obj != CtrieSerializationEnd) {
+ val k = obj.asInstanceOf[K]
+ val v = in.readObject().asInstanceOf[V]
+ update(k, v)
+ }
+ } while (obj != CtrieSerializationEnd)
+ }
+
+ @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+
+ final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+
+ @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ val r = /*READ*/root
+ r match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort)
+ }
+ }
+
+ @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
+ val v = /*READ*/root
+ v match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] =>
+ val RDCSS_Descriptor(ov, exp, nv) = desc
+ if (abort) {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ } else {
+ val oldmain = ov.gcasRead(this)
+ if (oldmain eq exp) {
+ if (CAS_ROOT(desc, nv)) {
+ desc.committed = true
+ nv
+ } else RDCSS_Complete(abort)
+ } else {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ }
+ }
+ }
+ }
+
+ private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
+ val desc = RDCSS_Descriptor(ov, expectedmain, nv)
+ if (CAS_ROOT(ov, desc)) {
+ RDCSS_Complete(false)
+ /*READ*/desc.committed
+ } else false
+ }
+
+ @tailrec private def inserthc(k: K, hc: Int, v: V) {
+ val r = RDCSS_READ_ROOT()
+ if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
+ }
+
+ @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+
+ val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this)
+ if (ret eq null) insertifhc(k, hc, v, cond)
+ else ret
+ }
+
+ @tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
+ if (res eq INodeBase.RESTART) lookuphc(k, hc)
+ else res
+ }
+
+ /* slower:
+ //@tailrec
+ private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ try {
+ r.rec_lookup(k, hc, 0, null, r.gen, this)
+ } catch {
+ case RestartException =>
+ lookuphc(k, hc)
+ }
+ }
+ */
+
+ @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_remove(k, v, hc, 0, null, r.gen, this)
+ if (res ne null) res
+ else removehc(k, v, hc)
+ }
+
+ def string = RDCSS_READ_ROOT().string(0)
+
+ /* public methods */
+
+ override def seq = this
+
+ override def par = new ParCtrie(this)
+
+ override def empty: Ctrie[K, V] = new Ctrie[K, V]
+
+ final def isReadOnly = rootupdater eq null
+
+ final def nonReadOnly = rootupdater ne null
+
+ /** Returns a snapshot of this Ctrie.
+ * This operation is lock-free and linearizable.
+ *
+ * The snapshot is lazily updated - the first time some branch
+ * in the snapshot or this Ctrie is accessed, it is rewritten.
+ * This means that the work of rebuilding both the snapshot and this
+ * Ctrie is distributed across all the threads doing updates or accesses
+ * subsequent to the snapshot creation.
+ */
+ @tailrec final def snapshot(): Ctrie[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater)
+ else snapshot()
+ }
+
+ /** Returns a read-only snapshot of this Ctrie.
+ * This operation is lock-free and linearizable.
+ *
+ * The snapshot is lazily updated - the first time some branch
+ * of this Ctrie is accessed, it is rewritten. The work of creating
+ * the snapshot is thus distributed across subsequent updates
+ * and accesses on this Ctrie by all threads.
+ * Note that the snapshot itself is never rewritten unlike when calling
+ * the `snapshot` method, but the obtained snapshot cannot be modified.
+ *
+ * This method is used by other methods such as `size` and `iterator`.
+ */
+ @tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null)
+ else readOnlySnapshot()
+ }
+
+ @tailrec final override def clear() {
+ val r = RDCSS_READ_ROOT()
+ if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
+ }
+
+ final def lookup(k: K): V = {
+ val hc = computeHash(k)
+ lookuphc(k, hc).asInstanceOf[V]
+ }
+
+ final override def apply(k: K): V = {
+ val hc = computeHash(k)
+ val res = lookuphc(k, hc)
+ if (res eq null) throw new NoSuchElementException
+ else res.asInstanceOf[V]
+ }
+
+ final def get(k: K): Option[V] = {
+ val hc = computeHash(k)
+ Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
+ }
+
+ override def put(key: K, value: V): Option[V] = {
+ val hc = computeHash(key)
+ insertifhc(key, hc, value, null)
+ }
+
+ final override def update(k: K, v: V) {
+ val hc = computeHash(k)
+ inserthc(k, hc, v)
+ }
+
+ final def +=(kv: (K, V)) = {
+ update(kv._1, kv._2)
+ this
+ }
+
+ final override def remove(k: K): Option[V] = {
+ val hc = computeHash(k)
+ removehc(k, null.asInstanceOf[V], hc)
+ }
+
+ final def -=(k: K) = {
+ remove(k)
+ this
+ }
+
+ def putIfAbsent(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_ABSENT)
+ }
+
+ def remove(k: K, v: V): Boolean = {
+ val hc = computeHash(k)
+ removehc(k, v, hc).nonEmpty
+ }
+
+ def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty
+ }
+
+ def replace(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_PRESENT)
+ }
+
+ def iterator: Iterator[(K, V)] =
+ if (nonReadOnly) readOnlySnapshot().iterator
+ else new CtrieIterator(0, this)
+
+ private def cachedSize() = {
+ val r = RDCSS_READ_ROOT()
+ r.cachedSize(this)
+ }
+
+ override def size: Int =
+ if (nonReadOnly) readOnlySnapshot().size
+ else cachedSize()
+
+ override def stringPrefix = "Ctrie"
+
+}
+
+
+object Ctrie extends MutableMapFactory[Ctrie] {
+ val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
+
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V]
+
+ def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V]
+
+ @inline final def computeHash[K](k: K): Int = {
+ var hcode = k.hashCode
+ hcode = hcode * 0x9e3775cd
+ hcode = java.lang.Integer.reverseBytes(hcode)
+ hcode * 0x9e3775cd
+ }
+
+}
+
+
+private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
+ var stack = new Array[Array[BasicNode]](7)
+ var stackpos = new Array[Int](7)
+ var depth = -1
+ var subiter: Iterator[(K, V)] = null
+ var current: KVNode[K, V] = null
+
+ if (mustInit) initialize()
+
+ def hasNext = (current ne null) || (subiter ne null)
+
+ def next() = if (hasNext) {
+ var r: (K, V) = null
+ if (subiter ne null) {
+ r = subiter.next()
+ checkSubiter()
+ } else {
+ r = current.kvPair
+ advance()
+ }
+ r
+ } else Iterator.empty.next()
+
+ private def readin(in: INode[K, V]) = in.gcasRead(ct) match {
+ case cn: CNode[K, V] =>
+ depth += 1
+ stack(depth) = cn.array
+ stackpos(depth) = -1
+ advance()
+ case tn: TNode[K, V] =>
+ current = tn
+ case ln: LNode[K, V] =>
+ subiter = ln.listmap.iterator
+ checkSubiter()
+ case null =>
+ current = null
+ }
+
+ @inline private def checkSubiter() = if (!subiter.hasNext) {
+ subiter = null
+ advance()
+ }
+
+ @inline private def initialize() {
+ assert(ct.isReadOnly)
+
+ val r = ct.RDCSS_READ_ROOT()
+ readin(r)
+ }
+
+ def advance(): Unit = if (depth >= 0) {
+ val npos = stackpos(depth) + 1
+ if (npos < stack(depth).length) {
+ stackpos(depth) = npos
+ stack(depth)(npos) match {
+ case sn: SNode[K, V] =>
+ current = sn
+ case in: INode[K, V] =>
+ readin(in)
+ }
+ } else {
+ depth -= 1
+ advance()
+ }
+ } else current = null
+
+ protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit)
+
+ protected def dupTo(it: CtrieIterator[K, V]) = {
+ it.level = this.level
+ it.ct = this.ct
+ it.depth = this.depth
+ it.current = this.current
+
+ // these need a deep copy
+ Array.copy(this.stack, 0, it.stack, 0, 7)
+ Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
+
+ // this one needs to be evaluated
+ if (this.subiter == null) it.subiter = null
+ else {
+ val lst = this.subiter.toList
+ this.subiter = lst.iterator
+ it.subiter = lst.iterator
+ }
+ }
+
+ /** Returns a sequence of iterators over subsets of this iterator.
+ * It's used to ease the implementation of splitters for a parallel version of the Ctrie.
+ */
+ protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
+ // the case where an LNode is being iterated
+ val it = subiter
+ subiter = null
+ advance()
+ this.level += 1
+ Seq(it, this)
+ } else if (depth == -1) {
+ this.level += 1
+ Seq(this)
+ } else {
+ var d = 0
+ while (d <= depth) {
+ val rem = stack(d).length - 1 - stackpos(d)
+ if (rem > 0) {
+ val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
+ stack(d) = arr1
+ stackpos(d) = -1
+ val it = newIterator(level + 1, ct, false)
+ it.stack(0) = arr2
+ it.stackpos(0) = -1
+ it.depth = 0
+ it.advance() // <-- fix it
+ this.level += 1
+ return Seq(this, it)
+ }
+ d += 1
+ }
+ this.level += 1
+ Seq(this)
+ }
+
+ def printDebug {
+ println("ctrie iterator")
+ println(stackpos.mkString(","))
+ println("depth: " + depth)
+ println("curr.: " + current)
+ println(stack.mkString("\n"))
+ }
+
+}
+
+
+private[mutable] object RestartException extends util.control.ControlThrowable
+
+
+/** Only used for ctrie serialization. */
+@SerialVersionUID(0L - 7237891413820527142L)
+private[mutable] case object CtrieSerializationEnd
+
+
+private[mutable] object Debug {
+ import collection._
+
+ lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
+
+ def log(s: AnyRef) = logbuffer.add(s)
+
+ def flush() {
+ for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
+ logbuffer.clear()
+ }
+
+ def clear() {
+ logbuffer.clear()
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
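
A usage sketch (mine) tying the pieces together: lock-free updates, the conditional operations backed by `insertifhc`, and constant-time snapshots.

    val ct = new Ctrie[String, Int]
    ct.put("a", 1)
    ct += ("b" -> 2)
    ct.putIfAbsent("a", 42)        // Some(1): key already present, value unchanged
    ct.replace("b", 2, 20)         // true: old value matched, so now b -> 20
    val snap = ct.snapshot()       // O(1); later updates to ct are not visible in snap
    ct.put("a", 100)
    assert(snap("a") == 1 && ct("a") == 100)
    assert(ct.readOnlySnapshot().size == 2)
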
diff --git a/src/partest-alternative/scala/tools/partest/Properties.scala b/src/library/scala/collection/mutable/Gen.java
index 2d36f163c8..0c9a30d198 100644
--- a/src/partest-alternative/scala/tools/partest/Properties.scala
+++ b/src/library/scala/collection/mutable/Gen.java
@@ -1,17 +1,18 @@
/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+package scala.collection.mutable;
-package scala.tools
-package partest
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[Application]
+
+
+
+
+final class Gen {
}
+
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index cdf1b78f29..5b3e07b826 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -52,6 +52,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
+ @transient var seedvalue: Int = tableSizeSeed
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
protected def initialSize: Int = HashTable.initialSize
private def lastPopulatedIndex = {
@@ -70,14 +74,16 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
in.defaultReadObject
- _loadFactor = in.readInt
+ _loadFactor = in.readInt()
assert(_loadFactor > 0)
- val size = in.readInt
+ val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
- val smDefined = in.readBoolean
+
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
threshold = newThreshold(_loadFactor, table.size)
@@ -86,7 +92,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B]))
+ addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B]))
index += 1
}
}
@@ -103,6 +109,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
+ out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
foreachEntry { entry =>
out.writeObject(entry.key)
@@ -314,7 +321,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
// this is of crucial importance when populating the table in parallel
protected final def index(hcode: Int) = {
val ones = table.length - 1
- val improved = improve(hcode)
+ val improved = improve(hcode, seedvalue)
val shifted = (improved >> (32 - java.lang.Integer.bitCount(ones))) & ones
shifted
}
@@ -325,6 +332,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table = c.table
tableSize = c.tableSize
threshold = c.threshold
+ seedvalue = c.seedvalue
sizemap = c.sizemap
}
if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
@@ -335,6 +343,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table,
tableSize,
threshold,
+ seedvalue,
sizemap
)
}
@@ -368,7 +377,7 @@ private[collection] object HashTable {
protected def elemHashCode(key: KeyType) = key.##
- protected final def improve(hcode: Int) = {
+ protected final def improve(hcode: Int, seed: Int) = {
/* Murmur hash
* m = 0x5bd1e995
* r = 24
@@ -396,7 +405,7 @@ private[collection] object HashTable {
* */
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
- i * 0x9e3775cd
+ i = i * 0x9e3775cd
// a slower alternative for byte reversal:
// i = (i << 16) | (i >> 16)
// i = ((i >> 8) & 0x00ff00ff) | ((i << 8) & 0xff00ff00)
@@ -420,6 +429,11 @@ private[collection] object HashTable {
// h = h ^ (h >>> 14)
// h = h + (h << 4)
// h ^ (h >>> 10)
+
+ // the rest of the computation is due to SI-5293
+ val rotation = seed % 32
+ val rotated = (i >>> rotation) | (i << (32 - rotation))
+ rotated
}
}
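For orientation, the seeded scrambling introduced above can be reproduced in isolation: the hash is multiplied, byte-reversed, multiplied again, and finally rotated right by `seed % 32` bits, so two hash tables with different seeds spread the same keys differently (the SI-5293 scenario of nested hash tables). A self-contained sketch mirroring only the arithmetic visible in this hunk:

```scala
object SeededHash {
  /** Mirrors the improve(hcode, seed) arithmetic shown above. */
  def improve(hcode: Int, seed: Int): Int = {
    var i = hcode * 0x9e3775cd
    i = java.lang.Integer.reverseBytes(i)
    i = i * 0x9e3775cd
    val rotation = seed % 32
    (i >>> rotation) | (i << (32 - rotation))   // seed-dependent rotation (SI-5293)
  }

  def main(args: Array[String]) {
    // Different table sizes yield different seeds (bitCount(tableLength - 1)),
    // so the same key is dispersed differently in nested hash tables:
    println(improve(42, 4))
    println(improve(42, 9))
  }
}
```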
@@ -442,6 +456,7 @@ private[collection] object HashTable {
val table: Array[HashEntry[A, Entry]],
val tableSize: Int,
val threshold: Int,
+ val seedvalue: Int,
val sizemap: Array[Int]
) {
import collection.DebugUtils._
@@ -452,6 +467,7 @@ private[collection] object HashTable {
append("Table: [" + arrayString(table, 0, table.length) + "]")
append("Table size: " + tableSize)
append("Load factor: " + loadFactor)
+ append("Seedvalue: " + seedvalue)
append("Threshold: " + threshold)
append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]")
}
diff --git a/src/library/scala/collection/mutable/INodeBase.java b/src/library/scala/collection/mutable/INodeBase.java
new file mode 100644
index 0000000000..487b5cfc28
--- /dev/null
+++ b/src/library/scala/collection/mutable/INodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class INodeBase<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
+
+ public static final Object RESTART = new Object();
+
+ public volatile MainNode<K, V> mainnode = null;
+
+ public final Gen gen;
+
+ public INodeBase(Gen generation) {
+ gen = generation;
+ }
+
+ public BasicNode prev() {
+ return null;
+ }
+
+}
\ No newline at end of file
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 131cdd0005..53c876ec08 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -13,6 +13,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
+import java.io._
/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
@@ -40,7 +41,7 @@ import immutable.{List, Nil, ::}
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@SerialVersionUID(3419063961353022661L)
+@SerialVersionUID(3419063961353022662L)
final class ListBuffer[A]
extends AbstractBuffer[A]
with Buffer[A]
@@ -53,6 +54,7 @@ final class ListBuffer[A]
override def companion: GenericCompanion[ListBuffer] = ListBuffer
import scala.collection.Traversable
+ import scala.collection.immutable.ListSerializeEnd
private var start: List[A] = Nil
private var last0: ::[A] = _
@@ -60,7 +62,49 @@ final class ListBuffer[A]
private var len = 0
protected def underlying: immutable.Seq[A] = start
-
+
+ private def writeObject(out: ObjectOutputStream) {
+ // write start
+ var xs: List[A] = start
+ while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
+ out.writeObject(ListSerializeEnd)
+
+ // no need to write last0
+
+ // write if exported
+ out.writeBoolean(exported)
+
+ // write the length
+ out.writeInt(len)
+ }
+
+ private def readObject(in: ObjectInputStream) {
+ // read start, set last0 appropriately
+ var elem: A = in.readObject.asInstanceOf[A]
+ if (elem == ListSerializeEnd) {
+ start = Nil
+ last0 = null
+ } else {
+ var current = new ::(elem, Nil)
+ start = current
+ elem = in.readObject.asInstanceOf[A]
+ while (elem != ListSerializeEnd) {
+ val list = new ::(elem, Nil)
+ current.tl = list
+ current = list
+ elem = in.readObject.asInstanceOf[A]
+ }
+ last0 = current
+ start
+ }
+
+ // read if exported
+ exported = in.readBoolean()
+
+ // read the length
+ len = in.readInt()
+ }
+
/** The current length of the buffer.
*
* This operation takes constant time.
@@ -355,7 +399,7 @@ final class ListBuffer[A]
private def copy() {
var cursor = start
val limit = last0.tail
- clear
+ clear()
while (cursor ne limit) {
this += cursor.head
cursor = cursor.tail
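The custom `writeObject`/`readObject` pair above streams the buffer's elements and closes the stream with the `ListSerializeEnd` marker, so deserialization rebuilds the cons chain without trusting a stored count of elements. A hedged, standalone sketch of the same marker-terminated idea (the `End` object and helper names below are illustrative, not the library's):

```scala
import java.io._
import scala.collection.mutable.ListBuffer

// Illustrative sentinel, playing the role of ListSerializeEnd.
case object End extends Serializable

object MarkerSerialization {
  def write(out: ObjectOutputStream, xs: List[String]) {
    xs foreach { x => out.writeObject(x) }   // stream the elements in order
    out.writeObject(End)                     // then the terminator
  }

  def read(in: ObjectInputStream): List[String] = {
    val buf = new ListBuffer[String]
    var elem = in.readObject()
    while (elem != End) {                    // stop at the sentinel, not at a count
      buf += elem.asInstanceOf[String]
      elem = in.readObject()
    }
    buf.toList
  }
}
```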
diff --git a/src/library/scala/collection/mutable/MainNode.java b/src/library/scala/collection/mutable/MainNode.java
new file mode 100644
index 0000000000..0578de676d
--- /dev/null
+++ b/src/library/scala/collection/mutable/MainNode.java
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.mutable;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class MainNode<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
+
+ public volatile MainNode<K, V> prev = null;
+
+ public abstract int cachedSize(Object ct);
+
+ public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_PREV(MainNode<K, V> nval) {
+ updater.set(this, nval);
+ }
+
+ // do we need this? unclear in the javadocs...
+ // apparently not - volatile reads are supposed to be safe
+ // regardless of whether there are concurrent ARFU updates
+ public MainNode<K, V> READ_PREV() {
+ return updater.get(this);
+ }
+
+}
\ No newline at end of file
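`CAS_PREV`, `WRITE_PREV` and `READ_PREV` above are thin wrappers over a single volatile field driven through an `AtomicReferenceFieldUpdater`. A rough Scala analogue of the same protocol, using a plain `AtomicReference` (one extra object per node, same compare-and-set semantics; the names are illustrative):

```scala
import java.util.concurrent.atomic.AtomicReference

// Illustrative node with a CAS-updatable `prev` pointer, mirroring
// MainNode.CAS_PREV / WRITE_PREV / READ_PREV above.
class Node[T](val value: T) {
  private val prevRef = new AtomicReference[Node[T]](null)

  def casPrev(expected: Node[T], update: Node[T]): Boolean =
    prevRef.compareAndSet(expected, update)   // succeeds only if no other thread raced us

  def writePrev(update: Node[T]) { prevRef.set(update) }

  def readPrev: Node[T] = prevRef.get
}

object CasDemo extends App {
  val a = new Node("a"); val b = new Node("b"); val c = new Node("c")
  println(a.casPrev(null, b))   // true:  null -> b
  println(a.casPrev(null, c))   // false: prev is already b, so the CAS fails
}
```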
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
new file mode 100644
index 0000000000..d87fc0b4a2
--- /dev/null
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -0,0 +1,49 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/**
+ * Base trait for mutable sorted sets.
+ *
+ * @define Coll mutable.SortedSet
+ * @define coll mutable sorted set
+ *
+ * @author Lucien Pereira
+ *
+ */
+trait SortedSet[A] extends collection.SortedSet[A] with collection.SortedSetLike[A,SortedSet[A]]
+ with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
+
+ /** Needs to be overridden in subclasses. */
+ override def empty: SortedSet[A] = SortedSet.empty[A]
+
+}
+
+/**
+ * A template for mutable sorted set companion objects.
+ *
+ * @define Coll mutable.SortedSet
+ * @define coll mutable sorted set
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted sets of type mutable.SortedSet.
+ * @define sortedSetCanBuildFromInfo
+ * Standard `CanBuildFrom` instance for sorted sets.
+ *
+ * @author Lucien Pereira
+ *
+ */
+object SortedSet extends MutableSortedSetFactory[SortedSet] {
+ implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+
+ def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
+
+}
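A short usage sketch for the new mutable `SortedSet` (assuming the usual `apply` inherited from its factory; by default it is backed by the `TreeSet` added below):

```scala
import scala.collection.mutable

val words = mutable.SortedSet("pear", "apple", "plum")   // delegates to mutable.TreeSet
words += "banana"
words -= "plum"
println(words.mkString(", "))   // apple, banana, pear -- iteration follows the Ordering
```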
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
new file mode 100644
index 0000000000..e0f1c3adfe
--- /dev/null
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -0,0 +1,123 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/**
+ * @define Coll mutable.TreeSet
+ * @define coll mutable tree set
+ * @define factoryInfo
+ * Companion object of TreeSet providing factory-related utilities.
+ *
+ * @author Lucien Pereira
+ *
+ */
+object TreeSet extends MutableSortedSetFactory[TreeSet] {
+ /**
+ * The empty set of this type
+ */
+ def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]()
+
+}
+
+/**
+ * A mutable SortedSet using an immutable AVL Tree as underlying data structure.
+ *
+ * @author Lucien Pereira
+ *
+ */
+class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
+ with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
+
+ // Projection constructor
+ private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) {
+ this();
+ this.base = base
+ this.from = from
+ this.until = until
+ }
+
+ private var base: Option[TreeSet[A]] = None
+
+ private var from: Option[A] = None
+
+ private var until: Option[A] = None
+
+ private var avl: AVLTree[A] = Leaf
+
+ private var cardinality: Int = 0
+
+ def resolve: TreeSet[A] = base.getOrElse(this)
+
+ private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean =
+ from.map(x => ordering.gteq(a, x)).getOrElse(true)
+
+ private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean =
+ until.map(x => ordering.lt(a, x)).getOrElse(true)
+
+ /**
+ * Cardinality stores the set size; unfortunately a
+ * set view (given by rangeImpl)
+ * cannot take advantage of this optimisation.
+ *
+ */
+ override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
+
+ override def stringPrefix = "TreeSet"
+
+ override def empty: TreeSet[A] = TreeSet.empty
+
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until)
+
+ override def -=(elem: A): this.type = {
+ try {
+ resolve.avl = resolve.avl.remove(elem, ordering)
+ resolve.cardinality = resolve.cardinality - 1
+ } catch {
+ case e: NoSuchElementException => ()
+ }
+ this
+ }
+
+ override def +=(elem: A): this.type = {
+ try {
+ resolve.avl = resolve.avl.insert(elem, ordering)
+ resolve.cardinality = resolve.cardinality + 1
+ } catch {
+ case e: IllegalArgumentException => ()
+ }
+ this
+ }
+
+ /**
+ * Thanks to the immutable nature of the
+ * underlying AVL Tree, we can share it with
+ * the clone, so cloning takes O(1) time.
+ *
+ */
+ override def clone: TreeSet[A] = {
+ val clone = new TreeSet[A](base, from, until)
+ clone.avl = resolve.avl
+ clone.cardinality = resolve.cardinality
+ clone
+ }
+
+ override def contains(elem: A): Boolean = {
+ isLeftAcceptable(from, ordering)(elem) &&
+ isRightAcceptable(until, ordering)(elem) &&
+ resolve.avl.contains(elem, ordering)
+ }
+
+ override def iterator: Iterator[A] = resolve.avl.iterator
+ .dropWhile(e => !isLeftAcceptable(from, ordering)(e))
+ .takeWhile(e => isRightAcceptable(until, ordering)(e))
+
+}
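To make the `base`/`from`/`until` projection mechanics concrete, a hedged example of the intended behaviour as read from this diff (assuming the factory `apply` inherited from `MutableSortedSetFactory`; the comments state expectations, not recorded output):

```scala
import scala.collection.mutable

val ts = mutable.TreeSet(1, 5, 10, 20)
val window = ts.rangeImpl(Some(5), Some(20))   // view of elements e with 5 <= e < 20

println(window.toList)   // expected: List(5, 10)
ts += 7                  // mutations on the base set are visible through the view
println(window.toList)   // expected: List(5, 7, 10)

val snapshot = ts.clone  // O(1): the immutable AVL tree is shared, not copied
ts += 100
println(snapshot.contains(100))   // expected: false
```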
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index d1453c9ce9..e304be92ae 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -34,7 +34,6 @@ import scala.collection.generic.Sizing
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
//self: EnvironmentPassingCombiner[Elem, To] =>
- private[collection] final val tasksupport = getTaskSupport
/** Combines the contents of the receiver builder and the `other` builder,
* producing a new builder containing both their elements.
@@ -62,7 +61,14 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* @return the parallel builder containing both the elements of this and the `other` builder
*/
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
-
+
+ /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared
+ * across several threads constructing the collection.
+ *
+ * By default, this method returns `false`.
+ */
+ def canBeShared: Boolean = false
+
}
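`canBeShared` is consumed by the new combiner factories in `ParIterableLike` later in this diff: a thread-safe combiner can be handed to every worker task, while an unsafe one has to be instantiated per task. A hedged sketch of that decision, with a hypothetical `newCombiner` thunk:

```scala
import scala.collection.parallel.Combiner

object CombinerSharing {
  // Hypothetical helper: choose between one shared combiner and one-per-task,
  // driven by the new Combiner.canBeShared flag.
  def chooseFactory[E, To](newCombiner: () => Combiner[E, To]): () => Combiner[E, To] = {
    val probe = newCombiner()
    if (probe.canBeShared) () => probe   // hand the same, thread-safe combiner to every task
    else newCombiner                     // otherwise allocate a fresh combiner per task
  }
}
```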
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index f0d79ada9d..7c5a835e56 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -96,17 +96,6 @@ import annotation.unchecked.uncheckedVariance
* The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible
* way to change between different collection types.
*
- * The method:
- *
- * {{{
- * def threshold(sz: Int, p: Int): Int
- * }}}
- *
- * provides an estimate on the minimum number of elements the collection has before
- * the splitting stops and depends on the number of elements in the collection. A rule of the
- * thumb is the number of elements divided by 8 times the parallelism level. This method may
- * be overridden in concrete implementations if necessary.
- *
* Since this trait extends the `Iterable` trait, methods like `size` must also
* be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
@@ -165,53 +154,17 @@ extends GenIterableLike[T, Repr]
with HasNewCombiner[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
-
+
import tasksupport._
-
+
def seq: Sequential
def repr: Repr = this.asInstanceOf[Repr]
- /** Parallel iterators are split iterators that have additional accessor and
- * transformer methods defined in terms of methods `next` and `hasNext`.
- * When creating a new parallel collection, one might want to override these
- * new methods to make them more efficient.
- *
- * Parallel iterators are augmented with signalling capabilities. This means
- * that a signalling object can be assigned to them as needed.
- *
- * The self-type ensures that signal context passing behaviour gets mixed in
- * a concrete object instance.
- */
- trait ParIterator extends IterableSplitter[T] {
- me: SignalContextPassingIterator[ParIterator] =>
- var signalDelegate: Signalling = IdleSignalling
- def repr = self.repr
- def split: Seq[IterableSplitter[T]]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator] extends ParIterator {
- // Note: This functionality must be factored out to this inner trait to avoid boilerplate.
- // Also, one could omit the cast below. However, this leads to return type inconsistencies,
- // due to inability to override the return type of _abstract overrides_.
- // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid
- // on the type of iterators it splits.
- // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here.
- abstract override def split: Seq[IterRepr] = {
- val pits = super.split
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
def hasDefiniteSize = true
def nonEmpty = size != 0
-
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
* This iterator is more specific than the iterator of the returned by `iterator`, and augmented
* with additional accessor and transformer methods.
@@ -242,18 +195,6 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
def isStrictSplitterCollection = true
- /** Some minimal number of elements after which this collection should be handled
- * sequentially by different processors.
- *
- * This method depends on the size of the collection and the parallelism level, which
- * are both specified as arguments.
- *
- * @param sz the size based on which to compute the threshold
- * @param p the parallelism level based on which to compute the threshold
- * @return the maximum number of elements for performing operations sequentially
- */
- def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p)
-
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
@@ -293,7 +234,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
trait SignallingOps[PI <: DelegatedSignalling] {
def assign(cntx: Signalling): PI
}
-
+
/* convenience task operations wrapper */
protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] {
def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) {
@@ -321,7 +262,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
it
}
}
-
+
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
@@ -331,12 +272,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
def isCombiner = cb.isInstanceOf[Combiner[_, _]]
def asCombiner = cb.asInstanceOf[Combiner[Elem, To]]
}
-
+
protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass`
def apply() = bf.apply()
}
-
+
protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr]
def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end)
@@ -346,7 +287,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def mkString: String = seq.mkString("")
override def toString = seq.mkString(stringPrefix + "(", ", ", ")")
-
+
def canEqual(other: Any) = true
/** Reduces the elements of this sequence using the specified associative binary operator.
@@ -383,7 +324,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements if the collection is nonempty, and `None` otherwise.
*/
def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op))
-
+
/** Folds the elements of this sequence using the specified associative binary operator.
* The order in which the elements are reduced is unspecified and may be nondeterministic.
*
@@ -434,15 +375,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
-
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
-
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
-
+
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
-
+
def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
-
+
def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op)
def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op)
@@ -451,20 +388,6 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /*
- /** Applies a function `f` to all the elements of $coll. Does so in a nondefined order,
- * and in parallel.
- *
- * $undefinedorder
- *
- * @tparam U the result type of the function applied to each element, which is always discarded
- * @param f function applied to each element
- */
- def pareach[U](f: T => U): Unit = {
- executeAndWaitResult(new Foreach(f, splitter))
- }
- */
-
/** Applies a function `f` to all the elements of $coll in a sequential order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
@@ -505,23 +428,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
-
+
def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new Map[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result })
+ executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result })
} else seq.map(f)(bf2seq(bf))
/*bf ifParallel { pbf =>
executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
} otherwise seq.map(f)(bf2seq(bf))*/
def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new Collect[S, That](pf, () => bf(repr).asCombiner, splitter) mapResult { _.result })
+ executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result })
} else seq.collect(pf)(bf2seq(bf))
/*bf ifParallel { pbf =>
executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
} otherwise seq.collect(pf)(bf2seq(bf))*/
def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
- executeAndWaitResult(new FlatMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result })
+ executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result })
} else seq.flatMap(f)(bf2seq(bf))
/*bf ifParallel { pbf =>
executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
@@ -563,17 +486,48 @@ self: ParIterableLike[T, Repr, Sequential] =>
def find(pred: T => Boolean): Option[T] = {
executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
-
- protected[this] def cbfactory ={
- () => newCombiner
+
+ /** Creates a combiner factory. Each combiner factory instance is used
+ * once per invocation of a parallel transformer method for a single
+ * collection.
+ *
+ * The default combiner factory creates a new combiner every time it
+ * is requested, unless the combiner is thread-safe as indicated by its
+ * `canBeShared` method. In this case, the method returns a factory which
+ * returns the same combiner each time. This is typically done for
+ * concurrent parallel collections, the combiners of which allow
+ * thread-safe access.
+ */
+ protected[this] def combinerFactory = {
+ val combiner = newCombiner
+ if (combiner.canBeShared) new CombinerFactory[T, Repr] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[T, Repr] {
+ def apply() = newCombiner
+ def doesShareCombiners = false
+ }
}
-
+
+ protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = {
+ val combiner = cbf()
+ if (combiner.canBeShared) new CombinerFactory[S, That] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[S, That] {
+ def apply() = cbf()
+ def doesShareCombiners = false
+ }
+ }
+
def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result })
+ executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.result })
}
def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result })
+ executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.result })
}
def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
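The `CombinerFactory[S, That]` type threaded through the methods above is defined elsewhere in this changeset; judging only from its use sites here, it exposes at least `apply()` and `doesShareCombiners`, roughly as sketched below (an assumption for orientation, not a quote of the real trait):

```scala
import scala.collection.parallel.Combiner

// Assumed minimal shape of CombinerFactory, reconstructed from the call sites above;
// the actual definition lives in another file of this changeset and may carry more members.
trait CombinerFactorySketch[U, Repr] {
  /** Either the same shared combiner on every call, or a fresh one per call. */
  def apply(): Combiner[U, Repr]
  /** True when apply() keeps returning one shared, thread-safe combiner. */
  def doesShareCombiners: Boolean
}
```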
@@ -581,9 +535,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
// println("case both are parallel")
val other = that.asParIterable
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val cfactory = combinerFactory(() => pbf(repr))
+ val copythis = new Copy(cfactory, splitter)
val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.splitter)
+ val othtask = new other.Copy(cfactory, other.splitter)
tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
@@ -593,7 +548,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else if (bf.isParallel) {
// println("case parallel builder, `that` not parallel")
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val copythis = new Copy(combinerFactory(() => pbf(repr)), splitter)
val copythat = wrap {
val cb = pbf(repr)
for (elem <- that.seq) cb += elem
@@ -610,19 +565,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ executeAndWaitResult(new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
- rcb => rcb.groupByKey(cbfactory)
+ rcb => rcb.groupByKey(() => combinerFactory())
})
}
def take(n: Int): Repr = {
val actualn = if (size > n) n else size
if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult {
+ else executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult {
_.result
})
}
@@ -642,7 +597,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def drop(n: Int): Repr = {
val actualn = if (size > n) n else size
if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result })
+ else executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.result })
}
private def drop_sequential(n: Int) = {
@@ -657,7 +612,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val from = unc_from min size max 0
val until = unc_until min size max from
if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result })
+ else executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.result })
}
private def slice_sequential(from: Int, until: Int): Repr = {
@@ -672,7 +627,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ executeAndWaitResult(new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) })
}
/** Computes a prefix scan of the elements of the collection.
@@ -694,7 +649,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val cbf = bf.asParallel
if (parallelismLevel > 1) {
if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
- tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
+ tree => executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => cbf(repr))) mapResult {
cb => cb.result
})
}) else (cbf(self.repr) += z).result
@@ -714,9 +669,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the longest prefix of this $coll of elements that satisfy the predicate `pred`
*/
def takeWhile(pred: T => Boolean): Repr = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val parseqspan = toSeq.takeWhile(pred)
+ executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult { _.result })
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult { _._1.result })
+ }
}
/** Splits this $coll into a prefix/suffix pair according to a predicate.
@@ -729,11 +690,22 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements satisfy `pred`, and the rest of the collection
*/
def span(pred: T => Boolean): (Repr, Repr) = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult {
- p => (p._1.result, p._2.result)
- })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val (xs, ys) = toSeq.span(pred)
+ val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.result }
+ val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.result }
+ val copyall = (copyxs parallel copyys) {
+ (xr, yr) => (xr, yr)
+ }
+ executeAndWaitResult(copyall)
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
+ p => (p._1.result, p._2.result)
+ })
+ }
}
/** Drops all elements in the longest prefix of elements that satisfy the predicate,
@@ -749,7 +721,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def dropWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result })
+ executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { _._2.result })
}
def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0)
@@ -765,7 +737,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result });
+ executeAndWaitResult(new Zip(combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result });
} else seq.zip(that)(bf2seq(bf))
def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
@@ -773,15 +745,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
val pbf = bf.asParallel
val thatseq = that.asParSeq
- executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result });
+ executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result });
} else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf))
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result });
+ executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.result });
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
- executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result })
+ executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.result })
}
def view = new ParIterableView[T, Repr, Sequential] {
@@ -801,7 +773,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toList: List[T] = seq.toList
- override def toIndexedSeq[U >: T]: collection.immutable.IndexedSeq[U] = seq.toIndexedSeq[U]
+ override def toIndexedSeq: collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
override def toStream: Stream[T] = seq.toStream
@@ -838,8 +810,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends StrictSplitterCheckTask[R, Tp] {
protected[this] val pit: IterableSplitter[T]
protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
- def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
- def split = pit.split.map(newSubtask(_)) // default split procedure
+ def shouldSplitFurther = pit.shouldSplitFurther(self.repr, parallelismLevel)
+ def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
private[parallel] override def signalAbort = pit.abort
override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
}
@@ -869,7 +841,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** Sequentially performs one task after another. */
protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
executeAndWaitResult(ft)
@@ -880,7 +852,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** Performs two tasks in parallel, and waits for both to finish. */
protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
val ftfuture = execute(ft)
@@ -895,23 +867,26 @@ self: ParIterableLike[T, Repr, Sequential] =>
@volatile var result: R1 = null.asInstanceOf[R1]
def map(r: R): R1
def leaf(prevr: Option[R1]) = {
- result = map(executeAndWaitResult(inner))
+ val initialResult = executeAndWaitResult(inner)
+ result = map(initialResult)
}
private[parallel] override def signalAbort() {
inner.signalAbort
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
}
-
+
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
-
- protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] {
+
+ protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
def leaf(prevr: Option[Unit]) = pit.foreach(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p)
}
- protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] {
+ protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Int, Count] {
// val pittxt = pit.toString
@volatile var result: Int = 0
def leaf(prevr: Option[Int]) = result = pit.count(pred)
@@ -919,8 +894,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Count) = result = result + that.result
// override def toString = "CountTask(" + pittxt + ")"
}
-
- protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] {
+
+ protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p)
@@ -930,7 +906,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] {
+ protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Fold[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p)
@@ -945,21 +922,24 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
- protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] {
+ protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Sum[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.sum(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
- protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] {
+ protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Product[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.product(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
- protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] {
+ protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Min[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p)
@@ -969,7 +949,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] {
+ protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Max[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p)
@@ -979,16 +960,16 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Map[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Map[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf()))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Collect[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf())
@@ -997,7 +978,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class FlatMap[S, That]
- (f: T => GenTraversableOnce[S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T])
+ (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
@volatile var result: Combiner[S, That] = null
def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf())
@@ -1009,28 +990,31 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] {
+ protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
- protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] {
+ protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
- protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] {
+ protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Filter[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1040,7 +1024,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Filter[U, This]) = result = result combine that.result
}
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1050,7 +1034,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Copy[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
@@ -1058,11 +1042,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Copy[U, That]) = result = result combine that.result
}
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Partition[U >: T, This >: Repr]
+ (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p)
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
@@ -1089,7 +1074,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Take[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Take[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1097,7 +1083,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp <= n) yield {
if (untilp + p.remaining < n) new Take(p.remaining, cbf, p)
@@ -1108,13 +1094,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Drop[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Drop[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield {
if (withp - p.remaining > n) new Drop(0, cbf, p)
@@ -1125,13 +1112,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Slice[U >: T, This >: Repr]
+ (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Slice[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield {
val f = (from max untilp) - untilp
@@ -1143,22 +1131,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class SplitAt[U >: T, This >: Repr]
+ (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p)
+ for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p)
}
override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
override def requiresStrictSplitters = true
}
protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
@volatile var result: (Combiner[U, This], Boolean) = null
def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
@@ -1167,7 +1156,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = (reuse(prev.map(_._1), cbf()), false)
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
}
override def merge(that: TakeWhile[U, This]) = if (result._2) {
@@ -1177,23 +1166,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
// val lst = pit.toList
// val pa = mutable.ParArray(lst: _*)
// val str = "At leaf we will iterate: " + pa.splitter.toList
- result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised
+ result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised
// println("\nAt leaf result is: " + result)
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
- result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
+ result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter())))
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
+ val pits = pit.splitWithSignalling
+ for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p)
}
override def merge(that: Span[U, This]) = result = if (result._2.size == 0) {
(result._1 combine that.result._1, that.result._2)
@@ -1203,15 +1192,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
(pits zip opits) map { p => new Zip(pbf, p._1, p._2) }
}
override def merge(that: Zip[U, S, That]) = result = result combine that.result
@@ -1219,18 +1208,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class ZipAll[U >: T, S, That]
- (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = if (pit.remaining <= len) {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) }
} else {
- val opits = othpit.psplit(pit.remaining)
+ val opits = othpit.psplitWithSignalling(pit.remaining)
val diff = len - pit.remaining
Seq(
new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed
@@ -1247,7 +1236,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
val plen = p.remaining min (len - untilp)
new CopyToArray[U, This](from + untilp, plen, array, p)
@@ -1256,7 +1245,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[U, That]]) {
@@ -1267,7 +1256,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: ToParCollection[U, That]) = result = result combine that.result
}
- protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
+ protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[(K, V), That]]) {
@@ -1304,7 +1293,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else trees(from)
protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
new CreateScanTree(untilp, p.remaining, z, op, p)
}
@@ -1314,13 +1303,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = that.result
override def requiresStrictSplitters = true
}
-
+
protected[this] class FromScanTree[U >: T, That]
- (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CanCombineFrom[Repr, U, That])
+ (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) {
- val cb = reuse(prev, cbf(self.repr))
+ val cb = reuse(prev, cbf())
iterate(tree, cb)
result = cb
}
@@ -1350,7 +1339,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* scan tree */
- protected[this] def scanBlockSize = (threshold(size, parallelismLevel) / 2) max 1
+ protected[this] def scanBlockSize = (thresholdFromSize(size, parallelismLevel) / 2) max 1
protected[this] trait ScanTree[U >: T] {
def beginsAt: Int
@@ -1390,7 +1379,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
def rightmost = this
def print(depth: Int) = println((" " * depth) + this)
}
-
+
+ /* alias methods */
+
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+
/* debug information */
private[parallel] def debugInformation = "Parallel collection: " + this.getClass
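
A quick illustration (not part of the patch; ParVector is just a convenient parallel sequence) of the `/:` and `:\` aliases added above — both simply delegate to foldLeft and foldRight:

import scala.collection.parallel.immutable.ParVector

val xs = ParVector(1, 2, 3, 4)
val sumLeft  = (0 /: xs)(_ + _)    // same as xs.foldLeft(0)(_ + _)  == 10
val sumRight = (xs :\ 0)(_ + _)    // same as xs.foldRight(0)(_ + _) == 10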
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index beb50a41e1..afd1f30903 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -66,7 +66,6 @@ self =>
new IterableSplitter[K] {
i =>
val iter = s
- var signalDelegate: Signalling = IdleSignalling
def hasNext = iter.hasNext
def next() = iter.next._1
def split = {
@@ -84,7 +83,6 @@ self =>
new IterableSplitter[V] {
i =>
val iter = s
- var signalDelegate: Signalling = IdleSignalling
def hasNext = iter.hasNext
def next() = iter.next._2
def split = {
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index d0f38b30dc..6a5ee5c69b 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -48,35 +48,6 @@ self =>
type SuperParIterator = IterableSplitter[T]
- /** An iterator that can be split into arbitrary subsets of iterators.
- * The self-type requirement ensures that the signal context passing behaviour gets mixed in
- * the concrete iterator instance in some concrete collection.
- *
- * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
- * `reverse2builder` method to ensure higher efficiency.
- */
- trait ParIterator extends SeqSplitter[T] with super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
- def split: Seq[ParIterator]
- def psplit(sizes: Int*): Seq[ParIterator]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator]
- extends ParIterator with super.SignalContextPassingIterator[IterRepr] {
- // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator`
- // to understand why we do the cast here, and have a type parameter.
- // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications.
- abstract override def psplit(sizes: Int*): Seq[IterRepr] = {
- val pits = super.psplit(sizes: _*)
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
-
/** A more refined version of the iterator found in the `ParallelIterable` trait,
* this iterator can be split into arbitrary subsets of iterators.
*
@@ -89,9 +60,7 @@ self =>
override def size = length
/** Used to iterate elements using indices */
- protected abstract class Elements(start: Int, val end: Int) extends ParIterator with BufferedIterator[T] {
- me: SignalContextPassingIterator[ParIterator] =>
-
+ protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] {
private var i = start
def hasNext = i < end
@@ -106,14 +75,14 @@ self =>
final def remaining = end - i
- def dup = new Elements(i, end) with SignalContextPassingIterator[ParIterator]
+ def dup = new Elements(i, end) {}
def split = psplit(remaining / 2, remaining - remaining / 2)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
for ((from, until) <- incr.init zip incr.tail) yield {
- new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParIterator]
+ new Elements(start + from, (start + until) min end) {}
}
}
@@ -138,7 +107,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1
+ executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1
}
/** Finds the first element satisfying some predicate.
@@ -156,7 +125,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))
+ executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))
}
/** Finds the last element satisfying some predicate.
@@ -174,7 +143,7 @@ self =>
val until = if (end >= length) length else end + 1
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx))
+ executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx))
}
def reverse: Repr = {
@@ -203,7 +172,7 @@ self =>
else if (pthat.length > length - offset) false
else {
val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter))
+ executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter))
}
} otherwise seq.startsWith(that, offset)
@@ -213,9 +182,9 @@ self =>
} otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence.
- *
+ *
* $abortsignalling
- *
+ *
* @tparam S the type of the elements of `that` sequence
* @param that the sequence to test
* @return `true` if this $coll has `that` as a suffix, `false` otherwise
@@ -226,7 +195,7 @@ self =>
else {
val ctx = new DefaultSignalling with VolatileAbort
val tlen = that.length
- executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter))
+ executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter))
}
} otherwise seq.endsWith(that)
@@ -235,13 +204,14 @@ self =>
if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
val that = patch.asParSeq
val pbf = bf.asParallel
- val pits = splitter.psplit(from, replaced, length - from - realreplaced)
- val copystart = new Copy[U, That](() => pbf(repr), pits(0))
+ val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced)
+ val cfactory = combinerFactory(() => pbf(repr))
+ val copystart = new Copy[U, That](cfactory, pits(0))
val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter)
+ val tsk = new that.Copy[U, That](cfactory, that.splitter)
tasksupport.executeAndWaitResult(tsk)
}
- val copyend = new Copy[U, That](() => pbf(repr), pits(2))
+ val copyend = new Copy[U, That](cfactory, pits(2))
executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
_.result
})
@@ -252,7 +222,7 @@ self =>
val from = 0 max fromarg
val b = bf(repr)
val repl = (r min (length - from)) max 0
- val pits = splitter.psplit(from, repl, length - from - repl)
+ val pits = splitter.psplitWithSignalling(from, repl, length - from - repl)
b ++= pits(0)
b ++= patch
b ++= pits(2)
@@ -372,7 +342,7 @@ self =>
} else result = (0, false)
protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p)
}
override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2)
@@ -391,7 +361,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
}
override def merge(that: IndexWhere) = result = if (result == -1) that.result else {
@@ -412,7 +382,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
}
override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else {
@@ -437,7 +407,7 @@ self =>
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
- protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U])
+ protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -448,7 +418,7 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op)
}
override def merge(that: SameElements[U]) = result = result && that.result
override def requiresStrictSplitters = true
@@ -460,7 +430,7 @@ self =>
def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
}
override def merge(that: Updated[U, That]) = result = result combine that.result
@@ -475,8 +445,8 @@ self =>
override def split = {
val fp = len / 2
val sp = len - len / 2
- val pits = pit.psplit(fp, sp)
- val opits = otherpit.psplit(fp, sp)
+ val pits = pit.psplitWithSignalling(fp, sp)
+ val opits = otherpit.psplitWithSignalling(fp, sp)
Seq(
new Zip(fp, pbf, pits(0), opits(0)),
new Zip(sp, pbf, pits(1), opits(1))
@@ -485,7 +455,7 @@ self =>
override def merge(that: Zip[U, S, That]) = result = result combine that.result
}
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S])
+ protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -496,7 +466,7 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op)
}
override def merge(that: Corresponds[S]) = result = result && that.result
override def requiresStrictSplitters = true
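
For context (illustrative, not from the patch): the index-based searches above hand the splitters produced by psplitWithSignalling a shared AtomicIndexFlag so subtasks can publish the best index found so far and abort early. The signalling API itself can be exercised standalone:

import scala.collection.generic.{ DefaultSignalling, AtomicIndexFlag }

val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)    // "no match found yet"
ctx.setIndexFlagIfLesser(42)      // a subtask reports a match at index 42
assert(ctx.indexFlag == 42)       // subtasks working past index 42 may now abort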
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index e04e0e9c72..8ed4583419 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
+import scala.collection.generic.IdleSignalling
import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
@@ -27,6 +28,11 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
* This method doesn't change the state of the iterator.
*/
def remaining: Int
+
+ /** For most collections, this is a cheap operation.
+ * Exceptions can override this method.
+ */
+ def isRemainingCheap = true
}
@@ -111,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += f(next)
cb
}
@@ -136,7 +142,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = {
- b.sizeHint(remaining)
+ if (isRemainingCheap) b.sizeHint(remaining)
while (hasNext) b += next
b
}
@@ -178,7 +184,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(n)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += next
cb
}
@@ -196,7 +202,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = {
before.sizeHint(at)
- after.sizeHint(remaining - at)
+ if (isRemainingCheap) after.sizeHint(remaining - at)
var left = at
while (left > 0) {
before += next
@@ -222,7 +228,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
val curr = next
if (p(curr)) before += curr
else {
- after.sizeHint(remaining + 1)
+ if (isRemainingCheap) after.sizeHint(remaining + 1)
after += curr
isBefore = false
}
@@ -262,7 +268,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining min otherpit.remaining)
+ if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
while (hasNext && otherpit.hasNext) {
cb += ((next, otherpit.next))
}
@@ -270,7 +276,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining max that.remaining)
+ if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
while (this.hasNext) cb += ((this.next, thatelem))
while (that.hasNext) cb += ((thiselem, that.next))
@@ -329,7 +335,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
/* transformers */
def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = {
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[T]()
while (hasNext) lst ::= next
while (lst != Nil) {
@@ -341,7 +347,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[S]()
while (hasNext) lst ::= f(next)
while (lst != Nil) {
@@ -353,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var j = 0
while (hasNext) {
if (j == index) {
@@ -380,12 +386,22 @@ extends AugmentedIterableIterator[T]
with DelegatedSignalling
{
self =>
-
+
+ var signalDelegate: Signalling = IdleSignalling
+
/** Creates a copy of this iterator. */
def dup: IterableSplitter[T]
def split: Seq[IterableSplitter[T]]
-
+
+ def splitWithSignalling: Seq[IterableSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel)
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
*
@@ -421,7 +437,6 @@ self =>
/* iterator transformers */
class Taken(taken: Int) extends IterableSplitter[T] {
- var signalDelegate = self.signalDelegate
var remaining = taken min self.remaining
def hasNext = remaining > 0
def next = { remaining -= 1; self.next }
@@ -450,7 +465,7 @@ self =>
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
class Mapped[S](f: T => S) extends IterableSplitter[S] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext
def next = f(self.next)
def remaining = self.remaining
@@ -461,7 +476,7 @@ self =>
override def map[S](f: T => S) = new Mapped(f)
class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
protected var curr: IterableSplitter[U] = self
def hasNext = if (curr.hasNext) true else if (curr eq self) {
curr = that
@@ -480,7 +495,7 @@ self =>
def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that)
class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
def remaining = self.remaining min that.remaining
@@ -497,7 +512,7 @@ self =>
class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
extends IterableSplitter[(U, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
if (that.hasNext) (self.next, that.next)
@@ -534,6 +549,18 @@ self =>
def split: Seq[SeqSplitter[T]]
def psplit(sizes: Int*): Seq[SeqSplitter[T]]
+ override def splitWithSignalling: Seq[SeqSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = {
+ val pits = psplit(sizes: _*)
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
* method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number
@@ -626,13 +653,13 @@ self =>
def reverse: SeqSplitter[T] = {
val pa = mutable.ParArray.fromTraversables(self).reverse
- new pa.ParArrayIterator with pa.SCPI {
+ new pa.ParArrayIterator {
override def reverse = self
}
}
class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
private[this] val trio = {
val pits = self.psplit(from, replaced, self.remaining - from - replaced)
(pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2)
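
A simplified sketch of the isRemainingCheap guard introduced in this file (MiniRemainsIterator and copyAll are made-up names): sizeHint is only worth calling when remaining is cheap to compute, which snapshot-based splitters such as ParCtrie's later declare it is not.

import scala.collection.mutable.ArrayBuffer

trait MiniRemainsIterator[T] extends Iterator[T] {
  def remaining: Int
  def isRemainingCheap: Boolean = true   // expensive-to-count iterators override this to false
}

def copyAll[T](it: MiniRemainsIterator[T]): ArrayBuffer[T] = {
  val b = new ArrayBuffer[T]
  if (it.isRemainingCheap) b.sizeHint(it.remaining)   // skip a potentially costly size computation
  while (it.hasNext) b += it.next()
  b
}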
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 873291fb2d..b705909cad 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -88,7 +88,7 @@ trait Tasks {
if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
} else if (this.throwable != null || that.throwable != null) {
- println("merging this thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
+ println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
}
}
@@ -118,7 +118,7 @@ trait Tasks {
/** Try to cancel the task.
* @return `true` if cancellation is successful.
*/
- def tryCancel: Boolean
+ def tryCancel(): Boolean
/** If the task has been cancelled successfully, those syncing on it may
* automatically be notified, depending on the implementation. If they
* aren't, this release method should be called after processing the
@@ -161,32 +161,39 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def split: Seq[TaskImpl[R, Tp]]
- def compute() = if (body.shouldSplitFurther) internal else body.tryLeaf(None)
+ def compute() = if (body.shouldSplitFurther) {
+ internal()
+ release()
+ } else {
+ body.tryLeaf(None)
+ release()
+ }
def internal() = {
var last = spawnSubtasks()
-
+
last.body.tryLeaf(None)
+ last.release()
body.result = last.body.result
body.throwable = last.body.throwable
-
+
while (last.next != null) {
// val lastresult = Option(last.body.result)
val beforelast = last
last = last.next
- if (last.tryCancel) {
+ if (last.tryCancel()) {
// println("Done with " + beforelast.body + ", next direct is " + last.body)
last.body.tryLeaf(Some(body.result))
- last.release
+ last.release()
} else {
// println("Done with " + beforelast.body + ", next sync is " + last.body)
- last.sync
+ last.sync()
}
// println("Merging " + body + " with " + last.body)
body.tryMerge(last.body.repr)
}
}
-
+
def spawnSubtasks() = {
var last: TaskImpl[R, Tp] = null
var head: TaskImpl[R, Tp] = this
@@ -196,7 +203,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
for (t <- subtasks.tail.reverse) {
t.next = last
last = t
- t.start
+ t.start()
}
} while (head.body.shouldSplitFurther);
head.next = last
@@ -230,12 +237,12 @@ trait ThreadPoolTasks extends Tasks {
// utb: var future: Future[_] = null
@volatile var owned = false
@volatile var completed = false
-
+
def start() = synchronized {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
executor.synchronized {
- incrTasks
+ incrTasks()
executor.submit(this)
}
}
@@ -249,9 +256,9 @@ trait ThreadPoolTasks extends Tasks {
//assert(executor.getCorePoolSize == (coresize + 1))
}
}
- if (!completed) this.wait
+ while (!completed) this.wait
}
- def tryCancel = synchronized {
+ def tryCancel() = synchronized {
// utb: future.cancel(false)
if (!owned) {
// debuglog("Cancelling " + body)
@@ -259,7 +266,7 @@ trait ThreadPoolTasks extends Tasks {
true
} else false
}
- def run = {
+ def run() = {
// utb: compute
var isOkToRun = false
synchronized {
@@ -270,17 +277,17 @@ trait ThreadPoolTasks extends Tasks {
}
if (isOkToRun) {
// debuglog("Running body of " + body)
- compute
- release
+ compute()
} else {
// just skip
// debuglog("skipping body of " + body)
}
}
- override def release = synchronized {
+ override def release() = synchronized {
+ //println("releasing: " + this + ", body: " + this.body)
completed = true
executor.synchronized {
- decrTasks
+ decrTasks()
}
this.notifyAll
}
@@ -305,10 +312,10 @@ trait ThreadPoolTasks extends Tasks {
val t = newTaskImpl(task)
// debuglog("-----------> Executing without wait: " + task)
- t.start
+ t.start()
() => {
- t.sync
+ t.sync()
t.body.forwardThrowable
t.body.result
}
@@ -318,9 +325,9 @@ trait ThreadPoolTasks extends Tasks {
val t = newTaskImpl(task)
// debuglog("-----------> Executing with wait: " + task)
- t.start
-
- t.sync
+ t.start()
+
+ t.sync()
t.body.forwardThrowable
t.body.result
}
@@ -369,7 +376,7 @@ trait FutureThreadPoolTasks extends Tasks {
def sync() = future.get
def tryCancel = false
def run = {
- compute
+ compute()
}
}
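
The sync/release changes above follow the standard monitor idiom; a standalone sketch (Latch is an illustrative name) of why the wait now sits in a while loop — it tolerates spurious wakeups and a release() racing ahead of the waiter:

class Latch {
  private var completed = false
  def sync(): Unit = synchronized {
    while (!completed) wait()    // re-check the condition after every wakeup
  }
  def release(): Unit = synchronized {
    completed = true
    notifyAll()
  }
}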
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 1fec522a93..7adf51cffb 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -52,7 +52,7 @@ self =>
protected[this] override def newCombiner = HashMapCombiner[K, V]
- def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size)
override def seq = trie
@@ -69,11 +69,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashMapIterator] =>
+ extends IterableSplitter[(K, V)] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -84,24 +81,24 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = {
- val phit = new ParHashMapIterator(it, sz) with SCPI
+ val phit = new ParHashMapIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashMapIterator(fst, fstlength) with SCPI,
- new ParHashMapIterator(snd, sndlength) with SCPI
+ new ParHashMapIterator(fst, fstlength),
+ new ParHashMapIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) }
}
def next(): (K, V) = {
i += 1
@@ -304,14 +301,21 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
evaluateCombiners(trie)
trie.asInstanceOf[HashMap[K, Repr]]
}
- private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): Unit = trie match {
+ private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match {
case hm1: HashMap.HashMap1[_, _] =>
- hm1.asInstanceOf[HashMap.HashMap1[K, Repr]].value = hm1.value.result
- hm1.kv = null
+ val evaledvalue = hm1.value.result
+ new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
case hmc: HashMap.HashMapCollision1[_, _] =>
- hmc.asInstanceOf[HashMap.HashMapCollision1[K, Repr]].kvs = hmc.kvs map { p => (p._1, p._2.result) }
- case htm: HashMap.HashTrieMap[_, _] =>
- for (hm <- htm.elems) evaluateCombiners(hm)
+ val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) }
+ new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs)
+ case htm: HashMap.HashTrieMap[k, v] =>
+ var i = 0
+ while (i < htm.elems.length) {
+ htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]]
+ i += 1
+ }
+ htm.asInstanceOf[HashMap[K, Repr]]
+ case empty => empty.asInstanceOf[HashMap[K, Repr]]
}
def split = {
val fp = howmany / 2
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index 8332167b90..1cf0ccd391 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -49,7 +49,7 @@ self =>
override def empty: ParHashSet[T] = new ParHashSet[T]
- def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size)
override def seq = trie
@@ -66,11 +66,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
-
class ParHashSetIterator(var triter: Iterator[T], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashSetIterator] =>
+ extends IterableSplitter[T] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -81,24 +78,24 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[T]) = {
- val phit = new ParHashSetIterator(it, sz) with SCPI
+ val phit = new ParHashSetIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashSetIterator(fst, fstlength) with SCPI,
- new ParHashSetIterator(snd, sndlength) with SCPI
+ new ParHashSetIterator(fst, fstlength),
+ new ParHashSetIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) }
}
def next(): T = {
i += 1
@@ -111,6 +108,7 @@ self =>
}
}
+
/** $factoryInfo
* @define Coll immutable.ParHashSet
* @define coll immutable parallel hash set
@@ -124,6 +122,7 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
def fromTrie[T](t: HashSet[T]) = new ParHashSet(t)
}
+
private[immutable] abstract class HashSetCombiner[T]
extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
@@ -207,6 +206,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
}
}
+
object HashSetCombiner {
def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {}
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 350e64739f..64e07ce4ff 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -10,6 +10,7 @@ package scala.collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.IterableSplitter
import scala.collection.Iterator
@@ -41,13 +42,10 @@ self =>
@inline final def apply(idx: Int) = range.apply(idx);
- def splitter = new ParRangeIterator with SCPI
-
- type SCPI = SignalContextPassingIterator[ParRangeIterator]
+ def splitter = new ParRangeIterator
class ParRangeIterator(range: Range = self.range)
- extends ParIterator {
- me: SignalContextPassingIterator[ParRangeIterator] =>
+ extends SeqSplitter[Int] {
override def toString = "ParRangeIterator(over: " + range + ")"
private var ind = 0
private val len = range.length
@@ -64,15 +62,15 @@ self =>
private def rangeleft = range.drop(ind)
- def dup = new ParRangeIterator(rangeleft) with SCPI
+ def dup = new ParRangeIterator(rangeleft)
def split = {
val rleft = rangeleft
val elemleft = rleft.length
- if (elemleft < 2) Seq(new ParRangeIterator(rleft) with SCPI)
+ if (elemleft < 2) Seq(new ParRangeIterator(rleft))
else Seq(
- new ParRangeIterator(rleft.take(elemleft / 2)) with SCPI,
- new ParRangeIterator(rleft.drop(elemleft / 2)) with SCPI
+ new ParRangeIterator(rleft.take(elemleft / 2)),
+ new ParRangeIterator(rleft.drop(elemleft / 2))
)
}
@@ -81,7 +79,7 @@ self =>
for (sz <- sizes) yield {
val fronttaken = rleft.take(sz)
rleft = rleft.drop(sz)
- new ParRangeIterator(fronttaken) with SCPI
+ new ParRangeIterator(fronttaken)
}
}
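
Illustrative only: ParRangeIterator above splits with take/drop, which are O(1) on Range. A standalone equivalent of its psplit logic (psplitRange is a made-up helper):

def psplitRange(r: Range, sizes: Int*): Seq[Range] = {
  var rest = r
  for (sz <- sizes) yield {
    val front = rest.take(sz)
    rest = rest.drop(sz)
    front
  }
}

// psplitRange(1 to 10, 3, 3, 4) == Seq(1 to 3, 4 to 6, 7 to 10)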
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index fdeaefc3ff..5d9c431bc1 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -48,22 +48,19 @@ extends ParSeq[T]
def this() = this(Vector())
- type SCPI = SignalContextPassingIterator[ParVectorIterator]
-
def apply(idx: Int) = vector.apply(idx)
def length = vector.length
def splitter: SeqSplitter[T] = {
- val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI
+ val pit = new ParVectorIterator(vector.startIndex, vector.endIndex)
vector.initIterator(pit)
pit
}
override def seq: Vector[T] = vector
- class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator {
- self: SCPI =>
+ class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 7b1e39d092..63635537d7 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -22,23 +22,19 @@ package immutable {
override def seq = throw new UnsupportedOperationException
def update(idx: Int, elem: T) = throw new UnsupportedOperationException
- type SCPI = SignalContextPassingIterator[ParIterator]
-
- class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
-
+ class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] {
def remaining = until - i
def hasNext = i < until
def next = { i += 1; elem }
- def dup = new ParIterator(i, until, elem) with SCPI
+ def dup = new ParIterator(i, until, elem)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
- for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) with SCPI
+ for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem)
}
def split = psplit(remaining / 2, remaining - remaining / 2)
}
- def splitter = new ParIterator with SCPI
+ def splitter = new ParIterator
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index a1eb3beb0c..72a8184b10 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -19,6 +19,7 @@ import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.CHECK_RATE
import scala.collection.mutable.ArraySeq
@@ -74,17 +75,13 @@ self =>
override def seq = arrayseq
- type SCPI = SignalContextPassingIterator[ParArrayIterator]
-
protected[parallel] def splitter: ParArrayIterator = {
- val pit = new ParArrayIterator with SCPI
+ val pit = new ParArrayIterator
pit
}
class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array)
- extends super.ParIterator {
- me: SignalContextPassingIterator[ParArrayIterator] =>
-
+ extends SeqSplitter[T] {
def hasNext = i < until
def next = {
@@ -95,9 +92,9 @@ self =>
def remaining = until - i
- def dup = new ParArrayIterator(i, until, arr) with SCPI
+ def dup = new ParArrayIterator(i, until, arr)
- def psplit(sizesIncomplete: Int*): Seq[ParIterator] = {
+ def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = {
var traversed = i
val total = sizesIncomplete.reduceLeft(_ + _)
val left = remaining
@@ -106,19 +103,19 @@ self =>
val start = traversed
val end = (traversed + sz) min until
traversed = end
- new ParArrayIterator(start, end, arr) with SCPI
+ new ParArrayIterator(start, end, arr)
} else {
- new ParArrayIterator(traversed, traversed, arr) with SCPI
+ new ParArrayIterator(traversed, traversed, arr)
}
}
- override def split: Seq[ParIterator] = {
+ override def split: Seq[ParArrayIterator] = {
val left = remaining
if (left >= 2) {
val splitpoint = left / 2
val sq = Seq(
- new ParArrayIterator(i, i + splitpoint, arr) with SCPI,
- new ParArrayIterator(i + splitpoint, until, arr) with SCPI)
+ new ParArrayIterator(i, i + splitpoint, arr),
+ new ParArrayIterator(i + splitpoint, until, arr))
i = until
sq
} else {
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
new file mode 100644
index 0000000000..cec2e6886d
--- /dev/null
+++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala
@@ -0,0 +1,194 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel.mutable
+
+
+
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.IterableSplitter
+import scala.collection.mutable.BasicNode
+import scala.collection.mutable.TNode
+import scala.collection.mutable.LNode
+import scala.collection.mutable.CNode
+import scala.collection.mutable.SNode
+import scala.collection.mutable.INode
+import scala.collection.mutable.Ctrie
+import scala.collection.mutable.CtrieIterator
+
+
+
+/** Parallel Ctrie collection.
+ *
+ * It has its bulk operations parallelized, but uses the snapshot operation
+ * to create the splitter. This means that parallel bulk operations can be
+ * called concurrently with the modifications.
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ */
+final class ParCtrie[K, V] private[collection] (private val ctrie: Ctrie[K, V])
+extends ParMap[K, V]
+ with GenericParMapTemplate[K, V, ParCtrie]
+ with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]]
+ with ParCtrieCombiner[K, V]
+ with Serializable
+{
+ import collection.parallel.tasksupport._
+
+ def this() = this(new Ctrie)
+
+ override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie
+
+ override def empty: ParCtrie[K, V] = ParCtrie.empty
+
+ protected[this] override def newCombiner = ParCtrie.newCombiner
+
+ override def seq = ctrie
+
+ def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true)
+
+ override def clear() = ctrie.clear()
+
+ def result = this
+
+ def get(key: K): Option[V] = ctrie.get(key)
+
+ def put(key: K, value: V): Option[V] = ctrie.put(key, value)
+
+ def update(key: K, value: V): Unit = ctrie.update(key, value)
+
+ def remove(key: K): Option[V] = ctrie.remove(key)
+
+ def +=(kv: (K, V)): this.type = {
+ ctrie.+=(kv)
+ this
+ }
+
+ def -=(key: K): this.type = {
+ ctrie.-=(key)
+ this
+ }
+
+ override def size = {
+ val in = ctrie.readRoot()
+ val r = in.gcasRead(ctrie)
+ r match {
+ case tn: TNode[_, _] => tn.cachedSize(ctrie)
+ case ln: LNode[_, _] => ln.cachedSize(ctrie)
+ case cn: CNode[_, _] =>
+ executeAndWaitResult(new Size(0, cn.array.length, cn.array))
+ cn.cachedSize(ctrie)
+ }
+ }
+
+ override def stringPrefix = "ParCtrie"
+
+ /* tasks */
+
+ /** Computes Ctrie size in parallel. */
+ class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
+ var result = -1
+ def leaf(prev: Option[Int]) = {
+ var sz = 0
+ var i = offset
+ val until = offset + howmany
+ while (i < until) {
+ array(i) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ctrie)
+ }
+ i += 1
+ }
+ result = sz
+ }
+ def split = {
+ val fp = howmany / 2
+ Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array))
+ }
+ def shouldSplitFurther = howmany > 1
+ override def merge(that: Size) = result = result + that.result
+ }
+
+}
+
+
+private[collection] class ParCtrieSplitter[K, V](lev: Int, ct: Ctrie[K, V], mustInit: Boolean)
+extends CtrieIterator[K, V](lev, ct, mustInit)
+ with IterableSplitter[(K, V)]
+{
+ // only evaluated if `remaining` is invoked (which is not used by most tasks)
+ lazy val totalsize = ct.par.size
+ var iterated = 0
+
+ protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit)
+
+ override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
+ val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
+ level < maxsplits
+ }
+
+ def dup = {
+ val it = newIterator(0, ct, false)
+ dupTo(it)
+ it.iterated = this.iterated
+ it
+ }
+
+ override def next() = {
+ iterated += 1
+ super.next()
+ }
+
+ def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]]
+
+ override def isRemainingCheap = false
+
+ def remaining: Int = totalsize - iterated
+}
+
+
+/** Only used within the `ParCtrie`. */
+private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] {
+
+ def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
+ throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
+
+ val thiz = this.asInstanceOf[ParCtrie[K, V]]
+ val that = other.asInstanceOf[ParCtrie[K, V]]
+ val result = new ParCtrie[K, V]
+
+ result ++= thiz.iterator
+ result ++= that.iterator
+
+ result
+ }
+
+ override def canBeShared = true
+
+}
+
+
+object ParCtrie extends ParMapFactory[ParCtrie] {
+
+ def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V]
+
+ def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V]
+
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V]
+
+}
+
+
+
+
+
+
+
+
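
A hedged usage sketch of the new collection (assuming the 2.10-era package layout shown in this patch): because the splitter is built from a read-only snapshot, parallel bulk operations can run while the Ctrie is concurrently modified.

import scala.collection.parallel.mutable.ParCtrie

val pc = new ParCtrie[Int, String]()
for (i <- 0 until 1000) pc.put(i, i.toString)

// a parallel aggregate over a consistent snapshot...
val evens = pc.count { case (k, _) => k % 2 == 0 }

// ...remains well-defined even if another thread updates pc meanwhile
pc.update(1000, "late arrival")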
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 37065e32fc..21a5b05749 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -12,7 +12,6 @@ package mutable
-
import collection.generic._
import collection.mutable.DefaultEntry
import collection.mutable.HashEntry
@@ -56,7 +55,7 @@ self =>
override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
- def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
+ def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
override def size = tableSize
@@ -93,14 +92,11 @@ self =>
override def stringPrefix = "ParHashMap"
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
- extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) with ParIterator {
- me: SCPI =>
+ extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
- new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI
+ new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
private def writeObject(out: java.io.ObjectOutputStream) {
@@ -164,10 +160,11 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
import collection.parallel.tasksupport._
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
+ private var seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
- val hc = improve(elemHashCode(elem._1))
+ val hc = improve(elemHashCode(elem._1), seedvalue)
val pos = (hc >>> nonmasklen)
if (buckets(pos) eq null) {
// initialize bucket
@@ -180,7 +177,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024
// construct table
- val table = new AddingHashTable(size, tableLoadFactor)
+ val table = new AddingHashTable(size, tableLoadFactor, seedvalue)
val bucks = buckets.map(b => if (b ne null) b.headPtr else null)
val insertcount = executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length))
table.setSize(insertcount)
@@ -190,7 +187,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
} else {
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
- val table = new HashTable[K, DefaultEntry[K, V]] {
+ object table extends HashTable[K, DefaultEntry[K, V]] {
def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
sizeMapInit(table.length)
}
@@ -201,8 +198,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
}
i += 1
}
- val c = table.hashTableContents
- new ParHashMap(c)
+ new ParHashMap(table.hashTableContents)
}
/* classes */
@@ -215,11 +211,12 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
* and true if the key was successfully inserted. It does not update the number of elements
* in the table.
*/
- private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int) extends HashTable[K, DefaultEntry[K, V]] {
+ private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] {
import HashTable._
_loadFactor = lf
table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems)))
tableSize = 0
+ seedvalue = _seedvalue
threshold = newThreshold(_loadFactor, table.length)
sizeMapInit(table.length)
def setSize(sz: Int) = tableSize = sz
@@ -290,7 +287,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
insertcount
}
private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k))
+ val hc = improve(elemHashCode(k), seedvalue)
if ((hc >>> nonmasklen) != block) {
println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
assert((hc >>> nonmasklen) == block)
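
The seedvalue threaded through improve() above decorrelates bucket assignment between tables built from the same keys (and between a combiner and the table it fills). An illustrative seeded improver — not the library's exact code, constants included:

def improveIllustrative(hcode: Int, seed: Int): Int = {
  var h = hcode * 0x9e3775cd                  // avalanche-style mixing
  h = java.lang.Integer.reverseBytes(h)
  val improved = h * 0x9e3775cd
  val rotation = seed % 32                    // per-table rotation
  (improved >>> rotation) | (improved << (32 - rotation))
}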
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 7763cdf318..6c5f513ad0 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -66,14 +66,11 @@ extends ParSet[T]
def contains(elem: T) = containsEntry(elem)
- def splitter = new ParHashSetIterator(0, table.length, size) with SCPI
-
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
+ def splitter = new ParHashSetIterator(0, table.length, size)
class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int)
- extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) with ParIterator {
- me: SCPI =>
- def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI
+ extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) {
+ def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total)
}
private def writeObject(s: java.io.ObjectOutputStream) {
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 9b8e233b95..8c93732427 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -29,7 +29,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
/** A parallel iterator returning all the entries.
*/
abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
- (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
+ (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
extends IterableSplitter[T] with SizeMapUtils {
private val itertable = table
private var traversed = 0
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index addc366072..8f19d0ecdb 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -83,6 +83,7 @@ package object parallel {
}
}
+
package parallel {
trait FactoryOps[From, Elem, To] {
trait Otherwise[R] {
@@ -113,11 +114,23 @@ package parallel {
}
/* classes */
-
+
+ trait CombinerFactory[U, Repr] {
+ /** Provides a combiner used to construct a collection. */
+ def apply(): Combiner[U, Repr]
+ /** The call to the `apply` method can create a new combiner each time.
+ * If it does, this method returns `false`.
+ * The same combiner may instead be returned on every call (typically, this is
+ * the case for concurrent collections, which are thread safe).
+ * If so, the method returns `true`.
+ */
+ def doesShareCombiners: Boolean
+ }
+
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
final case class CompositeThrowable(
val throwables: Set[Throwable]
- ) extends Throwable(
+ ) extends Exception(
"Multiple exceptions thrown during a parallel computation: " +
throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
)
@@ -127,8 +140,9 @@ package parallel {
* Automatically forwards the signal delegate when splitting.
*/
private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
+ (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling)
extends IterableSplitter[T] {
+ signalDelegate = _sigdel
def hasNext = index < until
def next = {
val r = buffer(index)
@@ -182,22 +196,23 @@ package parallel {
* the receiver (which will be the return value).
*/
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
- (private val bucketnumber: Int)
+ (private val bucketnumber: Int)
extends Combiner[Elem, To] {
//self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
protected var sz: Int = 0
-
+
def size = sz
-
+
def clear() = {
buckets = new Array[UnrolledBuffer[Buck]](bucketnumber)
sz = 0
}
-
+
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
-
+
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
if (this eq other) this
else other match {
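
The CombinerFactory trait added above distinguishes factories that hand out one shared, thread-safe combiner from those that build a fresh combiner per task. A minimal sketch of both flavours (sharedFactory and freshFactory are made-up helpers):

import scala.collection.parallel.{ Combiner, CombinerFactory }

def sharedFactory[U, Repr](shared: Combiner[U, Repr]): CombinerFactory[U, Repr] =
  new CombinerFactory[U, Repr] {
    def apply() = shared               // the same (thread-safe) combiner every time
    def doesShareCombiners = true
  }

def freshFactory[U, Repr](mk: () => Combiner[U, Repr]): CombinerFactory[U, Repr] =
  new CombinerFactory[U, Repr] {
    def apply() = mk()                 // a new combiner per call
    def doesShareCombiners = false
  }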
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 8533ed493f..d9cef0edb1 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -38,6 +38,9 @@ class Codec(val charSet: Charset) {
private[this] var _decodingReplacement: String = null
private[this] var _onCodingException: Handler = e => throw e
+ /** The name of the Codec. */
+ override def toString = name
+
// these methods can be chained to configure the variables above
def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
@@ -98,10 +101,11 @@ object Codec extends LowPriorityCodecImplicits {
}
@migration("This method was previously misnamed `toUTF8`. Converts from Array[Byte] to Array[Char].", "2.9.0")
- def fromUTF8(bytes: Array[Byte]): Array[Char] = {
- val bbuffer = java.nio.ByteBuffer wrap bytes
+ def fromUTF8(bytes: Array[Byte]): Array[Char] = fromUTF8(bytes, 0, bytes.length)
+ def fromUTF8(bytes: Array[Byte], offset: Int, len: Int): Array[Char] = {
+ val bbuffer = java.nio.ByteBuffer.wrap(bytes, offset, len)
val cbuffer = UTF8.charSet decode bbuffer
- val chars = new Array[Char](cbuffer.remaining())
+ val chars = new Array[Char](cbuffer.remaining())
cbuffer get chars
chars
@@ -109,7 +113,15 @@ object Codec extends LowPriorityCodecImplicits {
@migration("This method was previously misnamed `fromUTF8`. Converts from character sequence to Array[Byte].", "2.9.0")
def toUTF8(cs: CharSequence): Array[Byte] = {
- val cbuffer = java.nio.CharBuffer wrap cs
+ val cbuffer = java.nio.CharBuffer.wrap(cs, 0, cs.length)
+ val bbuffer = UTF8.charSet encode cbuffer
+ val bytes = new Array[Byte](bbuffer.remaining())
+ bbuffer get bytes
+
+ bytes
+ }
+ def toUTF8(chars: Array[Char], offset: Int, len: Int): Array[Byte] = {
+ val cbuffer = java.nio.CharBuffer.wrap(chars, offset, len)
val bbuffer = UTF8.charSet encode cbuffer
val bytes = new Array[Byte](bbuffer.remaining())
bbuffer get bytes
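
A short round-trip using the new offset/length overloads added above (values here are arbitrary):

import scala.io.Codec

val bytes = Codec.toUTF8("hello, world")      // CharSequence -> Array[Byte]
val world = Codec.fromUTF8(bytes, 7, 5)       // decode only the "world" slice
assert(new String(world) == "world")

val par = Codec.toUTF8("parallel".toCharArray, 0, 3)   // encode only "par"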
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 497de92c80..c1f45eccfb 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -33,8 +33,10 @@ object BigDecimal {
/** Cache ony for defaultMathContext using BigDecimals in a small range. */
private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
- object RoundingMode extends Enumeration(java.math.RoundingMode.values map (_.toString) : _*) with Serializable {
+ object RoundingMode extends Enumeration {
type RoundingMode = Value
+ // These are supposed to be the same as java.math.RoundingMode.values,
+ // though it seems unwise to rely on the correspondence.
val UP, DOWN, CEILING, FLOOR, HALF_UP, HALF_DOWN, HALF_EVEN, UNNECESSARY = Value
}
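
Since RoundingMode is no longer constructed from java.math.RoundingMode.values, any bridge to the Java enum is now explicit; for example (illustrative):

import scala.math.BigDecimal.RoundingMode

val javaMode = java.math.RoundingMode.valueOf(RoundingMode.HALF_UP.toString)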
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 361e02cb16..8a53afaa62 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -309,7 +309,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
override def byteValue = intValue.toByte
/** Converts this BigInt to a <tt>short</tt>.
- * If the BigInt is too big to fit in a byte, only the low-order 16 bits are returned.
+ * If the BigInt is too big to fit in a short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigInt value as well as return a result with the opposite sign.
*/
@@ -323,7 +323,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def charValue = intValue.toChar
/** Converts this BigInt to an <tt>int</tt>.
- * If the BigInt is too big to fit in a char, only the low-order 32 bits
+ * If the BigInt is too big to fit in an int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
@@ -331,7 +331,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def intValue = this.bigInteger.intValue
/** Converts this BigInt to a <tt>long</tt>.
- * If the BigInt is too big to fit in a char, only the low-order 64 bits
+ * If the BigInt is too big to fit in a long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
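An illustrative sketch (not part of the patch) of the truncation the corrected comments describe: narrowing conversions keep only the low-order bits.

    val big = BigInt("4294967298")           // 2^32 + 2
    assert(big.intValue == 2)                // low-order 32 bits survive
    assert(BigInt(65538).shortValue == 2)    // low-order 16 bits survive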
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 0c5d10b15e..9425eba232 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -28,6 +28,14 @@ package object scala {
type NumberFormatException = java.lang.NumberFormatException
type AbstractMethodError = java.lang.AbstractMethodError
+ // A dummy used by the specialization annotation.
+ // Normally it's bad juju to place objects inside package objects,
+ // but there's no choice here as we'd have to be AnyRef's companion
+ // and defined in the same file - except there is no such file.
+ object AnyRef extends Specializable {
+ override def toString = "object AnyRef"
+ }
+
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
type serializable = annotation.serializable
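A one-line sketch (an assumption, relying on the reworked `specialized` annotation later in this patch) of what the AnyRef dummy above enables:

    class Box[@specialized(Int, AnyRef) T](val value: T)   // AnyRef is now a legal @specialized argument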
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index acd28f04f5..466b57dea7 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -127,7 +127,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
def arrayManifest: ClassManifest[Array[T]] =
- ClassManifest.classType[Array[T]](arrayClass[T](erasure))
+ ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
@@ -220,7 +220,7 @@ object ClassManifest {
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
- case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
+ case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
}
diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala
deleted file mode 100644
index 52705d302c..0000000000
--- a/src/library/scala/reflect/Code.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) {
- val manifest = implicitly[Manifest[T]]
- override def toString = "Code(tree = "+tree+", manifest = "+manifest+")"
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-object Code {
- def lift[A](tree: A): Code[A] =
- throw new Error("Code was not lifted by compiler")
-}
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index df5f64cdf6..6c02878b19 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -44,7 +44,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
override def arrayManifest: Manifest[Array[T]] =
- Manifest.classType[Array[T]](arrayClass[T](erasure))
+ Manifest.classType[Array[T]](arrayClass[T](erasure), this)
override def canEqual(that: Any): Boolean = that match {
case _: Manifest[_] => true
@@ -60,7 +60,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
override def hashCode = this.erasure.##
}
-trait AnyValManifest[T] extends Manifest[T] with Equals {
+abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
override def <:<(that: ClassManifest[_]): Boolean =
(that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal)
override def canEqual(other: Any) = other match {
@@ -68,7 +68,7 @@ trait AnyValManifest[T] extends Manifest[T] with Equals {
case _ => false
}
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
+ override val hashCode = System.identityHashCode(this)
}
/** The object `Manifest` defines factory methods for manifests.
@@ -76,127 +76,104 @@ trait AnyValManifest[T] extends Manifest[T] with Equals {
* in client code.
*/
object Manifest {
- private def ObjectClass = classOf[java.lang.Object]
+ def valueManifests: List[AnyValManifest[_]] =
+ List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
- val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte] {
+ val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
def erasure = java.lang.Byte.TYPE
- override def toString = "Byte"
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
private def readResolve(): Any = Manifest.Byte
}
- val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short] {
+ val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
def erasure = java.lang.Short.TYPE
- override def toString = "Short"
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
private def readResolve(): Any = Manifest.Short
}
- val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char] {
+ val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
def erasure = java.lang.Character.TYPE
- override def toString = "Char"
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
private def readResolve(): Any = Manifest.Char
}
- val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int] {
+ val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
def erasure = java.lang.Integer.TYPE
- override def toString = "Int"
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
private def readResolve(): Any = Manifest.Int
}
- val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long] {
+ val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
def erasure = java.lang.Long.TYPE
- override def toString = "Long"
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
private def readResolve(): Any = Manifest.Long
}
- val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float] {
+ val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
def erasure = java.lang.Float.TYPE
- override def toString = "Float"
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
private def readResolve(): Any = Manifest.Float
}
- val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double] {
+ val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
def erasure = java.lang.Double.TYPE
- override def toString = "Double"
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
private def readResolve(): Any = Manifest.Double
}
- val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean] {
+ val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
def erasure = java.lang.Boolean.TYPE
- override def toString = "Boolean"
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
private def readResolve(): Any = Manifest.Boolean
}
- val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit] {
+ val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
def erasure = java.lang.Void.TYPE
- override def toString = "Unit"
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
private def readResolve(): Any = Manifest.Unit
}
- val Any: Manifest[Any] = new ClassTypeManifest[scala.Any](None, ObjectClass, Nil) {
- override def toString = "Any"
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Any
}
- val Object: Manifest[Object] = new ClassTypeManifest[java.lang.Object](None, ObjectClass, Nil) {
- override def toString = "Object"
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Object
}
- val AnyVal: Manifest[AnyVal] = new ClassTypeManifest[scala.AnyVal](None, ObjectClass, Nil) {
- override def toString = "AnyVal"
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.AnyVal
}
- val Null: Manifest[Null] = new ClassTypeManifest[scala.Null](None, ObjectClass, Nil) {
- override def toString = "Null"
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Null
}
- val Nothing: Manifest[Nothing] = new ClassTypeManifest[scala.Nothing](None, ObjectClass, Nil) {
- override def toString = "Nothing"
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Nothing
}
@@ -231,6 +208,11 @@ object Manifest {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+ private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override val hashCode = System.identityHashCode(this)
+ }
+
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
@@ -240,7 +222,7 @@ object Manifest {
val clazz = classToSymbol(erasure)
val pre = prefix match {
case Some(pm) => pm.tpe
- case None => clazz.owner.thisType
+ case None => clazz.owner.thisPrefix
}
namedType(pre, clazz, typeArguments map (_.tpe))
}
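A quick sketch (not part of the patch) of the behaviour preserved by the rework: the value manifests keep their names and stay singletons.

    assert(manifest[Int].toString == "Int")
    assert(manifest[Int] eq Manifest.Int)
    assert(Manifest.valueManifests.size == 9)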
diff --git a/src/library/scala/reflect/ReflectionUtils.scala b/src/library/scala/reflect/ReflectionUtils.scala
index b63a8645de..dfadfb4976 100644
--- a/src/library/scala/reflect/ReflectionUtils.scala
+++ b/src/library/scala/reflect/ReflectionUtils.scala
@@ -27,11 +27,15 @@ object ReflectionUtils {
case ex if pf isDefinedAt unwrapThrowable(ex) => pf(unwrapThrowable(ex))
}
- // Retrieves the MODULE$ field for the given class name.
- def singletonInstance(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] = {
+ def singletonInstance(className: String, cl: ClassLoader = getClass.getClassLoader): AnyRef = {
val name = if (className endsWith "$") className else className + "$"
+ val clazz = java.lang.Class.forName(name, true, cl)
+ val singleton = clazz getField "MODULE$" get null
+ singleton
+ }
- try Some(java.lang.Class.forName(name, true, cl) getField "MODULE$" get null)
+ // Retrieves the MODULE$ field for the given class name.
+ def singletonInstanceOpt(className: String, cl: ClassLoader = getClass.getClassLoader): Option[AnyRef] =
+ try Some(singletonInstance(className, cl))
catch { case _: ClassNotFoundException => None }
- }
}
diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala
index 53ac84f8cb..448dca752c 100644
--- a/src/library/scala/reflect/api/Mirror.scala
+++ b/src/library/scala/reflect/api/Mirror.scala
@@ -3,53 +3,59 @@ package api
/** A mirror establishes connections of
* runtime entities such as class names and object instances
- * with a refexive universe.
+ * with a reflexive universe.
*/
trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil {
/** The Scala class symbol that has given fully qualified name
* @param name The fully qualified name of the class to be returned
- * @throws java.lang.ClassNotFoundException if no class wiht that name exists
+ * @throws java.lang.ClassNotFoundException if no class with that name exists
* to do: throws anything else?
*/
- def classWithName(name: String): Symbol
-
- /** The Scala class symbol corresponding to the runtime class of given object
- * @param The object from which the class is returned
+ def symbolForName(name: String): Symbol
+
+ /** Return a reference to the companion object of the given class symbol.
+ */
+ def companionInstance(clazz: Symbol): AnyRef
+
+ /** The Scala class symbol corresponding to the runtime class of the given instance.
+ * @param instance The instance
+ * @return The class Symbol for the instance
* @throws ?
*/
- def getClass(obj: AnyRef): Symbol
+ def symbolOfInstance(instance: Any): Symbol
- /** The Scala type corresponding to the runtime type of given object.
+ /** The Scala type corresponding to the runtime type of given instance.
* If the underlying class is parameterized, this will be an existential type,
* with unknown type arguments.
*
- * @param The object from which the type is returned
+ * @param instance The instance.
+ * @return The Type of the given instance.
* @throws ?
*/
- def getType(obj: AnyRef): Type
+ def typeOfInstance(instance: Any): Type
/** The value of a field on a receiver instance.
* @param receiver The receiver instance
* @param field The field
* @return The value contained in `receiver.field`.
*/
- def getValue(receiver: AnyRef, field: Symbol): Any
+ def getValueOfField(receiver: AnyRef, field: Symbol): Any
/** Sets the value of a field on a receiver instance.
* @param receiver The receiver instance
* @param field The field
* @param value The new value to be stored in the field.
*/
- def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit
+ def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit
- /** Invokes a method on a reciver instance with some arguments
+ /** Invokes a method on a receiver instance with some arguments
* @param receiver The receiver instance
* @param meth The method
* @param args The method call's arguments
* @return The result of invoking `receiver.meth(args)`
*/
- def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any
+ def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any
/** Maps a Java class to a Scala type reference
* @param clazz The Java class object
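A hedged sketch of the renamed entry points (the runtime mirror value is an assumption used for illustration; method names are those in the hunk above):

    import scala.reflect.mirror
    val listSym = mirror.symbolForName("scala.collection.immutable.List")
    val listObj = mirror.companionInstance(listSym)   // the List companion object
    val strTpe  = mirror.typeOfInstance("abc")        // Scala type of a runtime value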
diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala
index 8569b103cf..c0123ed955 100644
--- a/src/library/scala/reflect/api/Modifier.scala
+++ b/src/library/scala/reflect/api/Modifier.scala
@@ -1,11 +1,82 @@
package scala.reflect.api
-object Modifier extends Enumeration {
+import collection.{ immutable, mutable }
- val `protected`, `private`, `override`, `abstract`, `final`,
- `sealed`, `implicit`, `lazy`, `macro`, `case`, `trait`,
- deferred, interface, mutable, parameter, covariant, contravariant,
- preSuper, abstractOverride, local, java, static, caseAccessor,
- defaultParameter, defaultInit, paramAccessor, bynameParameter = Value
+sealed abstract class Modifier {
+ def name: String
+ def isKeyword: Boolean
+ def sourceString: String = if (isKeyword) "`" + name + "`" else name
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = name.hashCode
+ override def toString = name
+}
+final class SymbolModifier private (val name: String, val isKeyword: Boolean) extends Modifier {
+ def this(name: String) = this(name, false)
+}
+final class SourceModifier private (val name: String) extends Modifier {
+ def isKeyword = true
+}
+
+object SymbolModifier {
+ private val seen = mutable.ListBuffer[SymbolModifier]()
+ private[api] def apply(name: String): SymbolModifier = {
+ val mod = name match {
+ case "case" | "trait" => new SymbolModifier(name, isKeyword = true)
+ case _ => new SymbolModifier(name)
+ }
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+object SourceModifier {
+ private val seen = mutable.ListBuffer[SourceModifier]()
+ private[api] def apply(name: String): SourceModifier = {
+ val mod = new SourceModifier(name)
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+
+object Modifier extends immutable.Set[Modifier] {
+ val `abstract` = SourceModifier("abstract")
+ val `final` = SourceModifier("final")
+ val `implicit` = SourceModifier("implicit")
+ val `lazy` = SourceModifier("lazy")
+ val `macro` = SourceModifier("macro")
+ val `override` = SourceModifier("override")
+ val `private` = SourceModifier("private")
+ val `protected` = SourceModifier("protected")
+ val `sealed` = SourceModifier("sealed")
+
+ val `case` = SymbolModifier("case")
+ val `trait` = SymbolModifier("trait")
+ val abstractOverride = SymbolModifier("abstractOverride")
+ val bynameParameter = SymbolModifier("bynameParameter")
+ val caseAccessor = SymbolModifier("caseAccessor")
+ val contravariant = SymbolModifier("contravariant")
+ val covariant = SymbolModifier("covariant")
+ val defaultInit = SymbolModifier("defaultInit")
+ val defaultParameter = SymbolModifier("defaultParameter")
+ val deferred = SymbolModifier("deferred")
+ val interface = SymbolModifier("interface")
+ val java = SymbolModifier("java")
+ val local = SymbolModifier("local")
+ val mutable = SymbolModifier("mutable")
+ val paramAccessor = SymbolModifier("paramAccessor")
+ val parameter = SymbolModifier("parameter")
+ val preSuper = SymbolModifier("preSuper")
+ val static = SymbolModifier("static")
+
+ val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet
+ val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet
+ val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers
+ def values = allModifiers
+
+ def contains(key: Modifier) = allModifiers(key)
+ def iterator = allModifiers.iterator
+ def -(elem: Modifier) = allModifiers - elem
+ def +(elem: Modifier) = allModifiers + elem
}
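Sketch (not part of the patch): since Modifier is now itself an immutable Set of all modifiers, the usual set operations apply directly.

    import scala.reflect.api.Modifier
    assert(Modifier contains Modifier.`lazy`)
    assert(Modifier.allModifiers.size ==
           Modifier.sourceModifiers.size + Modifier.symbolModifiers.size)
    println(Modifier.`override`.sourceString)   // "`override`" - source modifiers print backquoted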
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index e226d2265a..3a00f21c8c 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -1,32 +1,66 @@
package scala.reflect
package api
+/** A trait that manages names.
+ * A name is a string in one of two name universes: terms and types.
+ * The same string can be a name in both universes.
+ * Two names are equal if they represent the same string and they are
+ * members of the same universe.
+ *
+ * Names are interned. That is, for two names `name1` and `name2`,
+ * `name1 == name2` implies `name1 eq name2`.
+ */
trait Names {
-
+ /** The abstract type of names */
type Name >: Null <: AbsName
+
+ /** The abstract type of names representing types */
type TypeName <: Name
+
+ /** The abstract type of names representing terms */
type TermName <: Name
abstract class AbsName {
+ /** Is this name a term name? */
def isTermName: Boolean
+
+ /** Is this name a type name? */
def isTypeName: Boolean
+
+ /** Returns a term name that represents the same string as this name */
def toTermName: TermName
+
+ /** Returns a type name that represents the same string as this name */
def toTypeName: TypeName
- /** Replace all occurrences of $op_names in this name by corresponding operator symbols.
+ /** Replaces all occurrences of $op_names in this name by corresponding operator symbols.
* Example: `foo_$plus$eq` becomes `foo_+=`.
*/
- def decode: String
+ def decoded: String
- /** Replace all occurrences of operator symbols in this name by corresponding $op_names.
+ /** Replaces all occurrences of operator symbols in this name by corresponding $op_names.
* Example: `foo_+=` becomes `foo_$plus$eq`
*/
- def encode: Name
+ def encoded: String
+
+ /** The decoded name, still represented as a name.
+ */
+ def decodedName: Name
+
+ /** The encoded name, still represented as a name.
+ */
+ def encodedName: Name
}
+ /** Creates a new term name.
+ */
def newTermName(s: String): TermName
+
+ /** Creates a new type name.
+ */
def newTypeName(s: String): TypeName
def EmptyTermName: TermName = newTermName("")
+
def EmptyTypeName: TypeName = EmptyTermName.toTypeName
}
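Sketch of the contract documented above, using the runtime mirror as one concrete universe (an assumption for illustration):

    import scala.reflect.mirror._
    val a = newTermName("foo_+=")
    val b = newTermName("foo_+=")
    assert(a eq b)                         // names are interned
    assert(a.encoded == "foo_$plus$eq")    // operator symbols become $op_names
    assert(a.toTypeName != a)              // same string, different name universe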
diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala
index 6b480ab83d..e737b0ea4f 100755
--- a/src/library/scala/reflect/api/StandardDefinitions.scala
+++ b/src/library/scala/reflect/api/StandardDefinitions.scala
@@ -11,16 +11,11 @@ trait StandardDefinitions { self: Universe =>
val definitions: AbsDefinitions
abstract class AbsDefinitions {
- // outer packages and their classes
- // Under consideration
- // def RootPackage: Symbol
-
+ // packages
+ def RootPackage: Symbol
def RootClass: Symbol
def EmptyPackage: Symbol
- def EmptyPackageClass: Symbol
-
def ScalaPackage: Symbol
- def ScalaPackageClass: Symbol
// top types
def AnyClass : Symbol
@@ -48,20 +43,27 @@ trait StandardDefinitions { self: Universe =>
def StringClass : Symbol
def ClassClass : Symbol
+ // product, tuple, function
+ def TupleClass : Array[Symbol]
+ def ProductClass : Array[Symbol]
+ def FunctionClass : Array[Symbol]
+
// fundamental modules
def PredefModule: Symbol
- // fundamental type constructions
- def ClassType(arg: Type): Type
+ /** Given a type `arg`, returns the type corresponding to the VM's
+ * representation: ClassClass's type constructor applied to `arg`.
+ */
+ def vmClassType(arg: Type): Type // !!! better name?
/** The string representation used by the given type in the VM.
*/
- def signature(tp: Type): String
+ def vmSignature(sym: Symbol, info: Type): String
/** Is symbol one of the value classes? */
- def isValueClass(sym: Symbol): Boolean
+ def isValueClass(sym: Symbol): Boolean // !!! better name?
/** Is symbol one of the numeric value classes? */
- def isNumericValueClass(sym: Symbol): Boolean
+ def isNumericValueClass(sym: Symbol): Boolean // !!! better name?
}
}
diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala
new file mode 100644
index 0000000000..81517d2a6b
--- /dev/null
+++ b/src/library/scala/reflect/api/StandardNames.scala
@@ -0,0 +1,21 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package api
+
+trait StandardNames { self: Universe =>
+
+ val nme: AbsTermNames
+
+ abstract class AbsTermNames {
+ val CONSTRUCTOR: TermName
+ }
+
+ val tpnme: AbsTypeNames
+
+ abstract class AbsTypeNames {
+ }
+}
diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala
index 31bcdebe7e..15d754b5b4 100755
--- a/src/library/scala/reflect/api/Symbols.scala
+++ b/src/library/scala/reflect/api/Symbols.scala
@@ -9,27 +9,35 @@ trait Symbols { self: Universe =>
/** The modifiers of this symbol
*/
- def allModifiers: Set[Modifier.Value]
+ def modifiers: Set[Modifier]
/** Does this symbol have given modifier?
*/
- def hasModifier(mod: Modifier.Value): Boolean
+ def hasModifier(mod: Modifier): Boolean
- /** The owner of this symbol.
+ /** A list of annotations attached to this Symbol.
*/
- def owner: Symbol
-
- /** The name of the symbol as a member of the `Name` type.
+ def annotations: List[self.AnnotationInfo]
+
+ /** Whether this symbol carries an annotation for which the given
+ * symbol is its typeSymbol.
*/
- def name: Name
+ def hasAnnotation(sym: Symbol): Boolean
- /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
+ /** The owner of this symbol. This is the symbol
+ * that directly contains the current symbol's definition.
+ * The `NoSymbol` symbol does not have an owner, and calling this method
+ * on one causes an internal error.
+ * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]]
+ * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`.
+ * Every other symbol has a chain of owners that ends in
+ * [[scala.reflect.api.mirror.RootClass]].
*/
- def encodedName: String
+ def owner: Symbol
- /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
+ /** The name of the symbol as a member of the `Name` type.
*/
- def decodedName: String
+ def name: Name
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by periods.
@@ -59,115 +67,137 @@ trait Symbols { self: Universe =>
*
* The java access levels translate as follows:
*
- * java private: hasFlag(PRIVATE) && !hasAccessBoundary
- * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package)
- * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package)
- * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary
+ * java private: hasFlag(PRIVATE) && (privateWithin == NoSymbol)
+ * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosingPackage)
+ * java protected: hasFlag(PROTECTED) && (privateWithin == enclosingPackage)
+ * java public: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == NoSymbol)
*/
def privateWithin: Symbol
- /** Whether this symbol has a "privateWithin" visibility barrier attached.
+ /** For a class: the module or case class factory with the same name in the same package.
+ * For a module: the class with the same name in the same package.
+ * For all others: NoSymbol
*/
- def hasAccessBoundary: Boolean
+ def companionSymbol: Symbol
- /** A list of annotations attached to this Symbol.
+ /** If symbol is an object definition, its implied associated class,
+ * otherwise NoSymbol
*/
- def annotations: List[self.AnnotationInfo]
+ def moduleClass: Symbol // needed for LiftCode
- /** The type of the symbol
+ /** If this symbol is a top-level class, this symbol; otherwise the next enclosing
+ * top-level class, or `NoSymbol` if none exists.
*/
- def tpe: Type
+ def enclosingTopLevelClass: Symbol
- /** The info of the symbol. This is like tpe, except for class symbols where the `info`
- * describes the contents of the class whereas the `tpe` is a reference to the class.
+ /** If this symbol is a class, this symbol; otherwise the next enclosing
+ * class, or `NoSymbol` if none exists.
*/
- def info: Type
+ def enclosingClass: Symbol
- /** If this symbol is a class or trait, its self type, otherwise the type of the symbol itself
+ /** If this symbol is a method, this symbol; otherwise the next enclosing
+ * method, or `NoSymbol` if none exists.
*/
- def typeOfThis: Type
-
- /** The type `C.this`, where `C` is the current class.
+ def enclosingMethod: Symbol
+
+ /** If this symbol is a package class, this symbol; otherwise the next enclosing
+ * package class, or `NoSymbol` if none exists.
*/
- def thisType: Type
+ def enclosingPackageClass: Symbol
- /** For a class: the module or case class factory with the same name in the same package.
- * For all others: NoSymbol
+ /** Does this symbol represent the definition of a term?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym`, either `sym.isTerm` is true
+ * or `sym.isType` is true.
*/
- def companionModule: Symbol
+ def isTerm : Boolean
- /** For a module: the class with the same name in the same package.
- * For all others: NoSymbol
+ /** Does this symbol represent the definition of a type?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym`, either `sym.isTerm` is true
+ * or `sym.isType` is true.
*/
- def companionClass: Symbol
+ def isType : Boolean
- /** The module corresponding to this module class (note that this
- * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
+ /** Does this symbol represent the definition of a class?
+ * If yes, `isType` is also guaranteed to be true.
*/
- def sourceModule: Symbol
+ def isClass : Boolean
- /** If symbol is an object definition, its implied associated class,
- * otherwise NoSymbol
+ /** Does this symbol represent the definition of a type alias?
+ * If yes, `isType` is also guaranteed to be true.
*/
- def moduleClass: Symbol // needed for LiftCode
-
- /** The top-level class containing this symbol. */
- def toplevelClass: Symbol
-
- /** The next enclosing class */
- def enclClass : Symbol
-
- /** The next enclosing method */
- def enclMethod : Symbol
-
- def isTerm : Boolean
- def isType : Boolean
- def isClass : Boolean
def isAliasType : Boolean
+
+ /** Does this symbol represent the definition of an abstract type?
+ * If yes, `isType` is also guaranteed to be true.
+ */
def isAbstractType : Boolean
/** The type signature of this symbol.
- * Note if symbol is a member of a class, one almost always is interested
- * in `typeSigIn` with a site type instead.
+ * Note if the symbol is a member of a class, one almost always is interested
+ * in `typeSignatureIn` with a site type instead.
*/
- def typeSig: Type
+ def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name.
/** The type signature of this symbol seen as a member of given type `site`.
*/
- def typeSigIn(site: Type): Type
-
- /** The type constructor corresponding to this type symbol.
- */
- def asTypeConstructor: Type // needed by LiftCode
+ def typeSignatureIn(site: Type): Type
- /** A type reference that refers to this type symbol
+ /** A type reference that refers to this type symbol
* Note if symbol is a member of a class, one almost always is interested
* in `asTypeIn` with a site type instead.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.asType` is the type `C[T]`.
+ *
+ * By contrast, `C.typeSignature` would be a type signature of form
+ * `PolyType(ClassInfoType(...))` that describes type parameters, value
+ * parameters, parent types, and members of `C`.
*/
- def asType: Type
+ def asType: Type // !!! Same as typeSignature.
- /** A type reference that refers to this type symbol seen as a member of given type `site`.
+ /** A type reference that refers to this type symbol seen
+ * as a member of given type `site`.
*/
def asTypeIn(site: Type): Type
- /** A fresh symbol with given position `pos` and name `name` that has
- * the current symbol as its owner.
+ /** The type constructor corresponding to this type symbol.
+ * This is different from `asType` in that type parameters
+ * are part of results of `asType`, but not of `asTypeConstructor`.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
+ */
+ def asTypeConstructor: Type // needed by LiftCode
+
+ /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
+ */
+ def thisPrefix: Type
+
+ /** If this symbol is a class or trait, its self type, otherwise the type
+ * of the symbol itself.
*/
- def newNestedSymbol(pos: Position, name: Name): Symbol // needed by LiftCode
+ def selfType: Type
+ /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
+ * the current symbol as its owner.
+ */
+ def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
+
/** Low-level operation to set the symbol's flags
* @return the symbol itself
*/
- def setInternalFlags(flags: Long): this.type // needed by LiftCode
+ def setInternalFlags(flags: Long): this.type // needed by LiftCode !!! not enough reason to have in the api
/** Set symbol's type signature to given type
* @return the symbol itself
*/
- def setTypeSig(tpe: Type): this.type // needed by LiftCode
+ def setTypeSignature(tpe: Type): this.type // needed by LiftCode !!! not enough reason to have in the api
/** Set symbol's annotations to given annotations `annots`.
*/
- def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode
+ def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode !!! not enough reason to have in the api
}
val NoSymbol: Symbol
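A brief sketch of the asType / asTypeConstructor distinction described above (runtime mirror usage is an assumption for illustration):

    import scala.reflect.mirror._
    val listSym = symbolForName("scala.collection.immutable.List")
    println(listSym.asType)                            // List[A]: type parameters applied
    println(listSym.asTypeConstructor)                 // List: the bare type constructor
    assert(listSym.enclosingTopLevelClass == listSym)  // a top-level class encloses itself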
diff --git a/src/library/scala/reflect/api/TreeBuildUtil.scala b/src/library/scala/reflect/api/TreeBuildUtil.scala
index b437824925..f28008bc21 100644
--- a/src/library/scala/reflect/api/TreeBuildUtil.scala
+++ b/src/library/scala/reflect/api/TreeBuildUtil.scala
@@ -3,19 +3,19 @@ package scala.reflect.api
trait TreeBuildUtil extends Universe {
/** The symbol corresponding to the globally accessible class with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def staticClass(fullname: String): Symbol
+ def staticClass(fullName: String): Symbol
/** The symbol corresponding to the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def staticModule(fullname: String): Symbol
+ def staticModule(fullName: String): Symbol
/** The this-type of the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
*/
- def thisModuleType(fullname: String): Type
+ def thisModuleType(fullName: String): Type
/** Selects type symbol with given simple name `name` from the defined members of `owner`.
*/
@@ -38,7 +38,7 @@ trait TreeBuildUtil extends Universe {
* @param info the type signature of the free variable
* @param value the value of the free variable at runtime
*/
- def freeVar(name: String, tsig: Type, value: Any): Symbol
+ def newFreeVar(name: String, info: Type, value: Any): Symbol
/** Create a Modifiers structure given internal flags, qualifier, annotations */
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 15fba0e418..19bfd09b81 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -28,6 +28,8 @@ trait TreePrinters { self: Universe =>
*/
def newTreePrinter(out: PrintWriter): TreePrinter
+ // emits more or less verbatim representation of the provided tree
+ // todo. when LiftCode becomes a macro, throw this code away and use that macro
class RawTreePrinter(out: PrintWriter) extends TreePrinter {
def print(args: Any*): Unit = args foreach {
case EmptyTree =>
@@ -48,10 +50,35 @@ trait TreePrinters { self: Universe =>
case arg =>
print(arg)
}
- print(if (it.hasNext) ", " else ")")
+ print(if (it.hasNext) ", " else "")
}
+ print(")")
if (typesPrinted)
print(".setType(", tree.tpe, ")")
+ case list: List[_] =>
+ print("List(")
+ val it = list.iterator
+ while (it.hasNext) {
+ print(it.next())
+ print(if (it.hasNext) ", " else "")
+ }
+ print(")")
+ case mods: Modifiers =>
+ val parts = collection.mutable.ListBuffer[String]()
+ parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")"
+ parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
+ parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")"
+
+ var keep = 3
+ if (keep == 3 && mods.annotations.isEmpty) keep -= 1
+ if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1
+ if (keep == 1 && mods.modifiers.isEmpty) keep -= 1
+
+ print("Modifiers(", parts.take(keep).mkString(", "), ")")
+ case name: Name =>
+ if (name.isTermName) print("newTermName(\"") else print("newTypeName(\"")
+ print(name.toString)
+ print("\")")
case arg =>
out.print(arg)
}
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index e5502acb20..2c960392ec 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -8,23 +8,22 @@ package api
import scala.collection.mutable.ListBuffer
-//import scala.tools.nsc.util.{ FreshNameCreator, HashSet, SourceFile }
-
-trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
+// Syncnote: Trees are currently not thread-safe.
+trait Trees { self: Universe =>
private[scala] var nodeCount = 0
type Modifiers <: AbsModifiers
abstract class AbsModifiers {
- def hasModifier(mod: Modifier.Value): Boolean
- def allModifiers: Set[Modifier.Value]
+ def modifiers: Set[Modifier]
+ def hasModifier(mod: Modifier): Boolean
def privateWithin: Name // default: EmptyTypeName
def annotations: List[Tree] // default: List()
def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers
}
- def Modifiers(mods: Set[Modifier.Value] = Set(),
+ def Modifiers(mods: Set[Modifier] = Set(),
privateWithin: Name = EmptyTypeName,
annotations: List[Tree] = List()): Modifiers
@@ -440,6 +439,12 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
case class Assign(lhs: Tree, rhs: Tree)
extends TermTree
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by typecheck (doTypedApply)
+ */
+ case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
+ extends TermTree
+
/** Conditional expression */
case class If(cond: Tree, thenp: Tree, elsep: Tree)
extends TermTree
@@ -477,6 +482,17 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
*/
case class New(tpt: Tree) extends TermTree
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ */
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
+ assert(!argss.isEmpty)
+ // todo. we need to expose names in scala.reflect.api
+// val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+ val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+ (superRef /: argss) (Apply)
+ }
+
/** Type annotation, eliminated by explicit outer */
case class Typed(expr: Tree, tpt: Tree)
extends TermTree
@@ -538,15 +554,24 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
// The symbol of a This is the class to which the this refers.
// For instance in C.this, it would be C.
+ def This(sym: Symbol): Tree =
+ This(sym.name.toTypeName) setSymbol sym
+
/** Designator <qualifier> . <name> */
case class Select(qualifier: Tree, name: Name)
extends RefTree
+ def Select(qualifier: Tree, name: String): Select =
+ Select(qualifier, newTermName(name))
+
def Select(qualifier: Tree, sym: Symbol): Select =
Select(qualifier, sym.name) setSymbol sym
/** Identifier <name> */
- case class Ident(name: Name) extends RefTree { }
+ case class Ident(name: Name) extends RefTree
+
+ def Ident(name: String): Ident =
+ Ident(newTermName(name))
def Ident(sym: Symbol): Ident =
Ident(sym.name) setSymbol sym
@@ -624,10 +649,10 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
}
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
-
+
/** An empty deferred value definition corresponding to:
* val _: _
- * This is used as a placeholder in the `self` parameter Template if there is
+ * This is used as a placeholder in the `self` parameter Template if there is
* no definition of a self value of self type.
*/
def emptyValDef: ValDef
@@ -697,6 +722,8 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
}
case Assign(lhs, rhs) =>
traverse(lhs); traverse(rhs)
+ case AssignOrNamedArg(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
case If(cond, thenp, elsep) =>
traverse(cond); traverse(thenp); traverse(elsep)
case Match(selector, cases) =>
@@ -795,6 +822,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function
def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If
def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match
def Return(tree: Tree, expr: Tree): Return
@@ -857,6 +885,8 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
new Function(vparams, body).copyAttrs(tree)
def Assign(tree: Tree, lhs: Tree, rhs: Tree) =
new Assign(lhs, rhs).copyAttrs(tree)
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
+ new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) =
new If(cond, thenp, elsep).copyAttrs(tree)
def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) =
@@ -1002,6 +1032,11 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
if (lhs0 == lhs) && (rhs0 == rhs) => t
case _ => treeCopy.Assign(tree, lhs, rhs)
}
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
+ case t @ AssignOrNamedArg(lhs0, rhs0)
+ if (lhs0 == lhs) && (rhs0 == rhs) => t
+ case _ => treeCopy.AssignOrNamedArg(tree, lhs, rhs)
+ }
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = tree match {
case t @ If(cond0, thenp0, elsep0)
if (cond0 == cond) && (thenp0 == thenp) && (elsep0 == elsep) => t
@@ -1121,9 +1156,9 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
abstract class Transformer {
val treeCopy: TreeCopier = newLazyTreeCopier
protected var currentOwner: Symbol = definitions.RootClass
- protected def currentMethod = currentOwner.enclMethod
- protected def currentClass = currentOwner.enclClass
- protected def currentPackage = currentOwner.toplevelClass.owner
+ protected def currentMethod = currentOwner.enclosingMethod
+ protected def currentClass = currentOwner.enclosingClass
+ protected def currentPackage = currentOwner.enclosingTopLevelClass.owner
def transform(tree: Tree): Tree = tree match {
case EmptyTree =>
tree
@@ -1186,6 +1221,8 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
}
case Assign(lhs, rhs) =>
treeCopy.Assign(tree, transform(lhs), transform(rhs))
+ case AssignOrNamedArg(lhs, rhs) =>
+ treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs))
case If(cond, thenp, elsep) =>
treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
case Match(selector, cases) =>
@@ -1353,6 +1390,8 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
// vparams => body where vparams:List[ValDef]
case Assign(lhs, rhs) =>
// lhs = rhs
+ case AssignOrNamedArg(lhs, rhs) => (eliminated by typer, resurrected by reifier)
+ // @annotation(lhs = rhs)
case If(cond, thenp, elsep) =>
// if (cond) thenp else elsep
case Match(selector, cases) =>
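Sketch of the new convenience factories (hand-built trees; shapes follow the hunk above, the runtime mirror import is an assumption):

    import scala.reflect.mirror._
    val sel  = Select(Ident("scala"), "Predef")             // String overloads added above
    val ctor = New(Ident("C"), List(Nil))                   // expands to (new C).<init>()
    val arg  = AssignOrNamedArg(Ident("x"), Ident("y"))     // named argument x = y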
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 4b959649fd..8a91956320 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -6,7 +6,6 @@ trait Types { self: Universe =>
/** This class declares operations that are visible in a Type.
*/
abstract class AbsType {
-
/** The type symbol associated with the type, or `NoSymbol` for types
* that do not refer to a type symbol.
*/
@@ -20,7 +19,7 @@ trait Types { self: Universe =>
/** The collection of declarations in this type
*/
- def allDeclarations: Iterable[Symbol]
+ def declarations: Iterable[Symbol]
/** The member with given name, either directly declared or inherited,
* an OverloadedSymbol if several exist, NoSymbol if none exist.
@@ -36,7 +35,7 @@ trait Types { self: Universe =>
* Members appear in the linearization order of their owners.
* Members with the same owner appear in reverse order of their declarations.
*/
- def allMembers: Iterable[Symbol]
+ def members: Iterable[Symbol]
/** An iterable containing all non-private members of this type (directly declared or inherited)
* Members appear in the linearization order of their owners.
@@ -47,7 +46,7 @@ trait Types { self: Universe =>
/** Substitute types in `to` for corresponding occurrences of references to
* symbols `from` in this type.
*/
- def subst(from: List[Symbol], to: List[Type]): Type
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type // !!! Too many things with names like "subst"
/** If this is a parameterized type, the type arguments.
* Otherwise the empty list
@@ -56,7 +55,7 @@ trait Types { self: Universe =>
/** Is this type a type constructor that is missing its type arguments?
*/
- def isHigherKinded: Boolean
+ def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
/**
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
@@ -66,7 +65,7 @@ trait Types { self: Universe =>
* TypeRef(pre, <List>, List()) is replaced by
* PolyType(X, TypeRef(pre, <List>, List(X)))
*/
- def normalize: Type
+ def normalize: Type // !!! Alternative name? "normalize" is used to mean too many things.
/** Does this type conform to given type argument `that`? */
def <:< (that: Type): Boolean
@@ -74,11 +73,11 @@ trait Types { self: Universe =>
/** Is this type equivalent to given type argument `that`? */
def =:= (that: Type): Boolean
- /** The list of all baseclasses of this type (including its own typeSymbol)
+ /** The list of all base classes of this type (including its own typeSymbol)
* in reverse linearization order, starting with the class itself and ending
* in class Any.
*/
- def baseClasses: List[Symbol]
+ def baseClasses: List[Symbol] // !!! Alternative name, perhaps linearization?
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -104,9 +103,9 @@ trait Types { self: Universe =>
def asSeenFrom(pre: Type, clazz: Symbol): Type
/** The erased type corresponding to this type after
- * all transcformations from Scala to Java have been performed.
+ * all transformations from Scala to Java have been performed.
*/
- def erasedType: Type
+ def erasedType: Type // !!! "erasedType", compare with "widen" (so "erase") or "underlying" (so "erased")
/** Apply `f` to each part of this type, returning
* a new type. children get mapped before their parents */
@@ -125,19 +124,23 @@ trait Types { self: Universe =>
/** Does this type contain a reference to given symbol? */
def contains(sym: Symbol): Boolean
- }
- /** This class declares methods that are visible in a `SingleType`.
- */
- trait AbsSingletonType extends AbsType {
+ /** If this is a compound type, the list of its parent types;
+ * otherwise the empty list
+ */
+ def parents: List[Type]
- /** The type underlying a singleton type */
+ /** If this is a singleton type, returns the type underlying it;
+ * otherwise returns this type itself.
+ */
def underlying: Type
- /** Widen from singleton type to its underlying non-singleton
- * base type by applying one or more `underlying` dereferences,
- * identity for all other types.
+ /** If this is a singleton type, widen it to its nearest underlying non-singleton
+ * base type by applying one or more `underlying` dereferences.
+ * If this is not a singleton type, returns this type itself.
*
+ * Example:
+ *
* class Outer { class C ; val x: C }
* val o: Outer
* <o.x.type>.widen = o.C
@@ -145,19 +148,6 @@ trait Types { self: Universe =>
def widen: Type
}
- /** This class declares methods that are visible in a `CompoundType` (i.e.
- * a class/trait/object template or refined type of the form
- * {{{
- * P_1 with ... with P_m { D_1; ...; D_n }
- * }}}
- * P_n
- */
- trait AbsCompoundType extends AbsType {
-
- /** The list of parent types of this compound type */
- def parents: List[Type]
- }
-
/** The type of Scala types, and also Scala type signatures.
* (No difference is internally made between the two).
*/
@@ -293,7 +283,7 @@ trait Types { self: Universe =>
/** A subtype of Type representing refined types as well as `ClassInfo` signatures.
*/
- type CompoundType <: /*AbsCompoundType with*/ Type
+ type CompoundType <: Type
/** The `RefinedType` type defines types of any of the forms on the left,
* with their RefinedType representations to the right.
@@ -409,11 +399,6 @@ trait Types { self: Universe =>
def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
}
-
-
-
-
-
abstract class NullaryMethodTypeExtractor {
def apply(resultType: Type): NullaryMethodType
def unapply(tpe: NullaryMethodType): Option[(Type)]
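Sketch of a few of the operations documented above, again using the runtime mirror as the concrete universe (an assumption):

    import scala.reflect.mirror._
    val strTpe = typeOfInstance("abc")
    assert(strTpe <:< strTpe && strTpe =:= strTpe)   // conformance and equivalence are reflexive
    println(strTpe.members.size)                     // declared and inherited members
    println(strTpe.baseClasses.last)                 // linearization ends in Any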
diff --git a/src/library/scala/reflect/api/Universe.scala b/src/library/scala/reflect/api/Universe.scala
index 03acbdda2c..a3cec3271b 100755
--- a/src/library/scala/reflect/api/Universe.scala
+++ b/src/library/scala/reflect/api/Universe.scala
@@ -10,7 +10,8 @@ abstract class Universe extends Symbols
with Positions
with TreePrinters
with AnnotationInfos
- with StandardDefinitions {
+ with StandardDefinitions
+ with StandardNames {
type Position
val NoPosition: Position
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
new file mode 100644
index 0000000000..ebbd4735e5
--- /dev/null
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -0,0 +1,36 @@
+package scala.reflect
+package macro
+
+trait Context extends api.Universe {
+
+ /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+ */
+ def captureVariable(vble: Symbol): Unit
+
+ /** Mark given identifier as a reference to a captured variable itself
+ * suppressing dereferencing with the `elem` field.
+ */
+ def referenceCapturedVariable(id: Ident): Tree
+
+ /** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+ * For instance, given the abstract syntax tree representation of the `x + 1` expression:
+ *
+ * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+ *
+ * The reifier transforms it to the following tree:
+ *
+ * $mr.Apply($mr.Select($mr.Ident($mr.newFreeVar("x", <Int>, x)), "+"), List($mr.Literal($mr.Constant(1))))
+ *
+ * The transformation looks mostly straightforward, but it has its tricky parts:
+ * * Reifier retains symbols and types defined outside the reified tree, however
+ * locally defined entities get erased and replaced with their original trees
+ * * Free variables are detected and wrapped in symbols of the type FreeVar
+ * * Mutable variables that are accessed from a local function are wrapped in refs
+ * * Since reified trees can be compiled outside of the scope they've been created in,
+ * special measures are taken to ensure that all freeVars remain visible
+ *
+ * Typical usage of this function is to turn some of the trees received or created by a macro
+ * into a form that can be inspected (via pattern matching) or compiled and run (by a reflective ToolBox) at runtime.
+ */
+ def reify(tree: Tree): Tree
+}
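A hedged sketch of how reify might be called from a macro implementation; the surrounding plumbing is an assumption, only Context.reify comes from the file above.

    def quoted(c: scala.reflect.macro.Context)(tree: c.Tree): c.Tree =
      c.reify(tree)   // yields a tree that rebuilds `tree` at runtime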
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 62592baa27..1c3e618520 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -8,7 +8,7 @@ package object reflect {
// initialization, but in response to a doomed attempt to utilize it.
lazy val mirror: api.Mirror = {
// we use (Java) reflection here so that we can keep reflect.runtime and reflect.internals in a separate jar
- ReflectionUtils.singletonInstance("scala.reflect.runtime.Mirror") collect { case x: api.Mirror => x } getOrElse {
+ ReflectionUtils.singletonInstanceOpt("scala.reflect.runtime.Mirror") collect { case x: api.Mirror => x } getOrElse {
throw new UnsupportedOperationException("Scala reflection not available on this platform")
}
}
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a9e5e90e20..b2f336fe52 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -9,6 +9,6 @@
package scala.runtime
-abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] {
}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index f48d99f5af..cbe778f09b 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -26,7 +26,7 @@ abstract class AbstractPartialFunction[-T1, +R]
private var fallBackField: PartialFunction[T1 @uncheckedVariance, R @uncheckedVariance] = _
def fallBack: PartialFunction[T1, R] = synchronized {
- if (fallBackField == null) fallBackField = PartialFunction.empty
+ if (fallBackField eq null) fallBackField = PartialFunction.empty
fallBackField
}
@@ -38,7 +38,7 @@ abstract class AbstractPartialFunction[-T1, +R]
override def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = {
val result = this.clone.asInstanceOf[AbstractPartialFunction[A1, B1]]
result.synchronized {
- result.fallBackField = this.fallBackField orElse that
+ result.fallBackField = if (this.fallBackField eq null) that else this.fallBackField orElse that
result
}
}
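Sketch (not in the patch) of the behaviour the null-check fix protects: composing a partial function whose fallback was never initialised must still route unmatched inputs to `that`.

    val one: PartialFunction[Int, String] = { case 1 => "one" }
    val two: PartialFunction[Int, String] = { case 2 => "two" }
    val both = one orElse two
    assert(both(1) == "one" && both(2) == "two")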
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index c726c56d0e..b19c8d086c 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -769,6 +769,24 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
+
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ if (arg instanceof java.lang.Boolean)
+ return true;
+ else
+ return isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
/** arg.toChar */
public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 951bdd888e..8bc63ae3a0 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -36,7 +36,16 @@ object ScalaRunTime {
case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
case _ => false
}
- private val tupleNames = 1 to 22 map ("scala.Tuple" + _) toSet
+ // Avoiding boxing which messes up the specialized tests. Don't ask.
+ private val tupleNames = {
+ var i = 22
+ var names: List[String] = Nil
+ while (i >= 1) {
+ names ::= ("scala.Tuple" + String.valueOf(i))
+ i -= 1
+ }
+ names.toSet
+ }
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index 902faa166e..b24474f35d 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-
-
package scala
+import Specializable._
+
/** Annotate type parameters on which code should be automatically
* specialized. For example:
* {{{
@@ -24,8 +24,9 @@ package scala
*
* @since 2.8
*/
-class specialized(types: SpecializableCompanion*) extends annotation.StaticAnnotation {
- def this() {
- this(Unit, Boolean, Byte, Short, Char, Int, Long, Float, Double)
- }
+// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation {
+
+class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation {
+ def this(types: Specializable*) = this(new Group(types.toList))
+ def this() = this(Everything)
}
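As a rough usage sketch (not part of the patch; `Box` is a hypothetical class), the call-site shape is the same whether the annotation is backed by a group or by the varargs constructor:
{{{
// Specialize the type parameter for Int and Double only;
// other type arguments use the generic (boxed) version.
class Box[@specialized(Int, Double) T](val value: T) {
  def map[@specialized(Int, Double) U](f: T => U): Box[U] = new Box(f(value))
}

val b = new Box(42)   // picks the Int-specialized variant
}}}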
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 44e573896f..5b7244e98e 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -13,15 +13,25 @@ import processInternal._
import java.io.{ BufferedReader, InputStreamReader, FilterInputStream, FilterOutputStream }
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.immutable.Stream
+import scala.annotation.tailrec
/**
* This object contains factories for [[scala.sys.process.ProcessIO]],
* which can be used to control the I/O of a [[scala.sys.process.Process]]
* when a [[scala.sys.process.ProcessBuilder]] is started with the `run`
* command.
+ *
+ * It also contains some helper methods that can be used in the creation of
+ * `ProcessIO`.
+ *
+ * It is used by other classes in the package in the implementation of various
+ * features, but can also be used by client code.
*/
object BasicIO {
+ /** Size of the buffer used in all the functions that copy data */
final val BufferSize = 8192
+
+ /** Used to separate lines in the `processFully` function that takes `Appendable`. */
final val Newline = props("line.separator")
private[process] final class Streamed[T](
@@ -52,15 +62,70 @@ object BasicIO {
def protect(out: OutputStream): OutputStream = if ((out eq stdout) || (out eq stderr)) Uncloseable(out) else out
}
+ /** Creates a `ProcessIO` from a function `String => Unit`. It can attach the
+ * process input to stdin, and it sends the error stream either to stderr or
+ * to a `ProcessLogger`.
+ *
+ * For example, the `ProcessIO` created below will print all normal output
+ * while forwarding all error output to stderr. No input will be provided.
+ * {{{
+ * import scala.sys.process.BasicIO
+ * val errToStderr = BasicIO(false, println(_), None)
+ * }}}
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param output A function that will be called with the process output.
+ * @param log An optional `ProcessLogger` to which the output should be
+ * sent. If `None`, output will be sent to stderr.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, output: String => Unit, log: Option[ProcessLogger]) =
new ProcessIO(input(withIn), processFully(output), getErr(log))
+ /** Creates a `ProcessIO` that appends its output to a `StringBuffer`. It can
+ * attach the process input to stdin, and it sends the error stream either to
+ * stderr or to a `ProcessLogger`.
+ *
+ * For example, the `ProcessIO` created by the function below will store the
+ * normal output in the buffer provided, and send all error output to the
+ * given `ProcessLogger`. The input will be read from stdin.
+ * {{{
+ * import scala.sys.process.{BasicIO, ProcessLogger}
+ * val printer = ProcessLogger(println(_))
+ * def appendToBuffer(b: StringBuffer) = BasicIO(true, b, Some(printer))
+ * }}}
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param buffer A `StringBuffer` which will receive the process normal
+ * output.
+ * @param log An optional `ProcessLogger` to which the output should be
+ * sent. If `None`, output will be sent to stderr.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, buffer: StringBuffer, log: Option[ProcessLogger]) =
new ProcessIO(input(withIn), processFully(buffer), getErr(log))
+ /** Creates a `ProcessIO` from a `ProcessLogger`. It can attach the
+ * process input to stdin.
+ *
+ * @param withIn True if the process input should be attached to stdin.
+ * @param log A `ProcessLogger` to receive all output, normal and error.
+ * @return A `ProcessIO` with the characteristics above.
+ */
def apply(withIn: Boolean, log: ProcessLogger) =
new ProcessIO(input(withIn), processOutFully(log), processErrFully(log))
+ /** Returns a function `InputStream => Unit` given an optional
+ * `ProcessLogger`. If no logger is passed, the function will send the output
+ * to stderr. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]].
+ *
+ * @param log An optional `ProcessLogger` to which the contents of
+ * the `InputStream` will be sent.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will send the data to
+ * either the provided `ProcessLogger` or, if `None`, to stderr.
+ */
def getErr(log: Option[ProcessLogger]) = log match {
case Some(lg) => processErrFully(lg)
case None => toStdErr
@@ -69,13 +134,40 @@ object BasicIO {
private def processErrFully(log: ProcessLogger) = processFully(log err _)
private def processOutFully(log: ProcessLogger) = processFully(log out _)
+ /** Closes a `Closeable` without throwing an exception */
def close(c: Closeable) = try c.close() catch { case _: IOException => () }
+
+ /** Returns a function `InputStream => Unit` that appends all data read to the
+ * provided `Appendable`. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]]. Data is appended to the buffer line by line.
+ *
+ * @param buffer An `Appendable` such as `StringBuilder` or `StringBuffer`.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will append all data read
+ * from the stream to the buffer.
+ */
def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer))
+
+ /** Returns a function `InputStream => Unit` that will call the passed
+ * function with all data read. This function can be used to create a
+ * [[scala.sys.process.ProcessIO]]. The `processLine` function will be called
+ * with each line read, and `Newline` will be appended after each line.
+ *
+ * @param processLine A function that will be called with all data read from
+ * the stream.
+ * @return A function `InputStream => Unit` (used by
+ * [[scala.sys.process.ProcessIO]]) which will call `processLine`
+ * with all data read from the stream.
+ */
def processFully(processLine: String => Unit): InputStream => Unit = in => {
val reader = new BufferedReader(new InputStreamReader(in))
processLinesFully(processLine)(reader.readLine)
+ reader.close()
}
+ /** Calls `processLine` with the result of `readLine` until the latter returns
+ * `null`.
+ */
def processLinesFully(processLine: String => Unit)(readLine: () => String) {
def readFully() {
val line = readLine()
@@ -86,14 +178,38 @@ object BasicIO {
}
readFully()
}
- def connectToIn(o: OutputStream): Unit = transferFully(stdin, o)
- def input(connect: Boolean): OutputStream => Unit = if (connect) connectToIn else _ => ()
+
+ /** Copies the contents of stdin to the `OutputStream`. */
+ def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o)
+
+ /** Returns a function `OutputStream => Unit` that either copies the contents
+ * of stdin to the stream or does nothing, closing the stream afterwards.
+ * This function can be used by [[scala.sys.process.ProcessIO]].
+ */
+ def input(connect: Boolean): OutputStream => Unit = { outputToProcess =>
+ if (connect) connectToIn(outputToProcess)
+ outputToProcess.close()
+ }
+
+ /** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */
def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput))
+
+ /** Returns a `ProcessIO` connected to stdout, stderr and the provided `in`. */
def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr)
+ /** Sends all the input from the stream to stderr, and closes the input stream
+ * afterwards.
+ */
def toStdErr = (in: InputStream) => transferFully(in, stderr)
+
+ /** Sends all the input from the stream to stdout, and closes the input stream
+ * afterwards.
+ */
def toStdOut = (in: InputStream) => transferFully(in, stdout)
+ /** Copies all input from the input stream to the output stream. Closes the
+ * input stream once it's all read.
+ */
def transferFully(in: InputStream, out: OutputStream): Unit =
try transferFullyImpl(in, out)
catch onInterrupt(())
@@ -105,13 +221,14 @@ object BasicIO {
private[this] def transferFullyImpl(in: InputStream, out: OutputStream) {
val buffer = new Array[Byte](BufferSize)
- def loop() {
+ @tailrec def loop() {
val byteCount = in.read(buffer)
if (byteCount > 0) {
out.write(buffer, 0, byteCount)
- out.flush()
- loop()
- }
+ // flush() will throw an exception once the process has terminated
+ val available = try { out.flush(); true } catch { case _: IOException => false }
+ if (available) loop() else in.close()
+ } else in.close()
}
loop()
}
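A small sketch (not part of the patch) of how the `BasicIO` helpers documented above can be assembled into a custom `ProcessIO`; the command and names are illustrative:
{{{
import scala.sys.process._

val out = new StringBuffer
val io = new ProcessIO(
  BasicIO.input(false),        // provide no input, just close the process's stdin
  BasicIO.processFully(out),   // append the process's normal output to the buffer
  BasicIO.toStdErr             // forward error output to this process's stderr
)
val exitCode = "ls".run(io).exitValue()
}}}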
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index b8765aa615..c2a61af936 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -13,7 +13,7 @@ import processInternal._
import ProcessBuilder._
/** Represents a process that is running or has finished running.
- * It may be a compound process with several underlying native processes (such as 'a #&& b`).
+ * It may be a compound process with several underlying native processes (such as `a #&& b`).
*
* This trait is often not used directly, though its companion object contains
* factories for [[scala.sys.process.ProcessBuilder]], the main component of this
@@ -42,28 +42,28 @@ object Process extends ProcessImpl with ProcessCreation { }
* found on and used through [[scala.sys.process.Process]]'s companion object.
*/
trait ProcessCreation {
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `String`, including the
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String`, including the
* parameters.
*
* @example {{{ apply("cat file.txt") }}}
*/
def apply(command: String): ProcessBuilder = apply(command, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`,
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`,
* where the head is the command and each element of the tail is a parameter.
*
* @example {{{ apply("cat" :: files) }}}
*/
def apply(command: Seq[String]): ProcessBuilder = apply(command, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`,
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`,
* and a sequence of `String` representing the arguments.
*
* @example {{{ apply("cat", files) }}}
*/
def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
* environment variables.
*
* @example {{{ apply("java", new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}}
@@ -71,7 +71,7 @@ trait ProcessCreation {
def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv: _*)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
* environment variables.
*
* @example {{{ apply("java" :: javaArgs, new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}}
@@ -79,7 +79,7 @@ trait ProcessCreation {
def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder =
apply(command, Some(cwd), extraEnv: _*)
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
* `File` and extra environment variables.
*
* @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
@@ -93,7 +93,7 @@ trait ProcessCreation {
}*/
}
- /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
+ /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
* `File` and extra environment variables.
*
* @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
@@ -105,7 +105,7 @@ trait ProcessCreation {
apply(jpb)
}
- /** create a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`.
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`.
*
* @example {{{
* apply((new java.lang.ProcessBuilder("ls", "-l")) directory new java.io.File(System.getProperty("user.home")))
@@ -113,19 +113,19 @@ trait ProcessCreation {
*/
def apply(builder: JProcessBuilder): ProcessBuilder = new Simple(builder)
- /** create a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This
* `ProcessBuilder` can then be used as a `Source` or a `Sink`, so one can
* pipe things from and to it.
*/
def apply(file: File): FileBuilder = new FileImpl(file)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This
* `ProcessBuilder` can then be used as a `Source`, so that one can pipe things
* from it.
*/
def apply(url: URL): URLBuilder = new URLImpl(url)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
* This can be used as a way to template strings.
*
* @example {{{
@@ -134,23 +134,23 @@ trait ProcessCreation {
*/
def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be used
* to force an exit value.
*/
def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a `String` name and a
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String` name and a
* `Boolean`. This can be used to force an exit value, with the name being
* used for `toString`.
*/
def apply(name: String, exitValue: => Int): ProcessBuilder = new Dummy(name, exitValue)
- /** Create a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of
+ /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of
* something else for which there's an implicit conversion to `Source`.
*/
def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert)
- /** Create a [[scala.sys.process.ProcessBuilder]] from one or more
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more
* [[scala.sys.process.ProcessBuilder.Source]], which can then be
* piped to something else.
*
@@ -170,7 +170,7 @@ trait ProcessCreation {
*/
def cat(file: Source, files: Source*): ProcessBuilder = cat(file +: files)
- /** Create a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence
+ /** Creates a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence
* of [[scala.sys.process.ProcessBuilder.Source]], which can then be
* piped to something else.
*
@@ -198,18 +198,41 @@ trait ProcessImplicits {
/** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */
implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder)
- /** Implicitly convert a `java.io.File` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `java.io.File` into a
+ * [[scala.sys.process.ProcessBuilder.FileBuilder]], which can be used as
+ * either input or output of a process. For example:
+ * {{{
+ * import scala.sys.process._
+ * "ls" #> new java.io.File("dirContents.txt") !
+ * }}}
+ */
implicit def fileToProcess(file: File): FileBuilder = apply(file)
- /** Implicitly convert a `java.net.URL` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `java.net.URL` into a
+ * [[scala.sys.process.ProcessBuilder.URLBuilder]] , which can be used as
+ * input to a process. For example:
+ * {{{
+ * import scala.sys.process._
+ * Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") !
+ * }}}
+ */
implicit def urlToProcess(url: URL): URLBuilder = apply(url)
- /** Implicitly convert a [[scala.xml.Elem]] into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a [[scala.xml.Elem]] into a
+ * [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
+ * elements of the element, trimming spaces, and then converting the result
+ * from string to a process. Importantly, tags are completely ignored, so
+ * they cannot be used to separate parameters.
+ */
implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command)
- /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. */
implicit def stringToProcess(command: String): ProcessBuilder = apply(command)
- /** Implicitly convert a sequence of `String` into a [[scala.sys.process.ProcessBuilder]] */
+ /** Implicitly convert a sequence of `String` into a
+ * [[scala.sys.process.ProcessBuilder]]. The first argument will be taken to
+ * be the command to be executed, and the remaining will be its arguments.
+ * When using this, arguments may contain spaces.
+ */
implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command)
}
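A brief sketch (not part of the patch) exercising the factories and implicit conversions documented above; the paths and environment values are illustrative:
{{{
import scala.sys.process._
import java.io.File

// Seq[String] conversion: the head is the command, the rest are its arguments.
val exit = Seq("ls", "-a").!

// Explicit factory: run in a given working directory with an extra environment variable.
val listing = Process("ls" :: "-a" :: Nil, new File("/tmp"), "LC_ALL" -> "C").!!
}}}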
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index 214d908012..20270d423f 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -12,133 +12,265 @@ package process
import processInternal._
import ProcessBuilder._
-/** Represents a runnable process.
+/** Represents a sequence of one or more external processes that can be
+ * executed. A `ProcessBuilder` can be a single external process, or a
+ * combination of other `ProcessBuilder`. One can control where
+ * the output of an external process will go to, and where its input will come
+ * from, or leave that decision to whoever starts it.
*
- * This is the main component of this package. A `ProcessBuilder` may be composed with
- * others, either concatenating their outputs or piping them from one to the next, and
- * possibly with conditional execution depending on the last process exit value.
+ * One creates a `ProcessBuilder` through factories provided in
+ * [[scala.sys.process.Process]]'s companion object, or implicit conversions
+ * based on these factories made available in the package object
+ * [[scala.sys.process]]. Here are some examples:
+ * {{{
+ * import scala.sys.process._
*
- * Once executed, one can retrieve the output or redirect it to a
- * [[scala.sys.process.ProcessLogger]], or one can get the exit value, discarding or
- * redirecting the output.
+ * // Executes "ls" and sends output to stdout
+ * "ls".!
*
- * One creates a `ProcessBuilder` through factories provided in [[scala.sys.process.Process]]'s
- * companion object, or implicit conversions based on these factories made available in the
- * package object [[scala.sys.process]].
+ * // Execute "ls" and assign a `Stream[String]` of its output to "contents".
+ * // Because [[scala.Predef]] already defines a `lines` method for `String`,
+ * // we use [[scala.sys.process.Process]]'s object companion to create it.
+ * val contents = Process("ls").lines
*
- * Let's examine in detail one example of usage:
+ * // Here we use a `Seq` to make the parameter whitespace-safe
+ * def contentsOf(dir: String): String = Seq("ls", dir).!!
+ * }}}
+ *
+ * The methods of `ProcessBuilder` are divided into three categories: the ones that
+ * combine two `ProcessBuilder` to create a third, the ones that redirect input
+ * or output of a `ProcessBuilder`, and the ones that execute
+ * the external processes associated with it.
+ *
+ * ==Combining `ProcessBuilder`==
+ *
+ * Two existing `ProcessBuilder` can be combined in the following ways:
+ *
+ * * They can be executed in parallel, with the output of the first being fed
+ * as input to the second, like Unix pipes. This is achieved with the `#|`
+ * method.
+ * * They can be executed in sequence, with the second starting as soon as
+ * the first ends. This is done by the `###` method.
+ * * The execution of the second one can be conditioned by the return code
+ * (exit status) of the first, either only when it's zero, or only when it's
+ * not zero. The methods `#&&` and `#||` accomplish these tasks.
+ *
+ * ==Redirecting Input/Output==
+ *
+ * Though input and output can be controlled when executing the process,
+ * there are a few methods that create a new `ProcessBuilder` with a
+ * pre-configured input or output. They are `#<`, `#>` and `#>>`, and may take
+ * as input either another `ProcessBuilder` (like the pipe described above), or
+ * something else such as a `java.io.File` or a `java.lang.InputStream`.
+ * For example:
+ * {{{
+ * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") !
+ * }}}
+ *
+ * ==Starting Processes==
+ *
+ * To execute all external commands associated with a `ProcessBuilder`, one
+ * may use one of four groups of methods. Each of these methods has various
+ * overloads and variations to enable further control over the I/O. These
+ * methods are:
+ *
+ * * `run`: the most general method, it returns a
+ * [[scala.sys.process.Process]] immediately, and the external command
+ * executes concurrently.
+ * * `!`: blocks until all external commands exit, and returns the exit code
+ * of the last one in the chain of execution.
+ * * `!!`: blocks until all external commands exit, and returns a `String`
+ * with the output generated.
+ * * `lines`: returns immediately like `run`, and the output being generated
+ * is provided through a `Stream[String]`. Getting the next element of that
+ * `Stream` may block until it becomes available. This method will throw an
+ * exception if the return code is different from zero -- if this is not
+ * desired, use the `lines_!` method.
+ *
+ * ==Handling Input and Output==
+ *
+ * If not specified, the input of the external commands executed with `run` or
+ * `!` will not be tied to anything, and the output will be redirected to the
+ * stdout and stderr of the Scala process. For the methods `!!` and `lines`, no
+ * input will be provided, and the output will be directed according to the
+ * semantics of these methods.
*
+ * Some methods will cause stdin to be used as input. Output can be controlled
+ * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lines` will only
+ * redirect error output when passed a `ProcessLogger`. If one desires full
+ * control over input and output, then a [[scala.sys.process.ProcessIO]] can be
+ * used with `run`.
+ *
+ * For example, we could silence the error output from `lines_!` like this:
+ * {{{
+ * val etcFiles = "find /etc" lines_! ProcessLogger(line => ())
+ * }}}
+ *
+ * ==Extended Example==
+ *
+ * Let's examine in detail one example of usage:
* {{{
* import scala.sys.process._
* "find src -name *.scala -exec grep null {} ;" #| "xargs test -z" #&& "echo null-free" #|| "echo null detected" !
* }}}
- *
* Note that every `String` is implicitly converted into a `ProcessBuilder`
* through the implicits imported from [[scala.sys.process]]. These `ProcessBuilder` are then
* combined in three different ways.
*
* 1. `#|` pipes the output of the first command into the input of the second command. It
- * mirrors a shell pipe (`|`).
- * 2. `#&&` conditionally executes the second command if the previous one finished with
- * exit value 0. It mirrors shell's `&&`.
- * 3. `#||` conditionally executes the third command if the exit value of the previous
- * command is different than zero. It mirrors shell's `&&`.
- *
- * Not shown here, the equivalent of a shell's `;` would be `###`. The reason for this name is
- * that `;` is a reserved token in Scala.
- *
- * Finally, `!` at the end executes the commands, and returns the exit value. If the output
- * was desired instead, one could run that with `!!` instead.
- *
- * If one wishes to execute the commands in background, one can either call `run`, which
- * returns a [[scala.sys.process.Process]] from which the exit value can be obtained, or
- * `lines`, which returns a [scala.collection.immutable.Stream] of output lines. This throws
- * an exception at the end of the `Stream` is the exit value is non-zero. To avoid exceptions,
- * one can use `lines_!` instead.
- *
- * One can also start the commands in specific ways to further control their I/O. Using `!<` to
- * start the commands will use the stdin from the current process for them. All methods can
- * be used passing a [[scala.sys.process.ProcessLogger]] to capture the output, both stderr and
- * stdout. And, when using `run`, one can pass a [[scala.sys.process.ProcessIO]] to control
- * stdin, stdout and stderr.
- *
- * The stdin of a command can be redirected from a `java.io.InputStream`, a `java.io.File`, a
- * `java.net.URL` or another `ProcessBuilder` through the method `#<`. Likewise, the stdout
- * can be sent to a `java.io.OutputStream`, a `java.io.File` or another `ProcessBuilder` with
- * the method `#>`. The method `#>>` can be used to append the output to a `java.io.File`.
- * For example:
+ * mirrors a shell pipe (`|`).
+ * 1. `#&&` conditionally executes the second command if the previous one finished with
+ * exit value 0. It mirrors shell's `&&`.
+ * 1. `#||` conditionally executes the third command if the exit value of the previous
+ * command is different from zero. It mirrors shell's `||`.
*
- * {{{
- * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") !
- * }}}
+ * Finally, `!` at the end executes the commands, and returns the exit value.
+ * Whatever is printed will be sent to the Scala process standard output. If
+ * we wanted to capture it, we could run that with `!!` instead.
+ *
+ * Note: though it is not shown above, the equivalent of a shell's `;` would be
+ * `###`. The reason for this name is that `;` is a reserved token in Scala.
*/
trait ProcessBuilder extends Source with Sink {
- /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
- * sent to the console. If the exit code is non-zero, an exception is thrown.*/
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the console. If
+ * the exit code is non-zero, an exception is thrown.
+ */
def !! : String
- /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is
- * sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the provided
+ * ProcessLogger. If the exit code is non-zero, an exception is thrown.
+ */
def !!(log: ProcessLogger): String
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination and then throw an exception. */
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the console. If
+ * the exit code is non-zero, an exception is thrown. The newly started
+ * process reads from standard input of the current process.
+ */
+ def !!< : String
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the output as a String. Standard error is sent to the provided
+ * ProcessLogger. If the exit code is non-zero, an exception is thrown. The
+ * newly started process reads from standard input of the current process.
+ */
+ def !!<(log: ProcessLogger): String
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the console. If the process exits
+ * with a non-zero value, the Stream will provide all lines up to termination
+ * and then throw an exception.
+ */
def lines: Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the provided ProcessLogger. If the
+ * process exits with a non-zero value, the Stream will provide all lines up
+ * to termination but will not throw an exception.
+ */
def lines(log: ProcessLogger): Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the console. If the process exits
+ * with a non-zero value, the Stream will provide all lines up to termination
+ * but will not throw an exception.
+ */
def lines_! : Stream[String]
- /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available
- * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value,
- * the Stream will provide all lines up to termination but will not throw an exception. */
+
+ /** Starts the process represented by this builder. The output is returned as
+ * a Stream that blocks when lines are not available but the process has not
+ * completed. Standard error is sent to the provided ProcessLogger. If the
+ * process exits with a non-zero value, the Stream will provide all lines up
+ * to termination but will not throw an exception.
+ */
def lines_!(log: ProcessLogger): Stream[String]
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the console.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the console.
+ */
def ! : Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger.
+ */
def !(log: ProcessLogger): Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the console. The newly started process reads from standard input of the current process.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the console.
+ * The newly started process reads from standard input of the current process.
+ */
def !< : Int
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger. The newly started process reads from standard input of the current process.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger. The newly started process reads from standard input of the
+ * current process.
+ */
def !<(log: ProcessLogger): Int
- /** Starts the process represented by this builder. Standard output and error are sent to the console.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the console. */
def run(): Process
- /** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the given ProcessLogger.
+ */
def run(log: ProcessLogger): Process
- /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/
+
+ /** Starts the process represented by this builder. I/O is handled by the
+ * given ProcessIO instance.
+ */
def run(io: ProcessIO): Process
- /** Starts the process represented by this builder. Standard output and error are sent to the console.
- * The newly started process reads from standard input of the current process if `connectInput` is true.*/
+
+ /** Starts the process represented by this builder. Standard output and error
+ * are sent to the console. The newly started process reads from standard
+ * input of the current process if `connectInput` is true.
+ */
def run(connectInput: Boolean): Process
- /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are
- * sent to the given ProcessLogger.
- * The newly started process reads from standard input of the current process if `connectInput` is true.*/
+
+ /** Starts the process represented by this builder, blocks until it exits, and
+ * returns the exit code. Standard output and error are sent to the given
+ * ProcessLogger. The newly started process reads from standard input of the
+ * current process if `connectInput` is true.
+ */
def run(log: ProcessLogger, connectInput: Boolean): Process
- /** Constructs a command that runs this command first and then `other` if this command succeeds.*/
+ /** Constructs a command that runs this command first and then `other` if this
+ * command succeeds.
+ */
def #&& (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/
+
+ /** Constructs a command that runs this command first and then `other` if this
+ * command does not succeed.
+ */
def #|| (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that will run this command and pipes the output to `other`. `other` must be a simple command.*/
+
+ /** Constructs a command that will run this command and pipes the output to
+ * `other`. `other` must be a simple command.
+ */
def #| (other: ProcessBuilder): ProcessBuilder
- /** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/
+
+ /** Constructs a command that will run this command and then `other`. The
+ * exit code will be the exit code of `other`.
+ */
def ### (other: ProcessBuilder): ProcessBuilder
- /** True if this command can be the target of a pipe.
- */
+
+ /** True if this command can be the target of a pipe. */
def canPipeTo: Boolean
- /** True if this command has an exit code which should be propagated to the user.
- * Given a pipe between A and B, if B.hasExitValue is true then the exit code will
- * be the one from B; if it is false, the one from A. This exists to prevent output
- * redirections (implemented as pipes) from masking useful process error codes.
- */
+ /** True if this command has an exit code which should be propagated to the
+ * user. Given a pipe between A and B, if B.hasExitValue is true then the
+ * exit code will be the one from B; if it is false, the one from A. This
+ * exists to prevent output redirections (implemented as pipes) from masking
+ * useful process error codes.
+ */
def hasExitValue: Boolean
}
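A short sketch (not part of the patch) of the combinators and run methods described above; the commands are illustrative:
{{{
import scala.sys.process._

// Conditional execution: the second command runs only if the first succeeds,
// the third only if everything before it failed.
val status = ("test -f /etc/passwd" #&& "echo found" #|| "echo missing").!

// Stream results lazily while silencing error output.
val scalaFiles = "find . -name *.scala" lines_! ProcessLogger(_ => ())
}}}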
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index 261e837a4d..fa0674670f 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -11,14 +11,40 @@ package process
import processInternal._
-/** This class is used to control the I/O of every [[scala.sys.process.ProcessBuilder]].
- * Most of the time, there is no need to interact with `ProcessIO` directly. However, if
- * fine control over the I/O of a `ProcessBuilder` is desired, one can use the factories
- * on [[scala.sys.process.BasicIO]] stand-alone object to create one.
- *
- * Each method will be called in a separate thread.
- * If daemonizeThreads is true, they will all be marked daemon threads.
- */
+/** This class is used to control the I/O of every
+ * [[scala.sys.process.Process]]. The functions used to create it will be
+ * called with the process streams once it has been started. It might not be
+ * necessary to use `ProcessIO` directly --
+ * [[scala.sys.process.ProcessBuilder]] can return the process output to the
+ * caller, or use a [[scala.sys.process.ProcessLogger]] which avoids direct
+ * interaction with a stream. One can even use the factories in `BasicIO` to
+ * create a `ProcessIO`, or use its helper methods when creating one's own
+ * `ProcessIO`.
+ *
+ * When creating a `ProcessIO`, it is important to ''close all streams'' when
+ * finished, since the JVM might use system resources to capture the process
+ * input and output, and will not release them unless the streams are
+ * explicitly closed.
+ *
+ * `ProcessBuilder` will call `writeInput`, `processOutput` and `processError`
+ * in separate threads, and if daemonizeThreads is true, they will all be
+ * marked as daemon threads.
+ *
+ * @param writeInput Function that will be called with the `OutputStream` to
+ * which all input to the process must be written. This will
+ * be called in a newly spawned thread.
+ * @param processOutput Function that will be called with the `InputStream`
+ * from which all normal output of the process must be
+ * read. This will be called in a newly spawned
+ * thread.
+ * @param processError Function that will be called with the `InputStream` from
+ * which all error output of the process must be read.
+ * This will be called in a newly spawned thread.
+ * @param daemonizeThreads Indicates whether the newly spawned threads that
+ * will run `processOutput`, `processError` and
+ * `writeInput` should be marked as daemon threads.
+ * @note Failure to close the passed streams may result in resource leakage.
+ */
final class ProcessIO(
val writeInput: OutputStream => Unit,
val processOutput: InputStream => Unit,
@@ -27,8 +53,15 @@ final class ProcessIO(
) {
def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, false)
+ /** Creates a new `ProcessIO` with a different handler for the process input. */
def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, daemonizeThreads)
+
+ /** Creates a new `ProcessIO` with a different handler for the normal output. */
def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, daemonizeThreads)
+
+ /** Creates a new `ProcessIO` with a different handler for the error output. */
def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, daemonizeThreads)
+
+ /** Creates a new `ProcessIO`, with `daemonizeThreads` true. */
def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, true)
}
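A minimal sketch (not part of the patch) of a hand-written `ProcessIO` that honours the "close all streams" requirement described above; counting bytes is used purely as an illustration:
{{{
import scala.sys.process._
import java.io.{ InputStream, OutputStream }

def countAndClose(in: InputStream): Unit = {
  var n = 0
  while (in.read() != -1) n += 1
  in.close()                              // always close the stream we were handed
  println("read " + n + " bytes")
}

val io = new ProcessIO(
  (in: OutputStream) => in.close(),       // no input: close the process's stdin at once
  countAndClose,                          // normal output
  countAndClose                           // error output
)
"ls".run(io).exitValue()
}}}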
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index 67146dd70e..a8241db53c 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -11,12 +11,26 @@ package process
import java.io._
-/** Encapsulates the output and error streams of a running process.
- * Many of the methods of `ProcessBuilder` accept a `ProcessLogger` as
- * an argument.
- *
- * @see [[scala.sys.process.ProcessBuilder]]
- */
+/** Encapsulates the output and error streams of a running process. This is used
+ * by [[scala.sys.process.ProcessBuilder]] when starting a process, as an
+ * alternative to [[scala.sys.process.ProcessIO]], which can be more difficult
+ * to use. Note that a `ProcessLogger` will be used to create a `ProcessIO`
+ * anyway. The object `BasicIO` has some functions to do that.
+ *
+ * Here is an example that counts the number of lines in the normal and error
+ * output of a process:
+ * {{{
+ * import scala.sys.process._
+ *
+ * var normalLines = 0
+ * var errorLines = 0
+ * val countLogger = ProcessLogger(line => normalLines += 1,
+ * line => errorLines += 1)
+ * "find /etc" ! countLogger
+ * }}}
+ *
+ * @see [[scala.sys.process.ProcessBuilder]]
+ */
trait ProcessLogger {
/** Will be called with each line read from the process output stream.
*/
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 3eb0e5bb89..c1bf470831 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -11,40 +11,175 @@
// for process debugging output.
//
package scala.sys {
- /**
- * This package is used to create process pipelines, similar to Unix command pipelines.
+ /** This package handles the execution of external processes. The contents of
+ * this package can be divided into three groups, according to their
+ * responsibilities:
*
- * The key concept is that one builds a [[scala.sys.process.Process]] that will run and return an exit
- * value. This `Process` is usually composed of one or more [[scala.sys.process.ProcessBuilder]], fed by a
- * [[scala.sys.process.ProcessBuilder.Source]] and feeding a [[scala.sys.process.ProcessBuilder.Sink]]. A
- * `ProcessBuilder` itself is both a `Source` and a `Sink`.
+ * - Indicating what to run and how to run it.
+ * - Handling a process input and output.
+ * - Running the process.
*
- * As `ProcessBuilder`, `Sink` and `Source` are abstract, one usually creates them with `apply` methods on
- * the companion object of [[scala.sys.process.Process]], or through implicit conversions available in this
- * package object from `String` and other types. The pipe is composed through unix-like pipeline and I/O
- * redirection operators available on [[scala.sys.process.ProcessBuilder]].
+ * For simple uses, the only group that matters is the first one. Running an
+ * external command can be as simple as `"ls".!`, or as complex as building a
+ * pipeline of commands such as this:
*
- * The example below shows how to build and combine such commands. It searches for `null` uses in the `src`
- * directory, printing a message indicating whether they were found or not. The first command pipes its
- * output to the second command, whose exit value is then used to choose between the third or fourth
- * commands. This same example is explained in greater detail on [[scala.sys.process.ProcessBuilder]].
+ * {{{
+ * import scala.sys.process._
+ * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines
+ * }}}
+ *
+ * We describe below the general concepts and architecture of the package,
+ * and then take a closer look at each of the categories mentioned above.
+ *
+ * ==Concepts and Architecture==
+ *
+ * The underlying basis for the whole package is Java's `Process` and
+ * `ProcessBuilder` classes. While there's no need to use these Java classes,
+ * they impose boundaries on what is possible. One cannot, for instance,
+ * retrieve a ''process id'' for whatever is executing.
+ *
+ * When executing an external process, one can provide a command's name,
+ * arguments to it, the directory in which it will be executed and what
+ * environment variables will be set. For each executing process, one can
+ * feed its standard input through a `java.io.OutputStream`, and read from
+ * its standard output and standard error through a pair of
+ * `java.io.InputStream`. One can wait until a process finishes execution and
+ * then retrieve its return value, or one can kill an executing process.
+ * Everything else must be built on those features.
+ *
+ * This package provides a DSL for running and chaining such processes,
+ * mimicking Unix shells' ability to pipe output from one process to the input
+ * of another, or control the execution of further processes based on the
+ * return status of the previous one.
+ *
+ * In addition to this DSL, this package also provides a few ways of
+ * controlling input and output of these processes, going from simple and
+ * easy to use to complex and flexible.
*
+ * When processes are composed, a new `ProcessBuilder` is created which, when
+ * run, will execute the `ProcessBuilder` instances it is composed of
+ * according to the manner of the composition. If piping one process to
+ * another, they'll be executed simultaneously, and each will be passed a
+ * `ProcessIO` that will copy the output of one to the input of the other.
+ *
+ * ==What to Run and How==
+ *
+ * The central component of the process execution DSL is the
+ * [[scala.sys.process.ProcessBuilder]] trait. It is `ProcessBuilder` that
+ * implements the process execution DSL, that creates the
+ * [[scala.sys.process.Process]] that will handle the execution, and returns
+ * the results of such execution to the caller. We can see that DSL in the
+ * introductory example: `#|`, `#&&` and `#||` are methods on
+ * `ProcessBuilder` used to create a new `ProcessBuilder` through
+ * composition.
+ *
+ * One creates a `ProcessBuilder` either through factories on
+ * [[scala.sys.process.Process]]'s companion object, or through implicit
+ * conversions available in this package object itself. Implicitly, each
+ * process is created either out of a `String`, with arguments separated by
+ * spaces -- no escaping of spaces is possible -- or out of a
+ * [[scala.collection.Seq]], where the first element represents the command
+ * name, and the remaining elements are arguments to it. In this latter case,
+ * arguments may contain spaces. One can also implicitly convert
+ * [[scala.xml.Elem]] and `java.lang.ProcessBuilder` into a `ProcessBuilder`.
+ * In the introductory example, the strings were converted into
+ * `ProcessBuilder` implicitly.
+ *
+ * To further control how the process will be run, such as specifying
+ * the directory in which it will be run, see the factories on
+ * [[scala.sys.process.Process]]'s companion object.
+ *
+ * Once the desired `ProcessBuilder` is available, it can be executed in
+ * different ways, depending on how one desires to control its I/O, and what
+ * kind of result one wishes for:
+ *
+ * - Return status of the process (`!` methods)
+ * - Output of the process as a `String` (`!!` methods)
+ * - Continuous output of the process as a `Stream[String]` (`lines` methods)
+ * - The `Process` representing it (`run` methods)
+ *
+ * Some simple examples of these methods:
* {{{
* import scala.sys.process._
- * (
- * "find src -name *.scala -exec grep null {} ;"
- * #| "xargs test -z"
- * #&& "echo null-free" #|| "echo null detected"
- * ) !
+ *
+ * // This uses ! to get the exit code
+ * def fileExists(name: String) = Seq("test", "-f", name).! == 0
+ *
+ * // This uses !! to get the whole result as a string
+ * val dirContents = "ls".!!
+ *
+ * // This "fire-and-forgets" the method, which can be lazily read through
+ * // a Stream[String]
+ * def sourceFilesAt(baseDir: String): Stream[String] = {
+ * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
+ * cmd.lines
+ * }
* }}}
*
- * Other implicits available here are for [[scala.sys.process.ProcessBuilder.FileBuilder]], which extends
- * both `Sink` and `Source`, and for [[scala.sys.process.ProcessBuilder.URLBuilder]], which extends
- * `Source` alone.
+ * We'll see more details about controlling I/O of the process in the next
+ * section.
+ *
+ * ==Handling Input and Output==
+ *
+ * In the underlying Java model, once a `Process` has been started, one can
+ * get `java.io.InputStream` and `java.io.OutputStream` representing its
+ * output and input respectively. That is, what one writes to an
+ * `OutputStream` is turned into input to the process, and the output of a
+ * process can be read from an `InputStream` -- of which there are two, one
+ * representing normal output, and the other representing error output.
+ *
+ * This model creates a difficulty, which is that the code responsible for
+ * actually running the external processes is the one that has to make
+ * decisions about how to handle its I/O.
+ *
+ * This package presents an alternative model: the I/O of a running process
+ * is controlled by a [[scala.sys.process.ProcessIO]] object, which can be
+ * passed ''to'' the code that runs the external process. A `ProcessIO` will
+ * have direct access to the java streams associated with the process I/O. It
+ * must, however, close these streams afterwards.
+ *
+ * Simpler abstractions are available, however. The components of this
+ * package that handle I/O are:
+ *
+ * - [[scala.sys.process.ProcessIO]]: provides the low level abstraction.
+ * - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction
+ * for output, and can be created through its companion object.
+ * - [[scala.sys.process.BasicIO]]: a library of helper methods for the
+ * creation of `ProcessIO`.
+ * - This package object itself, with a few implicit conversions.
*
- * One can even create a `Process` solely out of these, without running any command. For example, this will
- * download from a URL to a file:
+ * Some examples of I/O handling:
+ * {{{
+ * import scala.sys.process._
+ *
+ * // An overly complex way of computing the size of a compressed file
+ * def gzFileSize(name: String) = {
+ * val cat = Seq("zcat", name)
+ * var count = 0
+ * def byteCounter(input: java.io.InputStream) = {
+ * while(input.read() != -1) count += 1
+ * input.close()
+ * }
+ * cat ! new ProcessIO(_.close(), byteCounter, _.close())
+ * count
+ * }
+ *
+ * // This "fire-and-forgets" the method, which can be lazily read through
+ * // a Stream[String], and accumulates all errors on a StringBuffer
+ * def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = {
+ * val buffer = new StringBuffer()
+ * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f")
+ * val lines = cmd lines_! ProcessLogger(buffer append _)
+ * (lines, buffer)
+ * }
+ * }}}
*
+ * Instances of the java classes `java.io.File` and `java.net.URL` can both
+ * be used directly as input to other processes, and `java.io.File` can be
+ * used as output as well. One can even pipe one to the other directly
+ * without any intervening process, though that's not a design goal or
+ * recommended usage. For example, the following code will copy a web page to
+ * a file:
* {{{
* import java.io.File
* import java.net.URL
@@ -52,26 +187,33 @@ package scala.sys {
* new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") !
* }}}
*
- * One may use a `Process` directly through `ProcessBuilder`'s `run` method, which starts the process in
- * the background, and returns a `Process`. If background execution is not desired, one can get a
- * `ProcessBuilder` to execute through a method such as `!`, `lines`, `run` or variations thereof. That
- * will create the `Process` to execute the commands, and return either the exit value or the output, maybe
- * throwing an exception.
- *
- * Finally, when executing a `ProcessBuilder`, one may pass a [[scala.sys.process.ProcessLogger]] to
- * capture stdout and stderr of the executing processes. A `ProcessLogger` may be created through its
- * companion object from functions of type `(String) => Unit`, or one might redirect it to a file, using
- * [[scala.sys.process.FileProcessLogger]], which can also be created through `ProcessLogger`'s object
- * companion.
+ * More information about the other ways of controlling I/O can be found
+ * in the Scaladoc for the associated objects, traits and classes.
+ *
+ * ==Running the Process==
+ *
+ * Paradoxically, this is the simplest component of all, and the one least
+ * likely to be interacted with. It consists solely of
+ * [[scala.sys.process.Process]], and it provides only two methods:
+ *
+ * - `exitValue()`: blocks until the process exits, and then returns the exit
+ * value. This is what happens when one uses the `!` method of
+ * `ProcessBuilder`.
+ * - `destroy()`: this will kill the external process and close the streams
+ * associated with it.
*/
package object process extends ProcessImplicits {
+ /** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
import collection.JavaConversions._
java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
}
+ /** The input stream of this process */
def stdin = java.lang.System.in
+ /** The output stream of this process */
def stdout = java.lang.System.out
+ /** The error stream of this process */
def stderr = java.lang.System.err
}
// private val shell: String => Array[String] =
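A tiny sketch (not part of the patch) using the package-object members documented above:
{{{
import scala.sys.process._

// Arguments the current JVM was started with, e.g. -Xmx settings.
javaVmArguments foreach println

// The package-level streams combine with the BasicIO helpers, for instance
// to echo this process's stdin to its stdout (commented out since it blocks):
// BasicIO.transferFully(stdin, stdout)
}}}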
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 998661895b..22de5544a8 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -142,6 +142,11 @@ private[scala] trait PropertiesTrait {
*/
def isWin = osName startsWith "Windows"
def isMac = javaVendor startsWith "Apple"
+
+ // This is looking for javac, tools.jar, etc.
+ // Tries JDK_HOME first, then the more common (but likely JRE-only) JAVA_HOME,
+ // and finally the javaHome derived from the java.home system property.
+ def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
def scalaCmd = if (isWin) "scala.bat" else "scala"
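The lookup order above is equivalent to the following sketch (not part of the patch), written with plain `sys.env`/`sys.props` for clarity:
{{{
// JDK_HOME wins, then JAVA_HOME, then the running JVM's java.home property.
def jdkHomeSketch: String =
  sys.env.getOrElse("JDK_HOME",
    sys.env.getOrElse("JAVA_HOME",
      sys.props("java.home")))
}}}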
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index 127e6e0ab7..df52b34f87 100644
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -41,7 +41,7 @@ object Elem {
class Elem(
override val prefix: String,
val label: String,
- override val attributes: MetaData,
+ attributes1: MetaData,
override val scope: NamespaceBinding,
val child: Node*)
extends Node with Serializable
@@ -49,6 +49,8 @@ extends Node with Serializable
final override def doCollectNamespaces = true
final override def doTransform = true
+ override val attributes = MetaData.normalize(attributes1, scope)
+
if (prefix == "")
throw new IllegalArgumentException("prefix of zero length, use null instead")
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 98e863eb37..c516747bae 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -38,8 +38,8 @@ object MetaData {
def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
lazy val key = getUniversalKey(md, scope)
if (md eq Null) normalized_attribs
- else if (set(key)) iterate(md.next, normalized_attribs, set)
- else iterate(md.next, md copy normalized_attribs, set + key)
+ else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set)
+ else md copy iterate(md.next, normalized_attribs, set + key)
}
iterate(attribs, Null, Set())
}
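
The reworked `iterate` also skips attributes whose value is null and keeps the surviving attributes in their original order. A sketch of the effect at the call site:

    import scala.xml._

    object NormalizeDemo extends App {
      val attribs =
        new UnprefixedAttribute("a", "1",
          new UnprefixedAttribute("b", null: String, Null))  // null-valued attribute

      // "b" carries a null value, so only a="1" survives normalization.
      println(MetaData.normalize(attribs, TopScope))
    }
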
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index 436dfcda43..b80d6a1c73 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -13,22 +13,25 @@ package scala.xml
*
* @param pre ...
* @param key ...
- * @param value the attribute value, which may not be null
+ * @param value the attribute value
* @param next ...
*/
class PrefixedAttribute(
val pre: String,
val key: String,
val value: Seq[Node],
- val next: MetaData)
+ val next1: MetaData)
extends Attribute
{
- if (value eq null)
- throw new UnsupportedOperationException("value is null")
+ val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Utility.parseAttributeValue(value), next) */
+ /** same as this(pre, key, Text(value), next), or no attribute if value is null */
def this(pre: String, key: String, value: String, next: MetaData) =
- this(pre, key, Text(value), next)
+ this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next)
+
+ /** same as this(pre, key, value.get, next), or no attribute if value is None */
+ def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) =
+ this(pre, key, value.orNull, next)
/** Returns a copy of this unprefixed attribute with the given
* next field.
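
With this change a null value no longer throws: it simply means "no attribute", and the new auxiliary constructors extend the same convention to null Strings and to `None`. A brief sketch (the namespace URI is made up):

    import scala.xml._

    object PrefixedAttributeDemo extends App {
      val scope = NamespaceBinding("p", "http://example.org/p", TopScope)
      val rest  = new UnprefixedAttribute("kept", "yes", Null)

      // A null String value produces no attribute.
      val a = new PrefixedAttribute("p", "dropped", null: String, rest)

      // The Option constructor treats None the same way.
      val b = new PrefixedAttribute("p", "alsoDropped", None: Option[Seq[Node]], rest)

      // Neither "dropped" nor "alsoDropped" appears in the rendered elements.
      println(new Elem("p", "node", a, scope))
      println(new Elem("p", "node", b, scope))
    }
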
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index c56fba1e6c..b6800d5ed1 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -22,7 +22,7 @@ extends Attribute
final val pre = null
val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Text(value), next) */
+ /** same as this(key, Text(value), next), or no attribute if value is null */
def this(key: String, value: String, next: MetaData) =
this(key, if (value ne null) Text(value) else null: NodeSeq, next)
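
Unprefixed attributes follow the same convention, and a null value additionally removes any binding of the same key further down the chain. For instance:

    import scala.xml._

    object UnprefixedAttributeDemo extends App {
      val existing = new UnprefixedAttribute("id", "old", Null)

      // A null value yields no "id" attribute and also drops the "id"
      // binding already present in `existing`.
      val updated = new UnprefixedAttribute("id", null: String, existing)

      println(new Elem(null, "node", updated, TopScope))  // no attributes remain
    }
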
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9b48f4e1bb..fc20b892b9 100644
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -61,7 +61,7 @@ object Utility extends AnyRef with parsing.TokenTests {
val key = md.key
val smaller = sort(md.filter { m => m.key < key })
val greater = sort(md.filter { m => m.key > key })
- smaller.append( Null ).append(md.copy ( greater ))
+ smaller.copy(md.copy ( greater ))
}
/** Return the node with its attribute list sorted alphabetically
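
This helper backs `Utility.sort(n: Node)` (documented just above), which returns a node with its attributes in alphabetical key order. A quick illustration, assuming that public method:

    import scala.xml._

    object SortDemo extends App {
      val unsorted = <node c="3" a="1" b="2"/>
      println(Utility.sort(unsorted))  // attributes come out ordered a, b, c
    }
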
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
index 6b6f6c1593..f58097bcb9 100644
--- a/src/library/scala/xml/include/sax/Main.scala
+++ b/src/library/scala/xml/include/sax/Main.scala
@@ -10,11 +10,11 @@
package scala.xml
package include.sax
-import scala.xml.include._
import scala.util.control.Exception.{ catching, ignoring }
import org.xml.sax.XMLReader
import org.xml.sax.helpers.XMLReaderFactory
+@deprecated("Code example will be moved to documentation.", "2.10.0")
object Main {
private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
diff --git a/src/partest-alternative/README b/src/partest-alternative/README
deleted file mode 100644
index c7673fe2f8..0000000000
--- a/src/partest-alternative/README
+++ /dev/null
@@ -1,50 +0,0 @@
-If you're looking for something to read, I suggest running ../test/partest
-with no arguments, which at this moment prints this:
-
-Usage: partest [<options>] [<test> <test> ...]
- <test>: a path to a test designator, typically a .scala file or a directory.
- Examples: files/pos/test1.scala, files/res/bug785
-
- Test categories:
- --all run all tests (default, unless no options given)
- --pos Compile files that are expected to build
- --neg Compile files that are expected to fail
- --run Test JVM backend
- --jvm Test JVM backend
- --res Run resident compiler scenarii
- --buildmanager Run Build Manager scenarii
- --scalacheck Run Scalacheck tests
- --script Run script files
- --shootout Run shootout tests
- --scalap Run scalap tests
-
- Test "smart" categories:
- --grep run all tests with a source file containing <expr>
- --failed run all tests which failed on the last run
-
- Specifying paths and additional flags, ~ means repository root:
- --rootdir path from ~ to partest (default: test)
- --builddir path from ~ to test build (default: build/pack)
- --srcdir path from --rootdir to sources (default: files)
- --javaopts flags to java on all runs (overrides JAVA_OPTS)
- --scalacopts flags to scalac on all tests (overrides SCALAC_OPTS)
- --pack alias for --builddir build/pack
- --quick alias for --builddir build/quick
-
- Options influencing output:
- --trace show the individual steps taken by each test
- --show-diff show diff between log and check file
- --show-log show log on failures
- --dry-run do not run tests, only show their traces.
- --terse be less verbose (almost silent except for failures)
- --verbose be more verbose (additive with --trace)
- --debug maximum debugging output
- --ansi print output in color
-
- Other options:
- --timeout Timeout in seconds
- --cleanup delete all stale files and dirs before run
- --nocleanup do not delete any logfiles or object dirs
- --stats collect and print statistics about the tests
- --validate examine test filesystem for inconsistencies
- --version print version
diff --git a/src/partest-alternative/scala/tools/partest/Actions.scala b/src/partest-alternative/scala/tools/partest/Actions.scala
deleted file mode 100644
index 9a64edeadc..0000000000
--- a/src/partest-alternative/scala/tools/partest/Actions.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import util._
-import nsc.io._
-import scala.sys.process._
-
-trait Actions {
- partest: Universe =>
-
- class TestSequence(val actions: List[TestStep]) extends AbsTestSequence {
- }
-
- implicit def createSequence(xs: List[TestStep]) = new TestSequence(xs)
-
- trait ExecSupport {
- self: TestEntity =>
-
- def execEnv: Map[String, String] = {
- val map = assembleEnvironment()
- val cwd = execCwd.toList map ("CWD" -> _.path)
-
- map ++ cwd
- }
- def execCwd = if (commandFile.isFile) Some(sourcesDir) else None
-
- def runExec(args: List[String]): Boolean = {
- val cmd = fromArgs(args)
-
- if (isVerbose) {
- trace("runExec: " + execEnv.mkString("ENV(", "\n", "\n)"))
- execCwd foreach (x => trace("CWD(" + x + ")"))
- }
-
- trace("runExec: " + cmd)
- isDryRun || execAndLog(cmd)
- }
-
- /** Exec a process to run a command. Assumes 0 exit value is success.
- * Of necessity, also treats no available exit value as success.
- */
- protected def execAndLog(cmd: String) = (cmd #> logFile.jfile !) == 0
- }
-
- trait ScriptableTest {
- self: TestEntity =>
-
- /** Translates a line from a .cmds file into a teststep.
- */
- def customTestStep(line: String): TestStep = {
- trace("customTestStep: " + line)
- val (cmd, rest) = line span (x => !Character.isWhitespace(x))
- def qualify(name: String) = sourcesDir / name path
- val args = toArgs(rest) map qualify
- def fail: TestStep = (_: TestEntity) => error("Parse error: did not understand '%s'" format line)
-
- val f: TestEntity => Boolean = cmd match {
- case "scalac" => _ scalac args
- case "javac" => _ javac args
- case "scala" => _ runScala args
- case _ => fail
- }
- f
- }
- }
-
- trait CompilableTest extends CompileExecSupport {
- self: TestEntity =>
-
- def sourceFiles = location.walk collect { case f: File if isJavaOrScala(f) => f } toList
- def allSources = sourceFiles map (_.path)
- def scalaSources = sourceFiles filter isScala map (_.path)
- def javaSources = sourceFiles filter isJava map (_.path)
-
- /** If there are mixed java and scala files, the standard compilation
- * sequence is:
- *
- * scalac with all files
- * javac with only java files
- * scalac with only scala files
- *
- * This should be expanded to encompass other strategies so we know how
- * well they're working or not working - notably, it would be very useful
- * to know exactly when and how two-pass compilation fails.
- */
- def compile() = {
- trace("compile: " + sourceFiles)
-
- def compileJava() = javac(javaSources)
- def compileScala() = scalac(scalaSources)
- def compileAll() = scalac(allSources)
- def compileMixed() = compileAll() && compileJava() && compileScala()
-
- if (scalaSources.nonEmpty && javaSources.nonEmpty) compileMixed()
- else compileScala()
- }
- }
-
- trait DiffableTest {
- self: TestEntity =>
-
- def checkFile: File = withExtension("check").toFile
- def checkFileRequired =
- returning(checkFile.isFile)(res => if (!res) warnAndLog("A checkFile at '%s' is mandatory.\n" format checkFile.path))
-
- lazy val sourceFileNames = sourceFiles map (_.name)
-
- /** Given the difficulty of verifying that any selective approach works
- * everywhere, the algorithm now is to look for the name of any known
- * source file for this test, and if seen, remove all the non-whitespace
- * preceding it. (Paths with whitespace don't work anyway.) This should
- * wipe out all slashes, backslashes, C:\, cygwin/windows differences,
- * and whatever else makes a simple diff not simple.
- *
- * The log and check file are both transformed, which I don't think is
- * correct -- only the log should be -- but doing it this way until I
- * can clarify martin's comments in #3283.
- */
- def normalizePaths(s: String) =
- sourceFileNames.foldLeft(s)((res, name) => res.replaceAll("""\S+\Q%s\E""" format name, name))
-
- /** The default cleanup normalizes paths relative to sourcesDir,
- * absorbs line terminator differences by going to lines and back,
- * and trims leading or trailing whitespace.
- */
- def diffCleanup(f: File) = safeLines(f) map normalizePaths mkString "\n" trim
-
- /** diffFiles requires actual Files as arguments but the output we want
- * is the post-processed versions of log/check, so we resort to tempfiles.
- */
- lazy val diffOutput = {
- if (!checkFile.exists) "" else {
- val input = diffCleanup(checkFile)
- val output = diffCleanup(logFile)
- def asFile(s: String) = returning(File.makeTemp("partest-diff"))(_ writeAll s)
-
- if (input == output) ""
- else diffFiles(asFile(input), asFile(output))
- }
- }
- private def checkTraceName = tracePath(checkFile)
- private def logTraceName = tracePath(logFile)
- private def isDiffConfirmed = checkFile.exists && (diffOutput == "")
-
- private def sendTraceMsg() {
- def result =
- if (isDryRun) ""
- else if (isDiffConfirmed) " [passed]"
- else if (checkFile.exists) " [failed]"
- else " [unchecked]"
-
- trace("diff %s %s%s".format(checkTraceName, logTraceName, result))
- }
-
- /** If optional is true, a missing check file is considered
- * a successful diff. Necessary since many categories use
- * checkfiles in an ad hoc manner.
- */
- def runDiff() = {
- sendTraceMsg()
-
- def updateCheck = (
- isUpdateCheck && {
- val formatStr = "** diff %s %s: " + (
- if (checkFile.exists) "failed, updating '%s' and marking as passed."
- else if (diffOutput == "") "not creating checkFile at '%s' as there is no output."
- else "was unchecked, creating '%s' for future tests."
- ) + "\n"
-
- normal(formatStr.format(checkTraceName, logTraceName, checkFile.path))
- if (diffOutput != "") normal(diffOutput)
-
- checkFile.writeAll(diffCleanup(logFile), "\n")
- true
- }
- )
-
- isDryRun || isDiffConfirmed || (updateCheck || !checkFile.exists)
- }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Alarms.scala b/src/partest-alternative/scala/tools/partest/Alarms.scala
deleted file mode 100644
index ef30d13705..0000000000
--- a/src/partest-alternative/scala/tools/partest/Alarms.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package partest
-
-import java.util.{ Timer, TimerTask }
-
-trait Alarms {
- self: Universe =>
-
- def interruptMeIn[T](debugMsg: String, seconds: Int)(body: => T): Option[T] = {
- val thisThread = currentThread
- val alarm = new SimpleAlarm(seconds * 1000) set thisThread.interrupt()
- debug("interruptMeIn(%d) '%s'".format(seconds, debugMsg))
-
- try { Some(body) }
- catch { case _: InterruptedException => debug("Received interrupted exception.") ; None }
- finally { debug("Cancelling interruptMeIn '%s'" format debugMsg) ; alarm.cancel() ; Thread.interrupted() }
- }
-
- case class AlarmerAction(secs: Int, action: () => Unit) extends Runnable {
- override def run() = action()
- }
-
- /** Set any number of alarms up with tuples of the form:
- * seconds to alarm -> Function0[Unit] to execute
- */
- class Alarmer(alarms: AlarmerAction*) {
- import java.util.concurrent._
-
- val exec = Executors.newSingleThreadScheduledExecutor()
- alarms foreach (x => exec.schedule(x, x.secs, TimeUnit.SECONDS))
- exec.shutdown()
-
- def cancelAll() = exec.shutdownNow()
- }
-
- class SimpleAlarm(timeout: Long) {
- private val alarm = new Timer
-
- /** Start a timer, running the given body if it goes off.
- */
- def set(body: => Unit) = returning(new TimerTask { def run() = body })(alarm.schedule(_, timeout))
-
- /** Cancel the timer.
- */
- def cancel() = alarm.cancel()
- }
-
- trait TestAlarms {
- test: TestEntity =>
-
- private def warning1 = AlarmerAction(testWarning, () => warning(
- """|I've been waiting %s seconds for this to complete:
- | %s
- |It may be stuck, or if not, it should be broken into smaller tests.
- |""".stripMargin.format(testWarning, test))
- )
- private def warning2 = AlarmerAction(testWarning * 2, () => warning(
- """|Now I've been waiting %s seconds for this to complete:
- | %s
- |If partest seems hung it would be a good place to look.
- |""".stripMargin.format(testWarning * 2, test))
- )
-
- def startAlarms(onTimeout: => Unit) =
- if (isNoAlarms) new Alarmer() // for alarm debugging
- else new Alarmer(Seq(warning1, warning2, AlarmerAction(testTimeout, () => onTimeout)): _*)
- }
-
- // Thread.setDefaultUncaughtExceptionHandler(new UncaughtException)
- // class UncaughtException extends Thread.UncaughtExceptionHandler {
- // def uncaughtException(t: Thread, e: Throwable) {
- // Console.println("Uncaught in %s: %s".format(t, e))
- // }
- // }
- //
- // lazy val logger = File("/tmp/partest.log").bufferedWriter()
- // def flog(msg: String) = logger synchronized {
- // logger write (msg + "\n")
- // logger.flush()
- // }
-}
diff --git a/src/partest-alternative/scala/tools/partest/BuildContributors.scala b/src/partest-alternative/scala/tools/partest/BuildContributors.scala
deleted file mode 100644
index 85ca895103..0000000000
--- a/src/partest-alternative/scala/tools/partest/BuildContributors.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import nsc.util.ClassPath
-
-trait BuildContributors {
- universe: Universe =>
-
- /** A trait mixed into types which contribute a portion of the values.
- * The basic mechanism is the TestBuild, TestCategory, and TestEntity
- * can each contribute to each value. They are assembled at the last
- * moment by the ContributorAssembler (presently the TestEntity.)
- */
- trait BuildContributor {
- def javaFlags: List[String]
- def scalacFlags: List[String]
- def classpathPaths: List[Path]
- def buildProperties: List[(String, Any)]
- def buildEnvironment: Map[String, String]
- }
-
- trait ContributorAssembler {
- def contributors: List[BuildContributor]
- def assemble[T](what: BuildContributor => List[T]): List[T] = contributors flatMap what
-
- /** !!! This will need work if we want to achieve real composability,
- * but it can wait for the demand.
- */
- def assembleScalacArgs(args: List[String]) = assemble(_.scalacFlags) ++ args
- def assembleJavaArgs(args: List[String]) = assemble(_.javaFlags) ++ args
- def assembleProperties() = assemble(_.buildProperties)
- def assembleClasspaths(paths: List[Path]) = assemble(_.classpathPaths) ++ paths
- def assembleEnvironment() = assemble(_.buildEnvironment.toList).toMap
-
- def createClasspathString() = ClassPath fromPaths (assembleClasspaths(Nil) : _*)
- def createPropertyString() = assembleProperties() map { case (k, v) => "-D%s=%s".format(k, v.toString) }
- }
-
- trait BuildContribution extends BuildContributor {
- self: TestBuild =>
-
- /** The base classpath and system properties.
- * !!! TODO - this should adjust itself depending on the build
- * being tested, because pack and quick at least need different jars.
- */
- def classpathPaths = List[Path](library, compiler, partest, fjbg) ++ forkJoinPath
- def buildProperties = List(
- "scala.home" -> testBuildDir,
- "partest.lib" -> library, // used in jvm/inner
- "java.awt.headless" -> true,
- "user.language" -> "en",
- "user.country" -> "US",
- "partest.debug" -> isDebug,
- "partest.verbose" -> isVerbose
- // Disabled because there are no natives tests.
- // "java.library.path" -> srcLibDir
- )
- def javaFlags: List[String] = toArgs(javaOpts)
- def scalacFlags: List[String] = toArgs(scalacOpts)
-
- /** We put the build being tested's /bin directory in the front of the
- * path so the scripts and such written to execute "scala" will use this
- * build and not whatever happens to be on their path.
- */
- private def modifiedPath = ClassPath.join(scalaBin.path, Properties.envOrElse("PATH", ""))
- def buildEnvironment = Map("PATH" -> modifiedPath)
- }
-
- trait CategoryContribution extends BuildContributor {
- self: DirBasedCategory =>
-
- /** Category-wide classpath additions placed in <category>/lib. */
- private def libContents = root / "lib" ifDirectory (_.list.toList)
-
- def classpathPaths = libContents getOrElse Nil
- def buildProperties = Nil
- def javaFlags = Nil
- def scalacFlags = Nil
- def buildEnvironment = Map()
- }
-
- trait TestContribution extends BuildContributor with ContributorAssembler {
- self: TestEntity =>
-
- def jarsInTestDir = location.walk collect { case f: File if f hasExtension "jar" => f } toList
-
- def contributors = List(build, category, self)
- def javaFlags = safeArgs(javaOptsFile)
- def scalacFlags = safeArgs(scalaOptsFile)
- def classpathPaths = jarsInTestDir :+ outDir
- def buildProperties = List(
- "partest.output" -> outDir.toAbsolute, // used in jvm/inner
- "partest.cwd" -> outDir.parent.toAbsolute // used in shootout tests
- )
- def buildEnvironment = Map("JAVA_OPTS" -> fromArgs(assembleJavaArgs(Nil)))
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Categories.scala b/src/partest-alternative/scala/tools/partest/Categories.scala
deleted file mode 100644
index c517a3f931..0000000000
--- a/src/partest-alternative/scala/tools/partest/Categories.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import nsc.Settings
-import nsc.io._
-import nsc.util.{ ClassPath }
-
-trait Categories {
- self: Universe =>
-
- trait TestCategory extends AbsTestCategory {
- def kind: String
- def startMessage: String = "Executing test group"
- def testSequence: TestSequence
-
- class TestSettings(entity: TestEntity, error: String => Unit) extends Settings(error) {
- def this(entity: TestEntity) = this(entity, Console println _)
-
- deprecation.value = false
- encoding.value = "ISO-8859-1"
- classpath.value = entity.testClasspath
- outdir.value = entity.outDir.path
- }
-
- def createSettings(entity: TestEntity): TestSettings = new TestSettings(entity)
- def createTest(location: Path): TestEntity =
- if (location.isFile) TestFile(this, location.toFile)
- else if (location.isDirectory) TestDirectory(this, location.toDirectory)
- else error("Failed to create test at '%s'" format location)
-
- /** Category test identification.
- */
- def denotesTestFile(p: Path) = p.isFile && (p hasExtension "scala")
- def denotesTestDir(p: Path) = p.isDirectory && !ignorePath(p)
- def denotesTest(p: Path) = denotesTestDir(p) || denotesTestFile(p)
-
- /** This should verify that all necessary files are present.
- * By default it delegates to denotesTest.
- */
- def denotesValidTest(p: Path) = denotesTest(p)
- }
-
- abstract class DirBasedCategory(val kind: String) extends TestCategory with CategoryContribution {
- lazy val root = Directory(src / kind).normalize
- def enumerate = root.list filter denotesTest map createTest toList
-
- /** Standard actions. These can be overridden either on the
- * Category level or by individual tests.
- */
- def compile: TestStep = (_: TestEntity).compile()
- def checkFileRequired: TestStep = (_: TestEntity).checkFileRequired
- def diff: TestStep = (_: TestEntity).diff()
- def run: TestStep = (_: TestEntity).run()
- def exec: TestStep = (_: TestEntity).exec()
-
- /** Combinators.
- */
- def not(f: TestStep): TestStep = !f(_: TestEntity)
-
- override def toString = kind
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Compilable.scala b/src/partest-alternative/scala/tools/partest/Compilable.scala
deleted file mode 100644
index 65b5d5da0e..0000000000
--- a/src/partest-alternative/scala/tools/partest/Compilable.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.tools.nsc.io._
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
-import scala.tools.nsc.util.{ ClassPath }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-
-trait PartestCompilation {
- self: Universe =>
-
- trait CompileExecSupport extends ExecSupport {
- self: TestEntity =>
-
- def javacpArg = "-classpath " + testClasspath
- def scalacpArg = "-usejavacp"
-
- /** Not used, requires tools.jar.
- */
- // def javacInternal(args: List[String]) = {
- // import com.sun.tools.javac.Main
- // Main.compile(args.toArray, logWriter)
- // }
-
- def javac(args: List[String]): Boolean = {
- val allArgString = fromArgs(javacpArg :: javacOpts :: args)
-
- // javac -d outdir -classpath <basepath> <files>
- val cmd = "%s -d %s %s".format(javacCmd, outDir, allArgString)
- def traceMsg =
- if (isVerbose) cmd
- else "%s -d %s %s".format(tracePath(Path(javacCmd)), tracePath(outDir), fromArgs(args))
-
- trace(traceMsg)
-
- isDryRun || execAndLog(cmd)
- }
-
- def scalac(args: List[String]): Boolean = {
- val allArgs = assembleScalacArgs(args)
- val (global, files) = newGlobal(allArgs)
- def nonFileArgs = if (isVerbose) global.settings.recreateArgs else assembleScalacArgs(Nil)
- def traceArgs = fromArgs(nonFileArgs ++ (files map tracePath))
- def traceMsg = "scalac " + traceArgs
-
- trace(traceMsg)
- isDryRun || global.partestCompile(files, true)
- }
-
- /** Actually running the test, post compilation.
- * Normally args will be List("Test", "jvm"), main class and arg to it.
- */
- def runScala(args: List[String]): Boolean = {
- val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
-
- // java $JAVA_OPTS <javaopts> -classpath <cp>
- val javaCmdAndOptions = javaCmd +: assembleJavaArgs(List(javacpArg))
- // MainGenericRunner -usejavacp <scalacopts> Test jvm
- val scalaCmdAndOptions = List(scalaRunnerClass, scalacpArg) ++ assembleScalacArgs(args)
- // Assembled
- val cmd = fromArgs(javaCmdAndOptions ++ createPropertyString() ++ scalaCmdAndOptions)
-
- def traceMsg = if (isVerbose) cmd else fromArgs(javaCmd :: args)
- trace("runScala: " + traceMsg)
-
- isDryRun || execAndLog(cmd)
- }
-
- def newReporter(settings: Settings) = new ConsoleReporter(settings, Console.in, logWriter)
-
- class PartestGlobal(settings: Settings, val creporter: ConsoleReporter) extends Global(settings, creporter) {
- def partestCompile(files: List[String], printSummary: Boolean): Boolean = {
- try { new Run compile files }
- catch {
- case FatalError(msg) => creporter.error(null, "fatal error: " + msg)
- case ae: AssertionError => creporter.error(null, ""+ae)
- case te: TypeError => creporter.error(null, ""+te)
- case ex =>
- creporter.error(null, ""+ex)
- throw ex
- }
-
- if (printSummary)
- creporter.printSummary
-
- creporter.flush()
- !creporter.hasErrors
- }
- }
-
- def newGlobal(args: List[String]): (PartestGlobal, List[String]) = {
- val settings = category createSettings self
- val command = new CompilerCommand(args, settings)
- val reporter = newReporter(settings)
-
- if (!command.ok)
- debug("Error parsing arguments: '%s'".format(args mkString ", "))
-
- (new PartestGlobal(command.settings, reporter), command.files)
- }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Config.scala b/src/partest-alternative/scala/tools/partest/Config.scala
deleted file mode 100644
index ee1852f6ed..0000000000
--- a/src/partest-alternative/scala/tools/partest/Config.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import io._
-import nsc.io._
-import Properties._
-
-trait Config {
- universe: Universe =>
-
- lazy val src = absolutize(srcDir).toDirectory
- lazy val build = new TestBuild()
-
- def javaHomeEnv = envOrElse("JAVA_HOME", null)
- def javaCmd = envOrElse("JAVACMD", "java")
- def javacCmd = Option(javaHomeEnv) map (x => Path(x) / "bin" / "javac" path) getOrElse "javac"
-
- /** Values related to actors. The timeouts are in seconds. On a dry
- * run we only allocate one worker so the output isn't interspersed.
- */
- def workerTimeout = 3600 // 1 hour, probably overly generous
- def numWorkers = if (isDryRun) 1 else propOrElse("partest.actors", "8").toInt
- def expectedErrors = propOrElse("partest.errors", "0").toInt
- def poolSize = (wrapAccessControl(propOrNone("actors.corePoolSize")) getOrElse "16").toInt
-
- def allScalaFiles = src.deepFiles filter (_ hasExtension "scala")
- def allObjDirs = src.deepDirs filter (_ hasExtension "obj")
- def allLogFiles = src.deepFiles filter (_ hasExtension "log")
- def allClassFiles = src.deepFiles filter (_ hasExtension "class")
-
- class TestBuild() extends BuildContribution {
- import nsc.util.ClassPath
-
- /** Scala core libs.
- */
- val library = pathForComponent("library")
- val compiler = pathForComponent("compiler")
- val partest = pathForComponent("partest")
- val scalap = pathForComponent("scalap", "%s.jar")
-
- /** Scala supplementary libs - these are not all needed for all build targets,
- * and some of them are copied inside other jars in later targets. However quick
- * for instance cannot be run without some of these.
- */
- val fjbg = pathForLibrary("fjbg")
- val msil = pathForLibrary("msil")
- val forkjoin = pathForLibrary("forkjoin")
- val scalacheck = pathForLibrary("scalacheck")
-
- /** Other interesting paths.
- */
- val scalaBin = testBuildDir / "bin"
-
- /** A hack for now to get quick running.
- */
- def needsForkJoin = {
- val loader = nsc.util.ScalaClassLoader.fromURLs(List(library.toURL))
- val fjMarker = "scala.concurrent.forkjoin.ForkJoinTask"
- val clazz = loader.tryToLoadClass(fjMarker)
-
- if (clazz.isDefined) debug("Loaded ForkJoinTask OK, don't need jar.")
- else debug("Could not load ForkJoinTask, putting jar on classpath.")
-
- clazz.isEmpty
- }
- lazy val forkJoinPath: List[Path] = if (needsForkJoin) List(forkjoin) else Nil
-
- /** Internal **/
- private def repo = partestDir.parent.normalize
-
- private def pathForComponent(what: String, jarFormat: String = "scala-%s.jar"): Path = {
- def asDir = testBuildDir / "classes" / what
- def asJar = testBuildDir / "lib" / jarFormat.format(what)
-
- if (asDir.isDirectory) asDir
- else if (asJar.isFile) asJar
- else ""
- }
- private def pathForLibrary(what: String) = File(repo / "lib" / (what + ".jar"))
- }
-
- def printConfigBanner() = {
- debug("Java VM started with arguments: '%s'" format fromArgs(Process.javaVmArguments))
- debug("System Properties:\n" + util.allPropertiesString())
-
- normal(configBanner())
- }
-
- /** Treat an access control failure as None. */
- private def wrapAccessControl[T](body: => Option[T]): Option[T] =
- try body catch { case _: java.security.AccessControlException => None }
-
- private def configBanner() = {
- val javaBin = Path(javaHome) / "bin"
- val javaInfoString = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
-
- List(
- "Scala compiler classes in: " + testBuildDir,
- "Scala version is: " + nsc.Properties.versionMsg,
- "Scalac options are: " + universe.scalacOpts,
- "Java binaries in: " + javaBin,
- "Java runtime is: " + javaInfoString,
- "Java runtime options: " + (Process.javaVmArguments mkString " "),
- "Javac options are: " + universe.javacOpts,
- "Java options are: " + universe.javaOpts,
- "Source directory is: " + src,
- "Selected categories: " + (selectedCategories mkString " "),
- ""
- ) mkString "\n"
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Dispatcher.scala b/src/partest-alternative/scala/tools/partest/Dispatcher.scala
deleted file mode 100644
index 69efc353eb..0000000000
--- a/src/partest-alternative/scala/tools/partest/Dispatcher.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import scala.tools.nsc.io._
-import scala.actors.{ Actor, TIMEOUT }
-import scala.actors.Actor._
-import scala.collection.immutable
-import scala.util.control.Exception.ultimately
-
-/** The machinery for concurrent execution of tests. Each Worker
- * is given a bundle of tests, which it runs sequentially and then
- * sends a report back to the dispatcher.
- */
-trait Dispatcher {
- partest: Universe =>
-
- /** The public entry point. The given filter narrows down the list of
- * tests to run.
- */
- def runSelection(categories: List[TestCategory], filt: TestEntity => Boolean = _ => true): CombinedTestResults = {
- // Setting scala.home informs tests where to obtain their jars.
- setProp("scala.home", testBuildDir.path)
-
- val allTests = allCategories flatMap (_.enumerate)
- val selected = allTests filter filt
- val groups = selected groupBy (_.category)
- val count = selected.size
-
- if (count == 0) return CombinedTestResults(0, 0, 0, Nil)
- else if (count == allTests.size) verbose("Running all %d tests." format count)
- else verbose("Running %d/%d tests: %s".format(count, allTests.size, toStringTrunc(selected map (_.label) mkString ", ")))
-
- allCategories collect { case x if groups contains x => runCategory(x, groups(x)) } reduceLeft (_ ++ _)
- }
-
- private def parallelizeTests(tests: List[TestEntity]): immutable.Map[TestEntity, TestResult] = {
- // propagate verbosity
- if (isDebug) scala.actors.Debug.level = 3
-
- // "If elected, I guarantee a slice of tests for every worker!"
- val groups = tests grouped ((tests.size / numWorkers) + 1) toList
-
- // "Workers, line up for assignments!"
- val workers =
- for ((slice, workerNum) <- groups.zipWithIndex) yield {
- returning(new Worker(workerNum)) { worker =>
- worker.start()
- worker ! TestsToRun(slice)
- }
- }
-
- normal("Started %d workers with ~%d tests each.\n".format(groups.size, groups.head.size))
-
- /** Listening for news from the proletariat.
- */
- (workers map { w =>
- receiveWithin(workerTimeout * 1000) {
- case ResultsOfRun(resultMap) => resultMap
- case TIMEOUT =>
- warning("Worker %d timed out." format w.workerNum)
- // mark all the worker's tests as having timed out - should be hard to miss
- // immutable.Map[TestEntity, TestResult]()
- groups(w.workerNum) map (x => (x -> new Timeout(x))) toMap
- }
- }) reduceLeft (_ ++ _)
- }
-
- private def runCategory(category: TestCategory, tests: List[TestEntity]): CombinedTestResults = {
- val kind = category.kind
- normal("%s (%s tests in %s)\n".format(category.startMessage, tests.size, category))
-
- val (milliSeconds, resultMap) = timed2(parallelizeTests(tests))
- val (passed, failed) = resultsToStatistics(resultMap mapValues (_.state))
- val failures = resultMap.values filterNot (_.passed) toList
-
- CombinedTestResults(passed, failed, milliSeconds, failures)
- }
-
- /** A Worker is given a bundle of tests and runs them all sequentially.
- */
- class Worker(val workerNum: Int) extends Actor {
- def act() {
- react { case TestsToRun(tests) =>
- val master = sender
- runTests(tests)(results => master ! ResultsOfRun(results))
- }
- }
-
- /** Runs the tests. Passes the result Map to onCompletion when done.
- */
- private def runTests(tests: List[TestEntity])(onCompletion: immutable.Map[TestEntity, TestResult] => Unit) {
- var results = new immutable.HashMap[TestEntity, TestResult] // maps tests to results
- val numberOfTests = tests.size
- val testIterator = tests.iterator
- def processed = results.size
- def isComplete = testIterator.isEmpty
-
- def atThreshold(num: Double) = {
- require(num >= 0 && num <= 1.0)
- ((processed - 1).toDouble / numberOfTests <= num) && (processed.toDouble / numberOfTests >= num)
- }
-
- def extraMessage = {
- // for now quiet for normal people
- if (isVerbose || isTrace || isDebug) {
- if (isComplete) "(#%d 100%%)" format workerNum
- else if (isVerbose) "(#%d %d/%d)".format(workerNum, processed, numberOfTests)
- else if (isTrace && atThreshold(0.5)) "(#%d 50%%)" format workerNum
- else ""
- }
- else ""
- }
-
- def countAndReport(result: TestResult) {
- val TestResult(test, state) = result
- // refuse to count an entity twice
- if (results contains test)
- return warning("Received duplicate result for %s: was %s, now %s".format(test, results(test), state))
-
- // increment the counter for this result state
- results += (test -> result)
-
- // show on screen
- if (isDryRun) normal("\n") // blank line between dry run traces
- else result show extraMessage
-
- // remove log if successful
- if (result.passed)
- test.deleteLog()
-
- // Respond to master if this Worker is complete
- if (isComplete)
- onCompletion(results)
- }
-
- Actor.loopWhile(testIterator.hasNext) {
- val parent = self
- // pick a test and set some alarms
- val test = testIterator.next
- val alarmer = test startAlarms (parent ! new Timeout(test))
-
- actor {
- ultimately(alarmer.cancelAll()) {
- // Calling isSuccess forces the lazy val "process" inside the test, running it.
- val res = test.isSuccess
- // Cancel the alarms and alert the media.
- parent ! TestResult(test, res)
- }
- }
-
- react {
- case x: TestResult => countAndReport(x)
- }
- }
- }
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Entities.scala b/src/partest-alternative/scala/tools/partest/Entities.scala
deleted file mode 100644
index 301deb972b..0000000000
--- a/src/partest-alternative/scala/tools/partest/Entities.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-
-trait Entities {
- self: Universe =>
-
- abstract class TestEntity extends AbsTestEntity
- with TestContribution
- with TestHousekeeping
- with TestAlarms
- with EntityLogging
- with CompilableTest
- with ScriptableTest
- with DiffableTest {
- def location: Path
- def category: TestCategory
-
- lazy val label = location.stripExtension
- lazy val testClasspath = returning(createClasspathString())(x => vtrace("testClasspath: " + x))
-
- /** Was this test successful? Calling this for the first time forces
- * lazy val "process" which actually runs the test.
- */
- def isSuccess = process
-
- /** Some standard files, which may or may not be present.
- */
- def scalaOptsFile = withExtension("flags").toFile // opts to scalac
- def javaOptsFile = withExtension("javaopts").toFile // opts to java (but not javac)
- def commandFile = withExtension("cmds").toFile // sequence of commands to execute
- def logFile = withExtension("log").toFile // collected output
-
- /** Some standard directories.
- */
- def outDir = withExtension("obj").toDirectory // output dir, e.g. files/pos/t14.obj
- def categoryDir = location.parent.normalize // category dir, e.g. files/pos/
- def sourcesDir = location ifDirectory (_.normalize) getOrElse categoryDir
-
- /** Standard arguments for run, exec, diff.
- */
- def argumentsToRun = List("Test", "jvm")
- def argumentsToExec = List(location.path)
-
- /** Using a .cmds file for a custom test sequence.
- */
- def commandList = safeLines(commandFile)
- def testSequence =
- if (commandFile.isFile && commandList.nonEmpty) commandList map customTestStep
- else category.testSequence
-
- def run() = runScala(argumentsToRun)
- def exec() = runExec(argumentsToExec)
- def diff() = runDiff() // checkFile, logFile
-
- /** The memoized result of the test run.
- */
- private lazy val process = {
- val outcome = runWrappers(testSequence.actions forall (f => f(this)))
-
- // an empty outcome means we've been interrupted and are shutting down.
- outcome getOrElse false
- }
- }
-
- case class TestDirectory(category: TestCategory, location: Directory) extends TestEntity { }
- case class TestFile(category: TestCategory, location: File) extends TestEntity { }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Housekeeping.scala b/src/partest-alternative/scala/tools/partest/Housekeeping.scala
deleted file mode 100644
index cfdecee9c7..0000000000
--- a/src/partest-alternative/scala/tools/partest/Housekeeping.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.util.control.Exception.catching
-import util._
-import nsc.io._
-import Process.runtime
-import Properties._
-
-/** An agglomeration of code which is low on thrills. Hopefully
- * it operates so quietly in the background that you never have to
- * look at this file.
- */
-trait Housekeeping {
- self: Universe =>
-
- /** Orderly shutdown on ctrl-C. */
- @volatile private var _shuttingDown = false
- protected def setShuttingDown() = {
- /** Whatever we want to do as shutdown begins goes here. */
- if (!_shuttingDown) {
- warning("Received shutdown signal, partest is cleaning up...\n")
- _shuttingDown = true
- }
- }
- def isShuttingDown = _shuttingDown
-
- /** Execute some code with a shutdown hook in place. This is
- * motivated by the desire not to leave the filesystem full of
- * junk when someone ctrl-Cs a test run.
- */
- def withShutdownHook[T](hook: => Unit)(body: => T): Option[T] =
- /** Java doesn't like it if you keep adding and removing shutdown
- * hooks after shutdown has begun, so we trap the failure.
- */
- catching(classOf[IllegalStateException]) opt {
- val t = new Thread() {
- override def run() = {
- setShuttingDown()
- hook
- }
- }
- runtime addShutdownHook t
-
- try body
- finally runtime removeShutdownHook t
- }
-
- /** Search for a directory, possibly given only a name, by starting
- * at the current dir and walking upward looking for it at each level.
- */
- protected def searchForDir(name: String): Directory = {
- val result = Path(name) ifDirectory (x => x.normalize) orElse {
- val cwd = Directory.Current getOrElse error("user.dir property not set")
- val dirs = cwd :: cwd.parents map (_ / name)
-
- Path onlyDirs dirs map (_.normalize) headOption
- }
-
- result getOrElse error("Fatal: could not find directory '%s'" format name)
- }
-
- /** Paths we ignore for most purposes.
- */
- def ignorePath(x: Path) = {
- (x.name startsWith ".") ||
- (x.isDirectory && ((x.name == "lib") || x.hasExtension("obj", "svn")))
- }
- /** Make a possibly relative path absolute using partestDir as the base.
- */
- def absolutize(path: String) = Path(path) toAbsoluteWithRoot partestDir
-
- /** Go on a deleting binge.
- */
- def cleanupAll() {
- if (isNoCleanup)
- return
-
- val (dirCount, fileCount) = (cleanupObjDirs(), cleanupLogs() + cleanupJunk())
- if (dirCount + fileCount > 0)
- normal("Cleaned up %d directories and %d files.\n".format(dirCount, fileCount))
- }
-
- def cleanupObjDirs() = countTrue(allObjDirs collect { case x if x.exists => x.deleteRecursively() })
- def cleanupJunk() = countTrue(allClassFiles collect { case x if x.exists => x.delete() })
- def cleanupLogs() = countTrue(allLogFiles collect { case x if x.exists => x.delete() })
-
- /** Look through every file in the partest directory and ask around
- * to make sure someone knows him. Complain about strangers.
- */
- def validateAll() {
- def denotesTest(p: Path) = allCategories exists (_ denotesTest p)
- def isMSILcheck(p: Path) = p.name endsWith "-msil.check"
-
- def analyzeCategory(cat: DirBasedCategory) = {
- val allTests = cat.enumerate
- val otherPaths = cat.root walkFilter (x => !ignorePath(x)) filterNot (cat denotesTest _) filterNot isMSILcheck toList
- val count = otherPaths.size
-
- println("Validating %d non-test paths in %s.".format(count, cat.kind))
-
- for (path <- otherPaths) {
- (allTests find (_ acknowledges path)) match {
- case Some(test) => if (isVerbose) println(" OK: '%s' is claimed by '%s'".format(path, test.label))
- case _ => println(">> Unknown path '%s'" format path)
- }
- }
- }
-
- allCategories collect { case x: DirBasedCategory => analyzeCategory(x) }
- }
-
- trait TestHousekeeping {
- self: TestEntity =>
-
- /** Calculating derived files. Given a test like
- * files/run/foo.scala or files/run/foo/
- * This creates paths like foo.check, foo.flags, etc.
- */
- def withExtension(extension: String) = categoryDir / "%s.%s".format(label, extension)
-
- /** True for a path if this test acknowledges it belongs to this test.
- * Overridden by some categories.
- */
- def acknowledges(path: Path): Boolean = {
- val loc = location.normalize
- val knownPaths = List(scalaOptsFile, javaOptsFile, commandFile, logFile, checkFile) ++ jarsInTestDir
- def isContainedSource = location.isDirectory && isJavaOrScala(path) && (path.normalize startsWith loc)
-
- (knownPaths exists (_ isSame path)) || isContainedSource
- }
-
- /** This test "responds to" this String. This could mean anything -- it's a
- * way of specifying ad-hoc collections of tests to exercise only a subset of tests.
- * At present it looks for the given String in all the test sources.
- */
- def respondsToString(str: String) = containsString(str)
- def containsString(str: String) = {
- debug("Checking %s for \"%s\"".format(sourceFiles mkString ", ", str))
- sourceFiles map safeSlurp exists (_ contains str)
- }
-
- def possiblyTimed[T](body: => T): T = {
- if (isStats) timed(recordTestTiming(label, _))(body)
- else body
- }
-
- private def prepareForTestRun() = {
- // make sure we have a clean slate
- deleteLog(force = true)
- if (outDir.exists)
- outDir.deleteRecursively()
-
- // recreate object dir
- outDir createDirectory true
- }
- def deleteOutDir() = outDir.deleteRecursively()
- def deleteShutdownHook() = { debug("Shutdown hook deleting " + outDir) ; deleteOutDir() }
-
- protected def runWrappers[T](body: => T): Option[T] = {
- prepareForTestRun()
-
- withShutdownHook(deleteShutdownHook()) {
- loggingOutAndErr {
- val result = possiblyTimed { body }
- if (!isNoCleanup)
- deleteOutDir()
-
- result
- }
- }
- }
-
- override def toString = location.path
- override def equals(other: Any) = other match {
- case x: TestEntity => location.normalize == x.location.normalize
- case _ => false
- }
- override def hashCode = location.normalize.hashCode
- }
-
- private def countTrue(f: => Iterator[Boolean]) = f filter (_ == true) length
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Partest.scala b/src/partest-alternative/scala/tools/partest/Partest.scala
deleted file mode 100644
index 74a3a6a19b..0000000000
--- a/src/partest-alternative/scala/tools/partest/Partest.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import nsc.util._
-import category.AllCategories
-
-/** Global object for a Partest run. It is completely configured by the list
- * of arguments passed to the constructor (although there are a few properties
- * and environment variables which can influence matters.) See PartestSpec.scala
- * for the complete list.
- */
-class Partest(args: List[String]) extends {
- val parsed = PartestSpec(args: _*)
-} with Universe with PartestSpec with cmd.Instance with AllCategories {
-
- if (parsed.propertyArgs.nonEmpty)
- debug("Partest property args: " + fromArgs(parsed.propertyArgs))
-
- debug("Partest created with args: " + fromArgs(args))
-
- def helpMsg = PartestSpec.helpMsg
-
- // The abstract values from Universe.
- lazy val testBuildDir = searchForDir(buildDir)
- lazy val partestDir = searchForDir(rootDir)
- lazy val allCategories = List(Pos, Neg, Run, Jvm, Res, Shootout, Scalap, Scalacheck, BuildManager, Script)
- lazy val selectedCategories = if (isAllImplied) allCategories else specifiedCats
-
- def specifiedTests = parsed.residualArgs map (x => Path(x).normalize)
- def specifiedKinds = testKinds filter (x => isSet(x) || (runSets contains x))
- def specifiedCats = specifiedKinds flatMap (x => allCategories find (_.kind == x))
- def isAllImplied = isAll || (specifiedTests.isEmpty && specifiedKinds.isEmpty)
-
- /** Assembles a filter based on command line options which restrict the test set
- * --grep limits to only matching tests
- * --failed limits to only recently failed tests (log file is present)
- * --<category> limits to only the given tests and categories (but --all overrides)
- * path/to/Test limits to only the given tests and categories
- */
- lazy val filter = {
- def indivFilter(test: TestEntity) = specifiedTests contains test.location.normalize
- def categoryFilter(test: TestEntity) = specifiedCats contains test.category
- def indivOrCat(test: TestEntity) = isAllImplied || indivFilter(test) || categoryFilter(test) // combines previous two
-
- def failedFilter(test: TestEntity) = !isFailed || (test.logFile exists)
- def grepFilter(test: TestEntity) = grepExpr.isEmpty || (test containsString grepExpr.get)
- def combinedFilter(x: TestEntity) = indivOrCat(x) && failedFilter(x) && grepFilter(x) // combines previous three
-
- combinedFilter _
- }
-
- def launchTestSuite() = {
- def onTimeout() = {
- warning("Partest test run timed out after " + timeout + " seconds.\n")
- System.exit(-1)
- }
- val alarm = new Alarmer(AlarmerAction(timeout, () => onTimeout()))
-
- try runSelection(selectedCategories, filter)
- finally alarm.cancelAll()
- }
-}
-
-object Partest {
- def fromBuild(dir: String, args: String*): Partest = apply("--builddir" +: dir +: args: _*)
- def apply(args: String*): Partest = new Partest(args.toList)
-
- // builds without partest jars won't actually work
- def starr() = fromBuild("")
- def locker() = fromBuild("build/locker")
- def quick() = fromBuild("build/quick")
- def pack() = fromBuild("build/pack")
- def strap() = fromBuild("build/strap")
- def dist() = fromBuild("dists/latest")
-}
-
diff --git a/src/partest-alternative/scala/tools/partest/PartestSpec.scala b/src/partest-alternative/scala/tools/partest/PartestSpec.scala
deleted file mode 100644
index 75d94bdb72..0000000000
--- a/src/partest-alternative/scala/tools/partest/PartestSpec.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import cmd._
-
-/** This takes advantage of bits of scala goodness to fully define a command
- * line program with a minimum of duplicated code. When the specification object
- * is created, the vals are evaluated in order and each of them side effects
- * a private accumulator. What emerges is a full list of the valid unary
- * and binary arguments, as well as autogenerated help.
- */
-trait PartestSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = PartestSpec
- def programInfo = Spec.Info("partest", "", "scala.tools.partest.Runner")
- private val kind = new Spec.Accumulator[String]()
- protected def testKinds = kind.get
-
- private implicit val tokenizeString = FromString.ArgumentsFromString // String => List[String]
-
- help("""
- |# Pro Tip! Instant bash completion: `partest --bash` (note backticks)
- |Usage: partest [<options>] [<test> <test> ...]
- | <test>: a path to a test designator, typically a .scala file or a directory.
- | Examples: files/pos/test1.scala, files/res/bug785
- |
- | Test categories:""".stripMargin)
-
- val isAll = ("all" / "run all tests (default, unless no options given)" --?)
- (kind("pos") / "Compile files that are expected to build" --?)
- (kind("neg") / "Compile files that are expected to fail" --?)
- (kind("run") / "Test JVM backend" --?)
- (kind("jvm") / "Test JVM backend" --?)
- (kind("res") / "Run resident compiler scenarii" --?)
- (kind("buildmanager") / "Run Build Manager scenarii" --?)
- (kind("scalacheck") / "Run Scalacheck tests" --?)
- (kind("script") / "Run script files" --?)
- (kind("shootout") / "Run shootout tests" --?)
- (kind("scalap") / "Run scalap tests" --?)
-
- heading ("""Test "smart" categories:""")
- val grepExpr = "grep" / "run all tests with a source file containing <expr>" --|
- val isFailed = "failed" / "run all tests which failed on the last run" --?
-
- heading ("Specifying paths and additional flags, ~ means repository root:")
-
- val rootDir = "rootdir" / "path from ~ to partest" defaultTo "test"
- val buildDir = "builddir" / "path from ~ to test build" defaultTo "build/pack"
- val srcDir = "srcdir" / "path from --rootdir to sources" defaultTo "files"
- val javaOpts = "javaopts" / "flags to java on all runs" defaultToEnv "JAVA_OPTS"
- val javacOpts = "javacopts" / "flags to javac on all runs" defaultToEnv "JAVAC_OPTS"
- val scalacOpts = "scalacopts" / "flags to scalac on all tests" defaultToEnv "SCALAC_OPTS"
-
- "pack" / "" expandTo ("--builddir", "build/pack")
- "quick" / "" expandTo ("--builddir", "build/quick")
-
- heading ("Options influencing output:")
- val isTrace = "trace" / "show the individual steps taken by each test" --?
- val isShowDiff = "show-diff" / "show diff between log and check file" --?
- val isShowLog = "show-log" / "show log on failures" --?
- val isDryRun = "dry-run" / "do not run tests, only show their traces." --?
- val isTerse = "terse" / "be less verbose (almost silent except for failures)" --?
- val isVerbose = "verbose" / "be more verbose (additive with --trace)" --?
- val isDebug = "debug" / "maximum debugging output" --?
- val isAnsi = "ansi" / "print output in color" --?
-
- heading ("Other options:")
- val timeout = "timeout" / "Overall timeout in seconds" defaultTo 7200
- val testWarning = "test-warning" / "Test warning in seconds" defaultTo 90
- val testTimeout = "test-timeout" / "Test timeout in seconds" defaultTo 900
- val isCleanup = "cleanup" / "delete all stale files and dirs before run" --?
- val isNoCleanup = "nocleanup" / "do not delete any logfiles or object dirs" --?
- val isStats = "stats" / "collect and print statistics about the tests" --?
- val isValidate = "validate" / "examine test filesystem for inconsistencies" --?
- val isUpdateCheck = "update-check" / "overwrite checkFile if diff fails" --?
-
- "version" / "print version" --> runAndExit(println(Properties.versionMsg))
-
- // no help for anything below this line - secret options
- // mostly intended for property configuration.
- val runSets = ("runsets" --^) getOrElse Nil
- val isNoAlarms = "noalarms" --?
- val isInsideAnt = "is-in-ant" --?
-}
-
-object PartestSpec extends PartestSpec with Property {
- lazy val propMapper = new PropertyMapper(PartestSpec) {
- override def isPassThrough(key: String) = key == "partest.options"
- }
-
- type ThisCommandLine = PartestCommandLine
- class PartestCommandLine(args: List[String]) extends SpecCommandLine(args) {
- override def errorFn(msg: String) = printAndExit("Error: " + msg)
-
- def propertyArgs = PartestSpec.propertyArgs
- }
-
- override def creator(args: List[String]): PartestCommandLine = new PartestCommandLine(args)
-}
diff --git a/src/partest-alternative/scala/tools/partest/Results.scala b/src/partest-alternative/scala/tools/partest/Results.scala
deleted file mode 100644
index e0fceed17a..0000000000
--- a/src/partest-alternative/scala/tools/partest/Results.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.collection.immutable
-
-trait Results {
- self: Universe =>
-
- /** A collection of tests for a Worker.
- */
- case class TestsToRun(entities: List[TestEntity])
-
- /** The response from a Worker who has been given TestsToRun.
- */
- case class ResultsOfRun(results: immutable.Map[TestEntity, TestResult])
-
- /** The result of a single test. (0: OK, 1: FAILED, 2: TIMEOUT)
- */
- sealed abstract class TestResult(val state: Int, val description: String) {
- def entity: TestEntity
-
- def passed = state == 0
- def colorize(s: String): String
- def show(msg: String) =
- if (!isShuttingDown)
- showResult(colorize(description), msg)
-
- private def outputPrefix = if (isInsideAnt) "" else markNormal("partest: ")
- private def name = src relativize entity.location // e.g. "neg/test.scala"
- private def showResult(status: String, extraMsg: String) =
- normal(outputPrefix + "[...]/%-40s [%s] %s\n".format(name, status, extraMsg))
-
- override def equals(other: Any) = other match {
- case x: TestResult => entity == x.entity
- case _ => false
- }
- override def hashCode = entity.hashCode
- override def toString = "%s [%s]".format(entity, description)
- }
-
- class Success(val entity: TestEntity) extends TestResult(0, " OK ") {
- def colorize(s: String) = markSuccess(s)
- override def show(msg: String) = if (!isTerse) super.show(msg)
- }
- class Failure(val entity: TestEntity) extends TestResult(1, " FAILED ") {
- def colorize(s: String) = markFailure(s)
-
- override def show(msg: String) = {
- super.show(msg)
-
- if (isShowDiff || isTrace)
- normal(entity.diffOutput)
-
- if (isShowLog || isTrace)
- normal(toStringTrunc(entity.failureMessage(), 1600))
- }
- override def toString = List(super.toString, toStringTrunc(entity.failureMessage(), 400)) mkString "\n"
- }
- class Timeout(val entity: TestEntity) extends TestResult(2, "TIME OUT") {
- def colorize(s: String) = markFailure(s)
- }
-
- object TestResult {
- def apply(entity: TestEntity, success: Boolean) =
- if (success) new Success(entity)
- else new Failure(entity)
-
- def apply(entity: TestEntity, state: Int) = state match {
- case 0 => new Success(entity)
- case 1 => new Failure(entity)
- case 2 => new Timeout(entity)
- }
- def unapply(x: Any) = x match {
- case x: TestResult => Some((x.entity, x.state))
- case _ => None
- }
- }
-
- /** The combined results of any number of tests.
- */
- case class CombinedTestResults(
- passed: Int,
- failed: Int,
- elapsedMilliseconds: Long,
- failures: List[TestResult]
- ) {
- // housekeeping
- val elapsedSecs = elapsedMilliseconds / 1000
- val elapsedMins = elapsedSecs / 60
- val elapsedHrs = elapsedMins / 60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- def total = passed + failed
- def hasFailures = failed > 0
- def exitCode = if (expectedErrors == failed) 0 else 1
-
- def ++(x: CombinedTestResults) = CombinedTestResults(
- passed + x.passed,
- failed + x.failed,
- elapsedMilliseconds + x.elapsedMilliseconds,
- failures ::: x.failures
- )
-
- def elapsedString = "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
- def failuresString = {
- if (failures.isEmpty) ""
- else "Summary of failures:" :: failures mkString ("\n", "\n", "")
- }
-
- override def toString =
- if (total == 0) "There were no tests to run."
- else if (isDryRun) "%d tests would be run." format total
- else if (hasFailures) "%d of %d tests failed (elapsed time: %s)".format(failed, total, elapsedString) + failuresString
- else "All %d tests were successful (elapsed time: %s)".format(total, elapsedString)
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Runner.scala b/src/partest-alternative/scala/tools/partest/Runner.scala
deleted file mode 100644
index 7fe2c98d43..0000000000
--- a/src/partest-alternative/scala/tools/partest/Runner.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-
-object Runner {
- def main(args: Array[String]) {
- val runner = Partest(args: _*)
- import runner._
-
- if (args.isEmpty) return println(helpMsg)
- if (isValidate) return validateAll()
-
- printConfigBanner()
-
- if (isCleanup)
- cleanupAll()
-
- val result = launchTestSuite()
- val exitCode = result.exitCode
- val message = "\n" + result + "\n"
-
- if (exitCode == 0) success(message)
- else failure(message)
-
- if (isStats)
- showTestStatistics()
-
- System exit exitCode
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Statistics.scala b/src/partest-alternative/scala/tools/partest/Statistics.scala
deleted file mode 100644
index e90377cfa7..0000000000
--- a/src/partest-alternative/scala/tools/partest/Statistics.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import scala.collection.mutable
-
-trait Statistics {
- /** Only collected when --stats is given. */
- lazy val testStatistics = new mutable.HashMap[String, Long]
-
-  /** Given a function and a block of code, evaluates the code block,
-   *  calls the function with the elapsed milliseconds, and returns the block's result.
- */
- def timed[T](f: Long => Unit)(body: => T): T = {
- val start = System.currentTimeMillis
- val result = body
- val end = System.currentTimeMillis
-
- f(end - start)
- result
- }
-  /** Times the body and returns both the elapsed milliseconds and the result.
- */
- def timed2[T](body: => T): (Long, T) = {
- var milliSeconds = 0L
- val result = timed(x => milliSeconds = x)(body)
-
- (milliSeconds, result)
- }
-
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) =
- (results partition (_._2 == 0)) match {
- case (winners, losers) => (winners.size, losers.size)
- }
-
- def recordTestTiming(name: String, milliseconds: Long) =
- synchronized { testStatistics(name) = milliseconds }
-
- def showTestStatistics() {
- testStatistics.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %.2f seconds".format(k, (v.toDouble / 1000))) }
- }
-}
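
A hedged usage sketch of the timed helper defined above; the object name and the summed range are illustrative only.

    object TimingSketch {
      // Same shape as timed above: run the block, hand the elapsed
      // milliseconds to the callback, return the block's value.
      def timed[T](f: Long => Unit)(body: => T): T = {
        val start = System.currentTimeMillis
        val result = body
        f(System.currentTimeMillis - start)
        result
      }
      def main(args: Array[String]): Unit = {
        val sum = timed(ms => println("took " + ms + " ms")) {
          (1 to 1000000).map(_.toLong).sum
        }
        println("sum = " + sum)
      }
    }
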
diff --git a/src/partest-alternative/scala/tools/partest/Universe.scala b/src/partest-alternative/scala/tools/partest/Universe.scala
deleted file mode 100644
index 3dd79e4791..0000000000
--- a/src/partest-alternative/scala/tools/partest/Universe.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import category.AllCategories
-import io.Logging
-
-/** The high-level view of the partest infrastructure.
- */
-abstract class Universe
- extends Entities
- with BuildContributors
- with Logging
- with Dispatcher
- with Statistics
- with Housekeeping
- with Results
- with PartestCompilation
- with PartestSpec
- with Config
- with Alarms
- with Actions
- with Categories {
-
- /** The abstract values from which all else is derived. */
- def partestDir: Directory
- def testBuildDir: Directory
- def allCategories: List[TestCategory]
- def selectedCategories: List[TestCategory]
-
- /** Some plausibly abstract types. */
- type TestBuild <: BuildContributor // e.g. quick, pack
- type TestCategory <: AbsTestCategory // e.g. pos, neg, run
- type TestEntity <: AbsTestEntity // e.g. files/pos/test25.scala
- type TestSequence <: AbsTestSequence // e.g. compile, run, diff
-
- /** Although TestStep isn't much more than Function1 right now,
- * it exists this way so it can become more capable.
- */
- implicit def f1ToTestStep(f: TestEntity => Boolean): TestStep =
- new TestStep { def apply(test: TestEntity) = f(test) }
-
- abstract class TestStep extends (TestEntity => Boolean) {
- def apply(test: TestEntity): Boolean
- }
-
- /** An umbrella category of tests, such as "pos" or "run".
- */
- trait AbsTestCategory extends BuildContributor {
- type TestSettings
-
- def kind: String
- def testSequence: TestSequence
- def denotesTest(location: Path): Boolean
-
- def createTest(location: Path): TestEntity
- def createSettings(entity: TestEntity): TestSettings
- def enumerate: List[TestEntity]
- }
-
- /** A single test. It may involve multiple files, but only a
- * single path is used to designate it.
- */
- trait AbsTestEntity extends BuildContributor {
- def category: TestCategory
- def location: Path
- def onException(x: Throwable): Unit
- def testClasspath: String
-
- /** Most tests will use the sequence defined by the category,
- * but the test can override and define a custom sequence.
- */
- def testSequence: TestSequence
-
- /** True if this test recognizes the given path as a piece of it.
- * For validation purposes.
- */
- def acknowledges(path: Path): Boolean
- }
-
- /** Every TestEntity is partly characterized by a series of actions
- * which are applied to the TestEntity in the given order. The test
-   *  passes if all those actions return true, and fails otherwise.
- */
- trait AbsTestSequence {
- def actions: List[TestStep]
- }
-} \ No newline at end of file
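
The f1ToTestStep conversion above only wraps a plain TestEntity => Boolean function so that steps can later grow extra behaviour. A stand-alone sketch of the same idea with a stand-in Entity type; the names here are hypothetical and not taken from the sources.

    object TestStepSketch {
      case class Entity(location: String)

      abstract class TestStep extends (Entity => Boolean)

      // Mirrors f1ToTestStep: any predicate on entities can be used where
      // a TestStep is expected.
      implicit def f1ToTestStep(f: Entity => Boolean): TestStep =
        new TestStep { def apply(e: Entity) = f(e) }

      def main(args: Array[String]): Unit = {
        val looksLikeScala: TestStep = (e: Entity) => e.location endsWith ".scala"
        println(looksLikeScala(Entity("files/pos/test25.scala")))   // true
      }
    }
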
diff --git a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala b/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
deleted file mode 100644
index f8c0133dc1..0000000000
--- a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package ant
-
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.taskdefs.Java
-import org.apache.tools.ant.types.Environment
-
-import scala.tools.nsc.io._
-import scala.tools.nsc.util.ClassPath
-import cmd.Spec._
-
-class JavaTask extends Java {
- override def getTaskName() = "partest"
- private val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
- private val partestRunnerClass = "scala.tools.partest.Runner"
- def defaultJvmArgs = "-Xms64M -Xmx768M -Xss768K -XX:MaxPermSize=96M"
-
- protected def rootDir = prop("partest.rootdir") getOrElse (baseDir / "test").path
- protected def partestJVMArgs = prop("partest.jvm.args") getOrElse defaultJvmArgs
- protected def runnerArgs = List("-usejavacp", partestRunnerClass, "--javaopts", partestJVMArgs)
-
- private def baseDir = Directory(getProject.getBaseDir)
- private def prop(s: String) = Option(getProject getProperty s)
- private def jvmline(s: String) = returning(createJvmarg())(_ setLine s)
- private def addArg(s: String) = returning(createArg())(_ setValue s)
-
- private def newKeyValue(key: String, value: String) =
- returning(new Environment.Variable)(x => { x setKey key ; x setValue value })
-
- def setDefaults() {
- setFork(true)
- setFailonerror(true)
- getProject.setSystemProperties()
- setClassname(scalaRunnerClass)
- addSysproperty(newKeyValue("partest.is-in-ant", "true"))
- jvmline(partestJVMArgs)
- runnerArgs foreach addArg
-
- // do we want basedir or rootDir to be the cwd?
- // setDir(Path(rootDir).jfile)
- }
-
- override def init() = {
- super.init()
- setDefaults()
- }
-}
-
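
The jvmline/addArg/newKeyValue helpers above lean on a returning combinator that is in scope from elsewhere in these sources (not shown here): it applies a side-effecting initializer and hands back the original value. A stand-alone sketch of that pattern, re-defining returning locally so it runs on its own:

    object ReturningSketch {
      def returning[T](x: T)(f: T => Unit): T = { f(x); x }

      def main(args: Array[String]): Unit = {
        // Configure-and-return in one expression, as newKeyValue does above.
        val buf = returning(new StringBuilder)(_ append "partest")
        println(buf.toString)   // partest
      }
    }
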
diff --git a/src/partest-alternative/scala/tools/partest/antlib.xml b/src/partest-alternative/scala/tools/partest/antlib.xml
deleted file mode 100644
index af36f11368..0000000000
--- a/src/partest-alternative/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,3 +0,0 @@
-<antlib>
- <taskdef name="partest" classname="scala.tools.partest.ant.JavaTask"/>
-</antlib>
diff --git a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala b/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
deleted file mode 100644
index 1c3f4c9899..0000000000
--- a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package category
-
-trait AllCategories extends Compiler with Analysis with Runner {
- self: Universe =>
-
- object Pos extends DirBasedCategory("pos") { lazy val testSequence: TestSequence = List(compile) }
- object Neg extends DirBasedCategory("neg") { lazy val testSequence: TestSequence = List(checkFileRequired, not(compile), diff) }
- object Run extends DirBasedCategory("run") { lazy val testSequence: TestSequence = List(compile, run, diff) }
- object Jvm extends DirBasedCategory("jvm") { lazy val testSequence: TestSequence = List(compile, run, diff) }
-}
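
Each category above is, in the end, an ordered list of steps; as AbsTestSequence in Universe.scala puts it, a test passes only if every action returns true, and Neg simply wraps compile in not(...). A stand-alone sketch with hypothetical step functions:

    object SequenceSketch {
      type Step = String => Boolean

      def not(step: Step): Step = (test: String) => !step(test)

      // Hypothetical steps standing in for compile/diff.
      val compile: Step = test => !(test contains "broken")
      val diff: Step    = test => true

      val posSequence = List(compile, diff)
      val negSequence = List(not(compile), diff)

      def run(sequence: List[Step], test: String) = sequence forall (_ apply test)

      def main(args: Array[String]): Unit = {
        println(run(posSequence, "files/pos/test25.scala"))    // true
        println(run(negSequence, "files/neg/broken.scala"))    // true: it must NOT compile
      }
    }
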
diff --git a/src/partest-alternative/scala/tools/partest/category/Analysis.scala b/src/partest-alternative/scala/tools/partest/category/Analysis.scala
deleted file mode 100644
index 944f8c691f..0000000000
--- a/src/partest-alternative/scala/tools/partest/category/Analysis.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package category
-
-import java.lang.{ ClassLoader => JavaClassLoader }
-import java.net.URL
-import nsc.util.ScalaClassLoader
-import nsc.io._
-
-class PartestClassLoader(urls: Array[URL], parent: JavaClassLoader) extends ScalaClassLoader.URLClassLoader(urls, parent) {
- def this(urls: Array[URL]) = this(urls, null)
- def bytes(path: String) = findBytesForClassName(path)
- def singleton(path: String) = tryToInitializeClass(path).get getField "MODULE$" get null
-
- /** Calls a method in an object via reflection.
- */
- def apply[T](className: String, methodName: String)(args: Any*): T = {
- def fail = error("Reflection failed on %s.%s".format(className, methodName))
- val clazz = tryToLoadClass(className) getOrElse fail
- val obj = singleton(className)
- val m = clazz.getMethods find (x => x.getName == methodName && x.getParameterTypes.size == args.size) getOrElse fail
-
- m.invoke(obj, args map (_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
- }
-}
-
-trait Analysis {
- self: Universe =>
-
- object Scalap extends DirBasedCategory("scalap") {
- val testSequence: TestSequence = List(checkFileRequired, compile, run, diff)
- override def denotesTest(p: Path) = p.isDirectory && (p.toDirectory.files exists (_.name == "result.test"))
- override def createTest(location: Path) = new ScalapTest(location)
-
- class ScalapTest(val location: Path) extends TestEntity {
- val category = Scalap
- val scalapMain = "scala.tools.scalap.Main$"
- val scalapMethod = "decompileScala"
-
- override def classpathPaths = super.classpathPaths :+ build.scalap
- override def checkFile = File(location / "result.test")
-
- private def runnerURLs = build.classpathPaths ::: classpathPaths map (_.toURL)
- private def createClassLoader = new PartestClassLoader(runnerURLs.toArray, this.getClass.getClassLoader)
-
- val isPackageObject = containsString("package object")
- val suffix = if (isPackageObject) ".package" else ""
- val className = location.name.capitalize + suffix
-
- override def run() = loggingResult {
- def loader = createClassLoader
- def bytes = loader.bytes(className)
-
- trace("scalap %s".format(className))
- if (isDryRun) ""
- else loader[String](scalapMain, scalapMethod)(bytes, isPackageObject)
- }
- }
- }
-}
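
The reflective apply above locates a Scala object's singleton instance through its MODULE$ field and invokes a method chosen by name and arity. A stand-alone sketch of the same reflection pattern against a plain object; it assumes compilation in the default package, so the module class is named Greeter$, and all names are hypothetical.

    object Greeter { def greet(name: String): String = "hello, " + name }

    object ReflectionSketch {
      // Same steps as PartestClassLoader.apply: load the class, fetch the
      // singleton, find the method by name and arity, invoke it.
      def callObjectMethod[T](className: String, methodName: String)(args: AnyRef*): T = {
        val clazz    = Class.forName(className)
        val instance = clazz.getField("MODULE$").get(null)
        val method   = clazz.getMethods.find(m => m.getName == methodName &&
          m.getParameterTypes.length == args.length).getOrElse(sys.error("no such method: " + methodName))
        method.invoke(instance, args: _*).asInstanceOf[T]
      }

      def main(args: Array[String]): Unit = {
        println(callObjectMethod[String]("Greeter$", "greet")("partest"))   // hello, partest
      }
    }
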
diff --git a/src/partest-alternative/scala/tools/partest/category/Compiler.scala b/src/partest-alternative/scala/tools/partest/category/Compiler.scala
deleted file mode 100644
index 6b65072856..0000000000
--- a/src/partest-alternative/scala/tools/partest/category/Compiler.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package category
-
-import nsc.io._
-import nsc.reporters._
-import nsc.{ Settings, CompilerCommand }
-import scala.tools.nsc.interactive.RefinedBuildManager
-import util.copyPath
-
-trait Compiler {
- self: Universe =>
-
- /** Resident Compiler.
- * $SCALAC -d dir.obj -Xresident -sourcepath . "$@"
- */
- object Res extends DirBasedCategory("res") {
- lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
-
- override def denotesTest(p: Path) = p.isDirectory && resFile(p).isFile
- override def createTest(location: Path) = new ResidentTest(location.toDirectory)
-
- override def createSettings(entity: TestEntity): TestSettings =
- returning(super.createSettings(entity)) { settings =>
- settings.resident.value = true
- settings.sourcepath.value = entity.sourcesDir.path
- }
-
- class ResidentTest(val location: Directory) extends TestEntity {
- val category = Res
- override def sourcesDir = categoryDir
-
- override def acknowledges(p: Path) =
- super.acknowledges(p) || (resFile(location) isSame p)
-
- private def residentCompilerCommands = safeLines(resFile(location))
- private def compileResident(global: PartestGlobal, lines: List[String]) = {
- def printPrompt = global inform "nsc> "
- val results =
- lines map { line =>
- printPrompt
- trace("compile " + line)
- isDryRun || global.partestCompile(toArgs(line) map (categoryDir / _ path), false)
- }
-
- printPrompt
-
- /** Note - some res tests are really "neg" style tests, so we can't
- * use the return value of the compile. The diff catches failures.
- */
- true // results forall (_ == true)
- }
-
- override def compile() = compileResident(newGlobal(Nil)._1, residentCompilerCommands)
- }
- private[Res] def resFile(p: Path) = p.toFile addExtension "res"
- }
-
- object BuildManager extends DirBasedCategory("buildmanager") {
- lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
- override def denotesTest(p: Path) = p.isDirectory && testFile(p).isFile
- override def createTest(location: Path) = new BuildManagerTest(location.toDirectory)
-
- override def createSettings(entity: TestEntity): TestSettings =
- returning[TestSettings](super.createSettings(entity)) { settings =>
- settings.Ybuildmanagerdebug.value = true
- settings.sourcepath.value = entity.sourcesDir.path
- }
-
- class PartestBuildManager(settings: Settings, val reporter: ConsoleReporter) extends RefinedBuildManager(settings) {
- def errorFn(msg: String) = Console println msg
-
- override protected def newCompiler(newSettings: Settings) =
- new BuilderGlobal(newSettings, reporter)
-
- private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (s => Option(AbstractFile getFile (Path(settings.sourcepath.value) / s path))) toSet
-
- def buildManagerCompile(line: String): Boolean = {
- val prompt = "builder > "
- reporter printMessage (prompt + line)
- val command = new CompilerCommand(toArgs(line), settings)
- val files = filesToSet(settings.sourcepath.value, command.files)
-
- update(files, Set.empty)
- true
- }
- }
-
- private[BuildManager] def testFile(p: Path) = (p / p.name addExtension "test").toFile
-
- class BuildManagerTest(val location: Directory) extends TestEntity {
- val category = BuildManager
-
- override def sourcesDir = outDir
- override def sourceFiles = Path onlyFiles (location walkFilter (_ != changesDir) filter isJavaOrScala toList)
- override def checkFile = File(location / location.name addExtension "check")
-
- override def acknowledges(p: Path) = super.acknowledges(p) || (p isSame testFile(location))
-
- def buildManagerCommands = safeLines(testFile(location))
- def changesDir = Directory(location / (location.name + ".changes"))
-
- override def compile() = {
- val settings = createSettings(this)
- val pbm = new PartestBuildManager(settings, newReporter(settings))
-
- // copy files
- for (source <- sourceFiles) {
- val target = outDir / (location.normalize relativize source)
- copyPath(source, target.toFile)
- }
-
- def runUpdate(line: String) = {
- val Array(srcName, replacement) = line split "=>"
- copyPath(File(changesDir / replacement), File(outDir / srcName))
- }
-
- def sendCommand(line: String): Boolean = {
- val compileRegex = """^>>compile (.*)$""".r
- val updateRegex = """^>>update\s+(.*)""".r
- trace("send: " + (line drop 2))
-
- isDryRun || (line match {
- case compileRegex(xs) => pbm.buildManagerCompile(xs)
- case updateRegex(line) => runUpdate(line)
- })
- }
-
- // send each line to the build manager
- buildManagerCommands forall sendCommand
- }
- }
- }
-}
-
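
sendCommand above drives the build manager by matching script lines against regex extractors; unmatched lines fall through to a MatchError in the original, so this sketch adds a catch-all purely for illustration. The handlers are hypothetical.

    object CommandDispatchSketch {
      val compileRegex = """^>>compile (.*)$""".r
      val updateRegex  = """^>>update\s+(.*)""".r

      def dispatch(line: String): String = line match {
        case compileRegex(files) => "compile: " + files
        case updateRegex(change) => "update: " + change
        case other               => "unrecognized: " + other
      }

      def main(args: Array[String]): Unit = {
        List(">>compile A.scala B.scala", ">>update A.scala=>A1.scala") map dispatch foreach println
      }
    }
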
diff --git a/src/partest-alternative/scala/tools/partest/category/Runner.scala b/src/partest-alternative/scala/tools/partest/category/Runner.scala
deleted file mode 100644
index add1c55feb..0000000000
--- a/src/partest-alternative/scala/tools/partest/category/Runner.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package category
-
-import nsc.io._
-
-trait Runner {
- self: Universe =>
-
- /** Shootout.
- */
- object Shootout extends DirBasedCategory("shootout") {
- lazy val testSequence: TestSequence = List(compile, run, diff)
-
- override def denotesTest(p: Path) = isScala(p) && runner(p).isFile
- override def createTest(location: Path) = new ShootoutTest(location.toFile)
-
- class ShootoutTest(val location: File) extends TestEntity {
- val category = Shootout
- // The files in shootout are very free form, so acknowledge anything close.
- override def acknowledges(p: Path) =
- (p.parent.normalize isSame Shootout.root) && (p.name startsWith label)
-
- private def generated = File(outDir / "test.scala")
- private def runnerFile = runner(location)
- override def sourceFiles = List(generated)
-
- override def compile() = {
- trace("generate %s from %s, %s".format(tracePath(generated), tracePath(location), tracePath(runnerFile)))
- // generate source file (even on dry run, we need the path)
- generated.writeAll(location.slurp(), runnerFile.slurp())
-
- // compile generated file
- super.compile()
- }
- }
-
- private[Shootout] def runner(p: Path) = p addExtension "runner" toFile
- }
-
- object Scalacheck extends DirBasedCategory("scalacheck") {
- lazy val testSequence: TestSequence = List(compile, run)
- override def createTest(location: Path) = new ScalacheckTest(location)
-
- class ScalacheckTest(val location: Path) extends TestEntity {
- val category = Scalacheck
-
- import build.{ scalacheck, forkjoin }
- import org.scalacheck.Properties
- import org.scalacheck.Test.{ checkProperties, defaultParams, Result }
-
- override def classpathPaths = super.classpathPaths ::: List(scalacheck, forkjoin)
- private def arrayURLs = Array(scalacheck, outDir) map (_.toURL)
-
- /** For reasons I'm not entirely clear on, I've written all this
- * to avoid a source dependency on scalacheck.
- */
- class ScalacheckClassLoader extends PartestClassLoader(arrayURLs, this.getClass.getClassLoader) {
- type ScalacheckResult = { def passed: Boolean }
-
- def propCallback(name: String, passed: Int, discarded: Int): Unit = ()
- def testCallback(name: String, result: AnyRef): Unit = ()
-
- val test = singleton("Test$")
- val params = apply[AnyRef]("org.scalacheck.Test$", "defaultParams")()
- val result = apply[Seq[(String, AnyRef)]]("org.scalacheck.Test$", "checkProperties")(test, params, propCallback _, testCallback _)
-
- def allResults() =
- for ((prop, res) <- result) yield {
- ScalacheckTest.this.trace("%s: %s".format(prop, res))
- res.asInstanceOf[ScalacheckResult].passed
- }
-
- def check() = allResults forall (_ == true)
- }
-
- override def run() = {
- trace("scalacheck runs via classloader with: %s".format(arrayURLs mkString ", "))
- isDryRun || (new ScalacheckClassLoader check)
- }
- }
- }
-
- object Script extends DirBasedCategory("script") {
- val testSequence: TestSequence = List(exec, diff)
- override def createTest(location: Path) = new ScriptTest(location)
-
- class ScriptTest(val location: Path) extends TestEntity {
- val category = Script
- val scriptFile = if (location.isDirectory) location / (label + ".scala") else location
- val argsFile = withExtension("args").toFile
- def batFile = scriptFile changeExtension "bat"
- def script = if (Properties.isWin) batFile else scriptFile
-
- override def acknowledges(p: Path) = super.acknowledges(p) || (List(argsFile, batFile) exists (_ isSame p))
- override def execCwd = Some(sourcesDir)
- override def argumentsToExec = script.path :: safeArgs(argsFile)
- }
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala b/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
deleted file mode 100644
index 59216cf03b..0000000000
--- a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-package io
-
-import java.io.{ Writer, PrintWriter, OutputStream, OutputStreamWriter }
-
-object ANSIWriter {
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- def apply(isAnsi: Boolean) = if (isAnsi) MANY else NONE
-}
-import ANSIWriter._
-
-class ANSIWriter(writer: Writer) extends PrintWriter(writer, true) {
- def this(out: OutputStream) = this(new OutputStreamWriter(out))
- def colorful: Int = NONE
-
- protected val manyColors = List(
- Console.BOLD + Console.BLACK,
- Console.BOLD + Console.GREEN,
- Console.BOLD + Console.RED,
- Console.BOLD + Console.YELLOW,
- Console.RESET
- )
- protected val someColors = List(
- Console.BOLD + Console.BLACK,
- Console.RESET,
- Console.BOLD + Console.BLACK,
- Console.BOLD + Console.BLACK,
- Console.RESET
- )
- protected val noColors = List("", "", "", "", "")
-
- lazy val List(_outline, _success, _failure, _warning, _default) = colorful match {
- case NONE => noColors
- case SOME => someColors
- case MANY => manyColors
- case _ => noColors
- }
-
- private def wrprint(msg: String): Unit = synchronized {
- print(msg)
- flush()
- }
-
- def outline(msg: String) = wrprint(_outline + msg + _default)
- def success(msg: String) = wrprint(_success + msg + _default)
- def failure(msg: String) = wrprint(_failure + msg + _default)
- def warning(msg: String) = wrprint(_warning + msg + _default)
- def normal(msg: String) = wrprint(_default + msg)
-}
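
The writer above brackets each message with one of the configured ANSI prefixes and a reset. A minimal stand-alone version of the same idea, using the scala.Console escape codes directly; the sample messages are illustrative only.

    object AnsiSketch {
      val success = Console.BOLD + Console.GREEN
      val failure = Console.BOLD + Console.RED
      val reset   = Console.RESET

      def markSuccess(msg: String) = success + msg + reset
      def markFailure(msg: String) = failure + msg + reset

      def main(args: Array[String]): Unit = {
        println(markSuccess("  OK      files/pos/test25.scala"))
        println(markFailure(" FAILED   files/neg/bug1.scala"))
      }
    }
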
diff --git a/src/partest-alternative/scala/tools/partest/io/Diff.java b/src/partest-alternative/scala/tools/partest/io/Diff.java
deleted file mode 100644
index 69428d7e7a..0000000000
--- a/src/partest-alternative/scala/tools/partest/io/Diff.java
+++ /dev/null
@@ -1,873 +0,0 @@
-
-package scala.tools.partest.io;
-
-import java.util.Hashtable;
-
-/** A class to compare IndexedSeqs of objects. The result of comparison
- is a list of <code>change</code> objects which form an
- edit script. The objects compared are traditionally lines
- of text from two files. Comparison options such as "ignore
- whitespace" are implemented by modifying the <code>equals</code>
-    and <code>hashCode</code> methods for the objects compared.
-<p>
-    The basic algorithm is described in: <br>
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
- Algorithmica Vol. 1 No. 2, 1986, p 251.
-<p>
- This class outputs different results from GNU diff 1.15 on some
- inputs. Our results are actually better (smaller change list, smaller
- total size of changes), but it would be nice to know why. Perhaps
- there is a memory overwrite bug in GNU diff 1.15.
-
- @author Stuart D. Gathman, translated from GNU diff 1.15
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the <a href=COPYING.txt>
- GNU General Public License</a>
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
- */
-
-public class Diff {
-
- /** Prepare to find differences between two arrays. Each element of
- the arrays is translated to an "equivalence number" based on
- the result of <code>equals</code>. The original Object arrays
- are no longer needed for computing the differences. They will
- be needed again later to print the results of the comparison as
- an edit script, if desired.
- */
- public Diff(Object[] a,Object[] b) {
- Hashtable h = new Hashtable(a.length + b.length);
- filevec[0] = new file_data(a,h);
- filevec[1] = new file_data(b,h);
- }
-
- /** 1 more than the maximum equivalence value used for this or its
- sibling file. */
- private int equiv_max = 1;
-
- /** When set to true, the comparison uses a heuristic to speed it up.
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
- public boolean heuristic = false;
-
-  /** When set to true, the algorithm returns a guaranteed minimal
- set of changes. This makes things slower, sometimes much slower. */
- public boolean no_discards = false;
-
- private int[] xvec, yvec; /* IndexedSeqs being compared. */
- private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the forward
- search of the edit matrix. */
- private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the backward
- search of the edit matrix. */
- private int fdiagoff, bdiagoff;
- private final file_data[] filevec = new file_data[2];
- private int cost;
-
- /** Find the midpoint of the shortest edit script for a specified
- portion of the two files.
-
- We scan from the beginnings of the files, and simultaneously from the ends,
- doing a breadth-first search through the space of edit-sequence.
- When the two searches meet, we have found the midpoint of the shortest
- edit sequence.
-
- The value returned is the number of the diagonal on which the midpoint lies.
- The diagonal number equals the number of inserted lines minus the number
- of deleted lines (counting only lines before the midpoint).
- The edit cost is stored into COST; this is the total number of
- lines inserted or deleted (counting only lines before the midpoint).
-
- This function assumes that the first lines of the specified portions
- of the two files do not match, and likewise that the last lines do not
- match. The caller must trim matching lines from the beginning and end
- of the portions it is going to specify.
-
- Note that if we return the "wrong" diagonal value, or if
- the value of bdiag at that diagonal is "wrong",
- the worst this can do is cause suboptimal diff output.
- It cannot cause incorrect diff output. */
-
- private int diag (int xoff, int xlim, int yoff, int ylim) {
- final int[] fd = fdiag; // Give the compiler a chance.
- final int[] bd = bdiag; // Additional help for the compiler.
- final int[] xv = xvec; // Still more help for the compiler.
- final int[] yv = yvec; // And more and more . . .
- final int dmin = xoff - ylim; // Minimum valid diagonal.
- final int dmax = xlim - yoff; // Maximum valid diagonal.
- final int fmid = xoff - yoff; // Center diagonal of top-down search.
- final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
- int fmin = fmid, fmax = fmid; // Limits of top-down search.
- int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
- /* True if southeast corner is on an odd
- diagonal with respect to the northwest. */
- final boolean odd = (fmid - bmid & 1) != 0;
-
- fd[fdiagoff + fmid] = xoff;
- bd[bdiagoff + bmid] = xlim;
-
- for (int c = 1;; ++c)
- {
- int d; /* Active diagonal. */
- boolean big_snake = false;
-
- /* Extend the top-down search by an edit step in each diagonal. */
- if (fmin > dmin)
- fd[fdiagoff + --fmin - 1] = -1;
- else
- ++fmin;
- if (fmax < dmax)
- fd[fdiagoff + ++fmax + 1] = -1;
- else
- --fmax;
- for (d = fmax; d >= fmin; d -= 2)
- {
- int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
-
- if (tlo >= thi)
- x = tlo + 1;
- else
- x = thi;
- oldx = x;
- y = x - d;
- while (x < xlim && y < ylim && xv[x] == yv[y]) {
- ++x; ++y;
- }
- if (x - oldx > 20)
- big_snake = true;
- fd[fdiagoff + d] = x;
- if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c - 1;
- return d;
- }
- }
-
-      /* Similarly, extend the bottom-up search. */
- if (bmin > dmin)
- bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
- else
- ++bmin;
- if (bmax < dmax)
- bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
- else
- --bmax;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
-
- if (tlo < thi)
- x = tlo;
- else
- x = thi - 1;
- oldx = x;
- y = x - d;
- while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
- --x; --y;
- }
- if (oldx - x > 20)
- big_snake = true;
- bd[bdiagoff + d] = x;
- if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c;
- return d;
- }
- }
-
- /* Heuristic: check occasionally for a diagonal that has made
- lots of progress compared with the edit distance.
- If we have any such, find the one that has made the most
- progress and return it as if it had succeeded.
-
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
-
- if (c > 200 && big_snake && heuristic)
- {
- int best = 0;
- int bestpos = -1;
-
- for (d = fmax; d >= fmin; d -= 2)
- {
- int dd = d - fmid;
- if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if (fd[fdiagoff + d] * 2 - dd > best
- && fd[fdiagoff + d] - xoff > 20
- && fd[fdiagoff + d] - d - yoff > 20)
- {
- int k;
- int x = fd[fdiagoff + d];
-
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- for (k = 1; k <= 20; k++)
- if (xvec[x - k] != yvec[x - d - k])
- break;
-
- if (k == 21)
- {
- best = fd[fdiagoff + d] * 2 - dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
-
- best = 0;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int dd = d - bmid;
- if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
- && xlim - bd[bdiagoff + d] > 20
- && ylim - (bd[bdiagoff + d] - d) > 20)
- {
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- int k;
- int x = bd[bdiagoff + d];
-
- for (k = 0; k < 20; k++)
- if (xvec[x + k] != yvec[x - d + k])
- break;
- if (k == 20)
- {
- best = (xlim - bd[bdiagoff + d]) * 2 + dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
- }
- }
- }
-
- /** Compare in detail contiguous subsequences of the two files
- which are known, as a whole, to match each other.
-
- The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
- storing a 1 in the element for each line that is an insertion or deletion.
-
- The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
-
- Note that XLIM, YLIM are exclusive bounds.
- All line numbers are origin-0 and discarded lines are not counted. */
-
- private void compareseq (int xoff, int xlim, int yoff, int ylim) {
- /* Slide down the bottom initial diagonal. */
- while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
- ++xoff; ++yoff;
- }
- /* Slide up the top initial diagonal. */
- while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
- --xlim; --ylim;
- }
-
- /* Handle simple cases. */
- if (xoff == xlim)
- while (yoff < ylim)
- filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
- else if (yoff == ylim)
- while (xoff < xlim)
- filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
- else
- {
- /* Find a point of correspondence in the middle of the files. */
-
- int d = diag (xoff, xlim, yoff, ylim);
- int c = cost;
- int f = fdiag[fdiagoff + d];
- int b = bdiag[bdiagoff + d];
-
- if (c == 1)
- {
- /* This should be impossible, because it implies that
- one of the two subsequences is empty,
- and that case was handled above without calling `diag'.
- Let's verify that this is true. */
- throw new IllegalArgumentException("Empty subsequence");
- }
- else
- {
- /* Use that point to split this problem into two subproblems. */
- compareseq (xoff, b, yoff, b - d);
- /* This used to use f instead of b,
- but that is incorrect!
- It is not necessarily the case that diagonal d
- has a snake from b to f. */
- compareseq (b, xlim, b - d, ylim);
- }
- }
- }
-
- /** Discard lines from one file that have no matches in the other file.
- */
-
- private void discard_confusing_lines() {
- filevec[0].discard_confusing_lines(filevec[1]);
- filevec[1].discard_confusing_lines(filevec[0]);
- }
-
- private boolean inhibit = false;
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
- */
-
- private void shift_boundaries() {
- if (inhibit)
- return;
- filevec[0].shift_boundaries(filevec[1]);
- filevec[1].shift_boundaries(filevec[0]);
- }
-
- public interface ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script.
- @param changed0 true for lines in first file which do not match 2nd
- @param len0 number of lines in first file
- @param changed1 true for lines in 2nd file which do not match 1st
- @param len1 number of lines in 2nd file
- @return a linked list of changes - or null
- */
- public change build_script(
- boolean[] changed0,int len0,
- boolean[] changed1,int len1
- );
- }
-
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in reverse order. */
-
- static class ReverseScript implements ScriptBuilder {
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = 0, i1 = 0;
- while (i0 < len0 || i1 < len1) {
- if (changed0[1+i0] || changed1[1+i1]) {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[1+i0]) ++i0;
- while (changed1[1+i1]) ++i1;
-
- /* Record this change. */
- script = new change(line0, line1, i0 - line0, i1 - line1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0++; i1++;
- }
-
- return script;
- }
- }
-
- static class ForwardScript implements ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in forward order. */
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = len0, i1 = len1;
-
- while (i0 >= 0 || i1 >= 0)
- {
- if (changed0[i0] || changed1[i1])
- {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[i0]) --i0;
- while (changed1[i1]) --i1;
-
- /* Record this change. */
- script = new change(i0, i1, line0 - i0, line1 - i1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0--; i1--;
- }
-
- return script;
- }
- }
-
- /** Standard ScriptBuilders. */
- public final static ScriptBuilder
- forwardScript = new ForwardScript(),
- reverseScript = new ReverseScript();
-
-  /* Report the differences of two files, building the edit script in
-     forward or reverse order. */
- public final change diff_2(final boolean reverse) {
- return diff(reverse ? reverseScript : forwardScript);
- }
-
- /** Get the results of comparison as an edit script. The script
- is described by a list of changes. The standard ScriptBuilder
- implementations provide for forward and reverse edit scripts.
- Alternate implementations could, for instance, list common elements
- instead of differences.
- @param bld an object to build the script from change flags
- @return the head of a list of changes
- */
- public change diff(final ScriptBuilder bld) {
-
- /* Some lines are obviously insertions or deletions
- because they don't match anything. Detect them now,
- and avoid even thinking about them in the main comparison algorithm. */
-
- discard_confusing_lines ();
-
- /* Now do the main comparison algorithm, considering just the
- undiscarded lines. */
-
- xvec = filevec[0].undiscarded;
- yvec = filevec[1].undiscarded;
-
- int diags =
- filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
- fdiag = new int[diags];
- fdiagoff = filevec[1].nondiscarded_lines + 1;
- bdiag = new int[diags];
- bdiagoff = filevec[1].nondiscarded_lines + 1;
-
- compareseq (0, filevec[0].nondiscarded_lines,
- 0, filevec[1].nondiscarded_lines);
- fdiag = null;
- bdiag = null;
-
- /* Modify the results slightly to make them prettier
- in cases where that can validly be done. */
-
- shift_boundaries ();
-
- /* Get the results of comparison in the form of a chain
- of `struct change's -- an edit script. */
- return bld.build_script(
- filevec[0].changed_flag,
- filevec[0].buffered_lines,
- filevec[1].changed_flag,
- filevec[1].buffered_lines
- );
-
- }
-
- /** The result of comparison is an "edit script": a chain of change objects.
- Each change represents one place where some lines are deleted
- and some are inserted.
-
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
-
- public static class change {
- /** Previous or next edit command. */
- public change link;
- /** # lines of file 1 changed here. */
- public final int inserted;
- /** # lines of file 0 changed here. */
- public final int deleted;
- /** Line number of 1st deleted line. */
- public final int line0;
- /** Line number of 1st inserted line. */
- public final int line1;
-
- /** Cons an additional entry onto the front of an edit script OLD.
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
- public change(int line0, int line1, int deleted, int inserted, change old) {
- this.line0 = line0;
- this.line1 = line1;
- this.inserted = inserted;
- this.deleted = deleted;
- this.link = old;
- //System.err.println(line0+","+line1+","+inserted+","+deleted);
- }
- }
-
- /** Data on one input file being compared.
- */
-
- class file_data {
-
- /** Allocate changed array for the results of comparison. */
- void clear() {
- /* Allocate a flag for each line of each file, saying whether that line
- is an insertion or deletion.
- Allocate an extra element, always zero, at each end of each IndexedSeq.
- */
- changed_flag = new boolean[buffered_lines + 2];
- }
-
- /** Return equiv_count[I] as the number of lines in this file
- that fall in equivalence class I.
- @return the array of equivalence class counts.
- */
- int[] equivCount() {
- int[] equiv_count = new int[equiv_max];
- for (int i = 0; i < buffered_lines; ++i)
- ++equiv_count[equivs[i]];
- return equiv_count;
- }
-
- /** Discard lines that have no matches in another file.
-
- A line which is discarded will not be considered by the actual
- comparison algorithm; it will be as if that line were not in the file.
- The file's `realindexes' table maps virtual line numbers
- (which don't count the discarded lines) into real line numbers;
- this is how the actual comparison algorithm produces results
- that are comprehensible when the discarded lines are counted.
-<p>
- When we discard a line, we also mark it as a deletion or insertion
- so that it will be printed in the output.
- @param f the other file
- */
- void discard_confusing_lines(file_data f) {
- clear();
- /* Set up table of which lines are going to be discarded. */
- final byte[] discarded = discardable(f.equivCount());
-
- /* Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
- filterDiscards(discarded);
-
- /* Actually discard the lines. */
- discard(discarded);
- }
-
- /** Mark to be discarded each line that matches no line of another file.
- If a line matches many lines, mark it as provisionally discardable.
- @see equivCount()
- @param counts The count of each equivalence number for the other file.
- @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
- for each line
- */
-
- private byte[] discardable(final int[] counts) {
- final int end = buffered_lines;
- final byte[] discards = new byte[end];
- final int[] equivs = this.equivs;
- int many = 5;
- int tem = end / 64;
-
- /* Multiply MANY by approximate square root of number of lines.
- That is the threshold for provisionally discardable lines. */
- while ((tem = tem >> 2) > 0)
- many *= 2;
-
- for (int i = 0; i < end; i++)
- {
- int nmatch;
- if (equivs[i] == 0)
- continue;
- nmatch = counts[equivs[i]];
- if (nmatch == 0)
- discards[i] = 1;
- else if (nmatch > many)
- discards[i] = 2;
- }
- return discards;
- }
-
- /** Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
-
- private void filterDiscards(final byte[] discards) {
- final int end = buffered_lines;
-
- for (int i = 0; i < end; i++)
- {
- /* Cancel provisional discards not in middle of run of discards. */
- if (discards[i] == 2)
- discards[i] = 0;
- else if (discards[i] != 0)
- {
- /* We have found a nonprovisional discard. */
- int j;
- int length;
- int provisional = 0;
-
- /* Find end of this run of discardable lines.
- Count how many are provisionally discardable. */
- for (j = i; j < end; j++)
- {
- if (discards[j] == 0)
- break;
- if (discards[j] == 2)
- ++provisional;
- }
-
- /* Cancel provisional discards at end, and shrink the run. */
- while (j > i && discards[j - 1] == 2) {
- discards[--j] = 0; --provisional;
- }
-
- /* Now we have the length of a run of discardable lines
- whose first and last are not provisional. */
- length = j - i;
-
- /* If 1/4 of the lines in the run are provisional,
- cancel discarding of all provisional lines in the run. */
- if (provisional * 4 > length)
- {
- while (j > i)
- if (discards[--j] == 2)
- discards[j] = 0;
- }
- else
- {
- int consec;
- int minimum = 1;
- int tem = length / 4;
-
- /* MINIMUM is approximate square root of LENGTH/4.
- A subrun of two or more provisionals can stand
- when LENGTH is at least 16.
- A subrun of 4 or more can stand when LENGTH >= 64. */
- while ((tem = tem >> 2) > 0)
- minimum *= 2;
- minimum++;
-
- /* Cancel any subrun of MINIMUM or more provisionals
- within the larger run. */
- for (j = 0, consec = 0; j < length; j++)
- if (discards[i + j] != 2)
- consec = 0;
- else if (minimum == ++consec)
- /* Back up to start of subrun, to cancel it all. */
- j -= consec;
- else if (minimum < consec)
- discards[i + j] = 0;
-
- /* Scan from beginning of run
- until we find 3 or more nonprovisionals in a row
- or until the first nonprovisional at least 8 lines in.
- Until that point, cancel any provisionals. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i + j] == 1)
- break;
- if (discards[i + j] == 2) {
- consec = 0; discards[i + j] = 0;
- }
- else if (discards[i + j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
-
- /* I advances to the last line of the run. */
- i += length - 1;
-
- /* Same thing, from end. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i - j] == 1)
- break;
- if (discards[i - j] == 2) {
- consec = 0; discards[i - j] = 0;
- }
- else if (discards[i - j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
- }
- }
- }
- }
-
- /** Actually discard the lines.
- @param discards flags lines to be discarded
- */
- private void discard(final byte[] discards) {
- final int end = buffered_lines;
- int j = 0;
- for (int i = 0; i < end; ++i)
- if (no_discards || discards[i] == 0)
- {
- undiscarded[j] = equivs[i];
- realindexes[j++] = i;
- }
- else
- changed_flag[1+i] = true;
- nondiscarded_lines = j;
- }
-
- file_data(Object[] data,Hashtable h) {
- buffered_lines = data.length;
-
- equivs = new int[buffered_lines];
- undiscarded = new int[buffered_lines];
- realindexes = new int[buffered_lines];
-
- for (int i = 0; i < data.length; ++i) {
- Integer ir = (Integer)h.get(data[i]);
- if (ir == null)
- h.put(data[i],new Integer(equivs[i] = equiv_max++));
- else
- equivs[i] = ir.intValue();
- }
- }
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
-
-     We do something when a run of changed lines includes a blank
-     line at one end and has an excluded blank line at the other.
- We are free to choose which blank line is included.
- `compareseq' always chooses the one at the beginning,
- but usually it is cleaner to consider the following blank line
- to be the "change". The only exception is if the preceding blank line
- would join this change to other changes.
- @param f the file being compared against
- */
-
- void shift_boundaries(file_data f) {
- final boolean[] changed = changed_flag;
- final boolean[] other_changed = f.changed_flag;
- int i = 0;
- int j = 0;
- int i_end = buffered_lines;
- int preceding = -1;
- int other_preceding = -1;
-
- for (;;)
- {
- int start, end, other_start;
-
- /* Scan forwards to find beginning of another run of changes.
- Also keep track of the corresponding point in the other file. */
-
- while (i < i_end && !changed[1+i])
- {
- while (other_changed[1+j++])
- /* Non-corresponding lines in the other file
- will count as the preceding batch of changes. */
- other_preceding = j;
- i++;
- }
-
- if (i == i_end)
- break;
-
- start = i;
- other_start = j;
-
- for (;;)
- {
- /* Now find the end of this run of changes. */
-
- while (i < i_end && changed[1+i]) i++;
- end = i;
-
- /* If the first changed line matches the following unchanged one,
- and this run does not follow right after a previous run,
- and there are no lines deleted from the other file here,
- then classify the first changed line as unchanged
- and the following line as changed in its place. */
-
- /* You might ask, how could this run follow right after another?
- Only because the previous run was shifted here. */
-
- if (end != i_end
- && equivs[start] == equivs[end]
- && !other_changed[1+j]
- && end != i_end
- && !((preceding >= 0 && start == preceding)
- || (other_preceding >= 0
- && other_start == other_preceding)))
- {
- changed[1+end++] = true;
- changed[1+start++] = false;
- ++i;
- /* Since one line-that-matches is now before this run
- instead of after, we must advance in the other file
- to keep in synch. */
- ++j;
- }
- else
- break;
- }
-
- preceding = i;
- other_preceding = j;
- }
- }
-
- /** Number of elements (lines) in this file. */
- final int buffered_lines;
-
- /** IndexedSeq, indexed by line number, containing an equivalence code for
- each line. It is this IndexedSeq that is actually compared with that
- of another file to generate differences. */
- private final int[] equivs;
-
- /** IndexedSeq, like the previous one except that
- the elements for discarded lines have been squeezed out. */
- final int[] undiscarded;
-
- /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
- to real ones (counting those lines). Both are origin-0. */
- final int[] realindexes;
-
- /** Total number of nondiscarded lines. */
- int nondiscarded_lines;
-
- /** Array, indexed by real origin-1 line number,
- containing true for a line that is an insertion or a deletion.
- The results of comparison are stored here. */
- boolean[] changed_flag;
-
- }
-}
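
The class above implements the O(ND) algorithm with a bidirectional diag() search plus discarding and heuristics, which makes it hard to read in one sitting. As a reading aid, here is a forward-only Scala sketch of just the core idea (the edit distance D, with no script reconstruction, no discarding, and no heuristics); the object and method names are hypothetical.

    object MyersDistanceSketch {
      /** Minimal number of insertions plus deletions turning a into b. */
      def editDistance[T](a: IndexedSeq[T], b: IndexedSeq[T]): Int = {
        val n = a.length
        val m = b.length
        val max = n + m
        val v = new Array[Int](2 * max + 2)   // v(k + max) = furthest x reached on diagonal k
        var d = 0
        var found = -1
        while (d <= max && found < 0) {
          var k = -d
          while (k <= d && found < 0) {
            var x =
              if (k == -d || (k != d && v(k - 1 + max) < v(k + 1 + max))) v(k + 1 + max)
              else v(k - 1 + max) + 1
            var y = x - k
            while (x < n && y < m && a(x) == b(y)) { x += 1; y += 1 }   // follow the "snake"
            v(k + max) = x
            if (x >= n && y >= m) found = d   // both ends reached: D == d
            k += 2
          }
          d += 1
        }
        found
      }

      def main(args: Array[String]): Unit = {
        // The example from the Myers paper: the shortest edit script has length 5.
        println(editDistance("ABCABBA".toIndexedSeq, "CBABAC".toIndexedSeq))   // 5
      }
    }
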
diff --git a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java b/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
deleted file mode 100644
index 273b6cba52..0000000000
--- a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
+++ /dev/null
@@ -1,606 +0,0 @@
-
-package scala.tools.partest.io;
-
-import java.io.*;
-import java.util.Vector;
-import java.util.Date;
-//import com.objectspace.jgl.predicates.UnaryPredicate;
-
-interface UnaryPredicate {
- boolean execute(Object obj);
-}
-
-/** A simple framework for printing change lists produced by <code>Diff</code>.
- @see bmsi.util.Diff
- @author Stuart D. Gathman
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-public class DiffPrint {
- /** A Base class for printing edit scripts produced by Diff.
- This class divides the change list into "hunks", and calls
- <code>print_hunk</code> for each hunk. Various utility methods
- are provided as well.
- */
- public static abstract class Base {
- protected Base(Object[] a,Object[] b, Writer w) {
- outfile = new PrintWriter(w);
- file0 = a;
- file1 = b;
- }
- /** Set to ignore certain kinds of lines when printing
- an edit script. For example, ignoring blank lines or comments.
- */
- protected UnaryPredicate ignore = null;
-
- /** Set to the lines of the files being compared.
- */
- protected Object[] file0, file1;
-
- /** Divide SCRIPT into pieces by calling HUNKFUN and
- print each piece with PRINTFUN.
- Both functions take one arg, an edit script.
-
-      PRINTFUN takes a piece of the script that belongs together (terminated
-      by a null link) and prints it.  */
- public void print_script(Diff.change script) {
- Diff.change next = script;
-
- while (next != null)
- {
- Diff.change t, end;
-
- /* Find a set of changes that belong together. */
- t = next;
- end = hunkfun(next);
-
- /* Disconnect them from the rest of the changes,
- making them a hunk, and remember the rest for next iteration. */
- next = end.link;
- end.link = null;
- //if (DEBUG)
- // debug_script(t);
-
- /* Print this hunk. */
- print_hunk(t);
-
- /* Reconnect the script so it will all be freed properly. */
- end.link = next;
- }
- outfile.flush();
- }
-
- /** Called with the tail of the script
- and returns the last link that belongs together with the start
- of the tail. */
-
- protected Diff.change hunkfun(Diff.change hunk) {
- return hunk;
- }
-
- protected int first0, last0, first1, last1, deletes, inserts;
- protected PrintWriter outfile;
-
- /** Look at a hunk of edit script and report the range of lines in each file
- that it applies to. HUNK is the start of the hunk, which is a chain
- of `struct change'. The first and last line numbers of file 0 are stored
- in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
- Note that these are internal line numbers that count from 0.
-
- If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
-
- Also set *DELETES nonzero if any lines of file 0 are deleted
- and set *INSERTS nonzero if any lines of file 1 are inserted.
- If only ignorable lines are inserted or deleted, both are
- set to 0. */
-
- protected void analyze_hunk(Diff.change hunk) {
- int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
- int i;
- Diff.change next;
- boolean nontrivial = (ignore == null);
-
- show_from = show_to = 0;
-
- f0 = hunk.line0;
- f1 = hunk.line1;
-
- for (next = hunk; next != null; next = next.link)
- {
- l0 = next.line0 + next.deleted - 1;
- l1 = next.line1 + next.inserted - 1;
- show_from += next.deleted;
- show_to += next.inserted;
- for (i = next.line0; i <= l0 && ! nontrivial; i++)
- if (!ignore.execute(file0[i]))
- nontrivial = true;
- for (i = next.line1; i <= l1 && ! nontrivial; i++)
- if (!ignore.execute(file1[i]))
- nontrivial = true;
- }
-
- first0 = f0;
- last0 = l0;
- first1 = f1;
- last1 = l1;
-
- /* If all inserted or deleted lines are ignorable,
- tell the caller to ignore this hunk. */
-
- if (!nontrivial)
- show_from = show_to = 0;
-
- deletes = show_from;
- inserts = show_to;
- }
-
- /** Print the script header which identifies the files compared. */
- protected void print_header(String filea, String fileb) { }
-
- protected abstract void print_hunk(Diff.change hunk);
-
- protected void print_1_line(String pre,Object linbuf) {
- outfile.println(pre + linbuf.toString());
- }
-
- /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
- If the two numbers are identical, print just one number.
-
- Args A and B are internal line numbers.
- We print the translated (real) line numbers. */
-
- protected void print_number_range (char sepchar, int a, int b) {
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (++b > ++a)
- outfile.print("" + a + sepchar + b);
- else
- outfile.print(b);
- }
-
- public static char change_letter(int inserts, int deletes) {
- if (inserts == 0)
- return 'd';
- else if (deletes == 0)
- return 'a';
- else
- return 'c';
- }
- }
-
- /** Print a change list in the standard diff format.
- */
- public static class NormalPrint extends Base {
-
- public NormalPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of a normal diff.
- This is a contiguous portion of a complete edit script,
- describing changes in consecutive lines. */
-
- protected void print_hunk (Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk(hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.print(change_letter(inserts, deletes));
- print_number_range (',', first1, last1);
- outfile.println();
-
- /* Print the lines that the first file has. */
- if (deletes != 0)
- for (int i = first0; i <= last0; i++)
- print_1_line ("< ", file0[i]);
-
- if (inserts != 0 && deletes != 0)
- outfile.println("---");
-
- /* Print the lines that the second file has. */
- if (inserts != 0)
- for (int i = first1; i <= last1; i++)
- print_1_line ("> ", file1[i]);
- }
- }
-
- /** Prints an edit script in a format suitable for input to <code>ed</code>.
- The edit script must be generated with the reverse option to
- be useful as actual <code>ed</code> input.
- */
- public static class EdPrint extends Base {
-
- public EdPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of an ed diff */
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.println(change_letter(inserts, deletes));
-
- /* Print new/changed lines from second file, if needed */
- if (inserts != 0)
- {
- boolean inserting = true;
- for (int i = first1; i <= last1; i++)
- {
- /* Resume the insert, if we stopped. */
- if (! inserting)
- outfile.println(i - first1 + first0 + "a");
- inserting = true;
-
- /* If the file's line is just a dot, it would confuse `ed'.
- So output it with a double dot, and set the flag LEADING_DOT
- so that we will output another ed-command later
- to change the double dot into a single dot. */
-
- if (".".equals(file1[i]))
- {
- outfile.println("..");
- outfile.println(".");
- /* Now change that double dot to the desired single dot. */
- outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
- inserting = false;
- }
- else
- /* Line is not `.', so output it unmodified. */
- print_1_line ("", file1[i]);
- }
-
- /* End insert mode, if we are still in it. */
- if (inserting)
- outfile.println(".");
- }
- }
- }
-
-  /** Prints an edit script in context diff format.  This and its
-    'unified' variation are used for source code patches.
- */
- public static class ContextPrint extends Base {
-
- protected int context = 3;
-
- public ContextPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- protected void print_context_label (String mark, File inf, String label) {
- if (label != null)
- outfile.println(mark + ' ' + label);
- else if (inf.lastModified() > 0)
- // FIXME: use DateFormat to get precise format needed.
- outfile.println(
- mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
- );
- else
- /* Don't pretend that standard input is ancient. */
- outfile.println(mark + ' ' + inf.getPath());
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("***", new File(filea), filea);
- print_context_label ("---", new File(fileb), fileb);
- }
-
- /** If a function regexp were supported, search for the start of the enclosing function; this port always returns null. */
- private String find_function(Object[] lines, int start) {
- return null;
- }
-
- protected void print_function(Object[] file,int start) {
- String function = find_function (file0, first0);
- if (function != null) {
- outfile.print(" ");
- outfile.print(
- (function.length() < 40) ? function : function.substring(0,40)
- );
- }
- }
-
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
-
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
- outfile.print("***************");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function (file0, first0);
-
- outfile.println();
- outfile.print("*** ");
- print_number_range (',', first0, last0);
- outfile.println(" ****");
-
- if (deletes != 0) {
- Diff.change next = hunk;
-
- for (int i = first0; i <= last0; i++) {
- /* Skip past changes that apply (in file 0)
- only to lines before line I. */
-
- while (next != null && next.line0 + next.deleted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line0 <= i)
- /* The change NEXT covers this line.
- If lines were inserted here in file 1, this is "changed".
- Otherwise it is "deleted". */
- prefix = (next.inserted > 0) ? "!" : "-";
-
- print_1_line (prefix, file0[i]);
- }
- }
-
- outfile.print("--- ");
- print_number_range (',', first1, last1);
- outfile.println(" ----");
-
- if (inserts != 0) {
- Diff.change next = hunk;
-
- for (int i = first1; i <= last1; i++) {
- /* Skip past changes that apply (in file 1)
- only to lines before line I. */
-
- while (next != null && next.line1 + next.inserted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line1 <= i)
- /* The change NEXT covers this line.
- If lines were deleted here in file 0, this is "changed".
- Otherwise it is "inserted". */
- prefix = (next.deleted > 0) ? "!" : "+";
-
- print_1_line (prefix, file1[i]);
- }
- }
- }
- }
-
- /** Prints an edit script in unified diff format. This and the
- 'context' variation are used for source code patches.
- */
- public static class UnifiedPrint extends ContextPrint {
-
- public UnifiedPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("---", new File(filea), filea);
- print_context_label ("+++", new File(fileb), fileb);
- }
-
- private void print_number_range (int a, int b) {
- //translate_range (file, a, b, &trans_a, &trans_b);
-
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (b < a)
- outfile.print(b + ",0");
- else
- super.print_number_range(',',a,b);
- }
-
- protected void print_hunk(Diff.change hunk) {
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
-
- outfile.print("@@ -");
- print_number_range (first0, last0);
- outfile.print(" +");
- print_number_range (first1, last1);
- outfile.print(" @@");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function(file0,first0);
-
- outfile.println();
-
- Diff.change next = hunk;
- int i = first0;
- int j = first1;
-
- while (i <= last0 || j <= last1) {
-
- /* If the line isn't a difference, output the context from file 0. */
-
- if (next == null || i < next.line0) {
- outfile.print(' ');
- print_1_line ("", file0[i++]);
- j++;
- }
- else {
- /* For each difference, first output the deleted part. */
-
- int k = next.deleted;
- while (k-- > 0) {
- outfile.print('-');
- print_1_line ("", file0[i++]);
- }
-
- /* Then output the inserted part. */
-
- k = next.inserted;
- while (k-- > 0) {
- outfile.print('+');
- print_1_line ("", file1[j++]);
- }
-
- /* We're done with this hunk, so on to the next! */
-
- next = next.link;
- }
- }
- }
- }
-
-
- /** Read a text file into an array of Strings, one element per line.
- This supports the basic diff driver below; a more advanced diff
- utility would use specialized objects to represent the text lines,
- with options to, for example, convert sequences of whitespace to a
- single space for comparison purposes.
- */
- static String[] slurp(String file) throws IOException {
- BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector s = new Vector();
- for (;;) {
- String line = rdr.readLine();
- if (line == null) break;
- s.addElement(line);
- }
- String[] a = new String[s.size()];
- s.copyInto(a);
- return a;
- }
-
- public static void main(String[] argv) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- System.err.println("No differences");
- else {
- Base p;
- Writer w = new OutputStreamWriter(System.out);
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
- public static void doDiff(String[] argv, Writer w) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- w.write("No differences\n");
- else {
- Base p;
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
-}
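
For reference, a minimal sketch of how the DiffPrint utility removed above was driven: doDiff takes GNU-diff-style flags ("-e", "-c", "-u") followed by two file names and writes the result to the supplied Writer. This assumes the pre-removal class in scala.tools.partest.io; the file names are hypothetical.

    import java.io.StringWriter
    import scala.tools.partest.io.DiffPrint   // the class deleted by this commit

    object DiffPrintDemo extends App {
      val out = new StringWriter
      // "-u" selects unified output; the last two arguments are the files to compare.
      DiffPrint.doDiff(Array("-u", "old.txt", "new.txt"), out)
      print(out.toString)   // prints "No differences" when the files match
    }
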
diff --git a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala b/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
deleted file mode 100644
index ddb3bc23fd..0000000000
--- a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package io
-
-/** This is disabled for the moment but I can fix it up if anyone
- * is using it.
- */
-class JUnitReport {
- // create JUnit Report xml files if directory was specified
- // def junitReport(dir: Directory) = {
- // dir.mkdir()
- // val report = testReport(set.kind, results, succs, fails)
- // XML.save("%s/%s.xml".format(d.toAbsolute.path, set.kind), report)
- // }
-
- // def oneResult(res: (TestEntity, Int)) =
- // <testcase name={res._1.path}>{
- // res._2 match {
- // case 0 => scala.xml.NodeSeq.Empty
- // case 1 => <failure message="Test failed"/>
- // case 2 => <failure message="Test timed out"/>
- // }
- // }</testcase>
- //
- // def testReport(kind: String, results: Iterable[(TestEntity, Int)], succs: Int, fails: Int) = {
- // <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- // <properties/>
- // {
- // results.map(oneResult(_))
- // }
- // </testsuite>
- // }
- //
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/io/Logging.scala b/src/partest-alternative/scala/tools/partest/io/Logging.scala
deleted file mode 100644
index 52239ffb2c..0000000000
--- a/src/partest-alternative/scala/tools/partest/io/Logging.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-package scala.tools
-package partest
-package io
-
-import java.io.{ StringWriter, PrintWriter, Writer }
-import scala.tools.nsc.io._
-import scala.util.control.ControlThrowable
-
-trait Logging {
- universe: Universe =>
-
- class PartestANSIWriter extends ANSIWriter(Console.out) {
- override def colorful: Int = ANSIWriter(universe.isAnsi)
- private def printIf(cond: Boolean, msg: String) =
- if (cond) { outline("debug: ") ; println(msg) }
-
- val verbose = printIf(isVerbose || isDebug, _: String)
- val debug = printIf(isDebug, _: String)
- }
-
- lazy val NestUI = new PartestANSIWriter()
-
- import NestUI.{ _outline, _success, _failure, _warning, _default }
-
- def markOutline(msg: String) = _outline + msg + _default
- def markSuccess(msg: String) = _success + msg + _default
- def markFailure(msg: String) = _failure + msg + _default
- def markWarning(msg: String) = _warning + msg + _default
- def markNormal(msg: String) = _default + msg
-
- def outline(msg: String) = NestUI outline msg
- def success(msg: String) = NestUI success msg
- def failure(msg: String) = NestUI failure msg
- def warning(msg: String) = NestUI warning msg
- def normal(msg: String) = NestUI normal msg
-
- def verbose(msg: String) = NestUI verbose msg
- def debug(msg: String) = NestUI debug msg
-
- trait EntityLogging {
- self: TestEntity =>
-
- lazy val logWriter = new LogWriter(logFile)
-
- /** Redirect stdout and stderr to logFile, run body, return result.
- */
- def loggingOutAndErr[T](body: => T): T = {
- val log = logFile.printStream(append = true)
-
- try Console.withOut(log) {
- Console.withErr(log) {
- body
- }
- }
- finally log.close()
- }
-
- /** What to print in a failure summary.
- */
- def failureMessage() = if (diffOutput != "") diffOutput else safeSlurp(logFile)
-
- /** For tracing. Outputs a line describing the next action. tracePath
- * is a path wrapper which prints name or full path depending on verbosity.
- */
- def trace(msg: String) = if (isTrace || isDryRun) System.err.println(">> [%s] %s".format(label, msg))
-
- def tracePath(path: Path): String = if (isVerbose) path.path else path.name
- def tracePath(path: String): String = tracePath(Path(path))
-
- /** v == verbose.
- */
- def vtrace(msg: String) = if (isVerbose) trace(msg)
-
- /** Run body and write its result to logFile. Any throwable is
- * caught, stringified, and written to the log.
- */
- def loggingResult(body: => String) =
- try returning(true)(_ => logFile writeAll body)
- catch {
- case x: ControlThrowable => throw x
- case x: InterruptedException => debug(this + " received interrupt, failing.\n") ; false
- case x: Throwable => logException(x)
- }
-
- def throwableToString(x: Throwable): String = {
- val w = new StringWriter
- x.printStackTrace(new PrintWriter(w))
- w.toString
- }
-
- def warnAndLog(str: String) = {
- warning(toStringTrunc(str, 800))
- logWriter append str
- }
-
- def warnAndLogException(msg: String, ex: Throwable) =
- warnAndLog(msg + throwableToString(ex))
-
- def deleteLog(force: Boolean = false) =
- if (universe.isNoCleanup && !force) debug("Not cleaning up " + logFile)
- else logFile.deleteIfExists()
-
- def onException(x: Throwable) { logException(x) }
- def logException(x: Throwable) = {
- val msg = throwableToString(x)
- if (!isTerse)
- normal(msg)
-
- logWriter append msg
- false
- }
- }
-
- /** A writer which doesn't create the file until a write comes in.
- */
- class LazilyCreatedWriter(log: File) extends Writer {
- @volatile private var isCreated = false
- private lazy val underlying = {
- isCreated = true
- log.bufferedWriter()
- }
-
- def flush() = if (isCreated) underlying.flush()
- def close() = if (isCreated) underlying.close()
- def write(chars: Array[Char], off: Int, len: Int) = {
- underlying.write(chars, off, len)
- underlying.flush()
- }
- }
-
- class LogWriter(log: File) extends PrintWriter(new LazilyCreatedWriter(log), true) {
- override def print(s: String) = {
- super.print(s)
- flush()
- }
- }
-} \ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/package.scala b/src/partest-alternative/scala/tools/partest/package.scala
deleted file mode 100644
index 9c515aa2f4..0000000000
--- a/src/partest-alternative/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-
-import nsc.io.{ File, Path, Process, Directory }
-import java.nio.charset.CharacterCodingException
-
-package object partest {
- /** A CharacterCodingException is thrown, at least on Windows, when
- * trying to read a file like script/utf-8.scala.
- */
- private[partest] def safeSlurp(f: File) =
- try if (f.exists) f.slurp() else ""
- catch { case _: CharacterCodingException => "" }
-
- private[partest] def safeLines(f: File) = safeSlurp(f) split """\r\n|\r|\n""" toList
- private[partest] def safeArgs(f: File) = toArgs(safeSlurp(f))
- private[partest] def isJava(f: Path) = f.isFile && (f hasExtension "java")
- private[partest] def isScala(f: Path) = f.isFile && (f hasExtension "scala")
- private[partest] def isJavaOrScala(f: Path) = isJava(f) || isScala(f)
-
- private[partest] def toArgs(line: String) = cmd toArgs line
- private[partest] def fromArgs(args: List[String]) = cmd fromArgs args
-
- /** Strings, argument lists, etc. */
-
- private[partest] def fromAnyArgs(args: List[Any]) = args mkString " " // separate to avoid accidents
- private[partest] def toStringTrunc(x: Any, max: Int = 240) = {
- val s = x.toString
- if (s.length < max) s
- else (s take max) + " [...]"
- }
- private[partest] def setProp(k: String, v: String) = scala.util.Properties.setProp(k, v)
-
- /** Print the message and exit with status 1. */
- def printAndExit(msg: String): Unit = {
- println(msg)
- exit(1)
- }
-
- /** Apply a function and return the passed value */
- def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
-} \ No newline at end of file
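
The removed returning combinator above (used by Logging.loggingResult earlier in this diff) applies a side-effecting function to a value and hands that value back. A self-contained sketch of the same shape:

    object ReturningDemo extends App {
      // same signature as the helper deleted above
      def returning[T](x: T)(f: T => Unit): T = { f(x); x }

      val sb = returning(new StringBuilder)(_ append "partest")
      assert(sb.toString == "partest")   // the side effect ran and the original value came back
      println(sb)
    }
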
diff --git a/src/partest-alternative/scala/tools/partest/util/package.scala b/src/partest-alternative/scala/tools/partest/util/package.scala
deleted file mode 100644
index c34d641db1..0000000000
--- a/src/partest-alternative/scala/tools/partest/util/package.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import java.util.{ Timer, TimerTask }
-import java.io.StringWriter
-import nsc.io._
-
-/** Misc code still looking for a good home.
- */
-package object util {
-
- def allPropertiesString() = javaHashtableToString(System.getProperties)
-
- private def javaHashtableToString(table: java.util.Hashtable[_,_]) = {
- import collection.JavaConversions._
- (table.toList map { case (k, v) => "%s -> %s\n".format(k, v) }).sorted mkString
- }
-
- def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (x => Option(AbstractFile getFile (Path(pre) / x).path)) toSet
-
- /** Copies one Path to another Path, trying to be sensible when one or the
- * other is a Directory. Returns true if it believes it succeeded.
- */
- def copyPath(from: Path, to: Path): Boolean = {
- if (!to.parent.isDirectory)
- to.parent.createDirectory(force = true)
-
- def copyDir = {
- val sub = to / from.name createDirectory true
- from.toDirectory.list forall (x => copyPath(x, sub))
- }
- (from.isDirectory, to.isDirectory) match {
- case (true, true) => copyDir
- case (true, false) => false
- case (false, true) => from.toFile copyTo (to / from.name)
- case (false, false) => from.toFile copyTo to
- }
- }
-
- /**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the text difference between the compared files
- */
- def diffFiles(f1: File, f2: File): String = {
- val diffWriter = new StringWriter
- val args = Array(f1.toAbsolute.path, f2.toAbsolute.path)
-
- io.DiffPrint.doDiff(args, diffWriter)
- val result = diffWriter.toString
- if (result == "No differences") "" else result
- }
-}
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
new file mode 100644
index 0000000000..dd06c051a4
--- /dev/null
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc._
+
+/** For testing compiler internals directly.
+ * Each source code string in "sources" will be compiled, and
+ * the check function will be called with the source code and the
+ * resulting CompilationUnit. The check implementation should
+ * test for what it wants to test and fail (via assert or other
+ * exception) if it is not happy.
+ */
+abstract class CompilerTest extends DirectTest {
+ def check(source: String, unit: global.CompilationUnit): Unit
+
+ lazy val global: Global = newCompiler()
+ lazy val units = compilationUnits(global)(sources: _ *)
+
+ override def extraSettings = "-usejavacp -d " + testOutput.path
+
+ def sources: List[String] = List(code)
+ def show() = (sources, units).zipped foreach check
+}
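
A hypothetical usage sketch for the new CompilerTest (the object name, source string, and assertion are illustrative only): a test overrides code and check, and show() feeds each compiled unit to check.

    object Test extends scala.tools.partest.CompilerTest {
      // the source to compile; CompilerTest wraps it in sources/units
      def code = "class C { def f = 1 }"

      // invoked by show() with each source string and its CompilationUnit
      def check(source: String, unit: global.CompilationUnit) {
        // illustrative assertion against the compiled tree
        assert(unit.body.toString contains "class C", "expected class C in the tree for: " + source)
      }
    }
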
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index be8cac9147..74f511aa4e 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -35,13 +35,37 @@ abstract class DirectTest extends App {
s processArguments (allArgs, true)
s
}
- // compile the code, optionally first adding to the settings
- def compile(args: String*) = {
+ // new compiler
+ def newCompiler(args: String*): Global = {
val settings = newSettings((CommandLineParser tokenize extraSettings) ++ args.toList)
- val global = new Global(settings)
- new global.Run compileSources List(new BatchSourceFile("<partest>", code))
+ new Global(settings)
+ }
+ def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src)
+ }
+ def compileString(global: Global)(sourceCode: String): Boolean = {
+ withRun(global)(_ compileSources newSources(sourceCode))
!global.reporter.hasErrors
}
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = withRun(global) { run =>
+ run compileSources newSources(sourceCodes: _*)
+ run.units.toList
+ }
+ if (global.reporter.hasErrors) {
+ global.reporter.flush()
+ sys.error("Compilation failure.")
+ }
+ units
+ }
+
+ def withRun[T](global: Global)(f: global.Run => T): T = {
+ global.reporter.reset()
+ f(new global.Run)
+ }
+
+ // compile the code, optionally first adding to the settings
+ def compile(args: String*) = compileString(newCompiler(args: _*))(code)
/** Constructor/main body **/
try show()
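
The new helpers can also be used directly from a DirectTest: newCompiler builds a Global from extraSettings, compileString runs one Run and reports success, and withRun resets the reporter before each Run so later compilations do not see stale errors. A hypothetical sketch (the object name, settings, and code strings are illustrative):

    object Test extends scala.tools.partest.DirectTest {
      override def extraSettings = "-usejavacp -d " + testOutput.path
      def code = "object O"   // unused here; show() drives the compiler itself

      def show() {
        val g = newCompiler()
        assert(compileString(g)("object A { val x = 1 }"))          // clean compile
        assert(!compileString(g)("object B extends NoSuchParent"))  // fails: unknown parent
      }
    }
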
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index a08100a33f..524dc06327 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -15,10 +15,8 @@ import scala.tools.nsc.io.{ Directory, Path => SPath }
import nsc.util.ClassPath
import util.PathResolver
import scala.tools.ant.sabbus.CompilationPathProperty
-
import java.io.File
import java.lang.reflect.Method
-
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
@@ -281,6 +279,26 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala library.")
+ val scalaCompiler = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-compiler.jar" => true
+ case "compiler" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala compiler.")
+
+ val scalaPartest = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-partest.jar" => true
+ case "partest" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala partest.")
+
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
@@ -289,11 +307,23 @@ class PartestTask extends Task with CompilationPathProperty {
val antRunner = new scala.tools.partest.nest.AntRunner
val antFileManager = antRunner.fileManager
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, this paragraph of code can be safely removed
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
+ val path = new org.apache.tools.ant.types.Path(getProject())
+ val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
+ path.setPath(newClassPath)
+ loader.setClassPath(path)
+
antFileManager.showDiff = showDiff
antFileManager.showLog = showLog
antFileManager.failed = runFailed
antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
+ antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
+ antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
@@ -361,18 +391,18 @@ class PartestTask extends Task with CompilationPathProperty {
private def oneResult(res: (String, Int)) =
<testcase name={res._1}>{
- res._2 match {
- case 0 => scala.xml.NodeSeq.Empty
+ res._2 match {
+ case 0 => scala.xml.NodeSeq.Empty
case 1 => <failure message="Test failed"/>
case 2 => <failure message="Test timed out"/>
- }
- }</testcase>
+ }
+ }</testcase>
private def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
<testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- <properties/>
- {
- results.map(oneResult(_))
- }
+ <properties/>
+ {
+ results.map(oneResult(_))
+ }
</testsuite>
}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 002e454b7b..4795e5551a 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -20,6 +20,8 @@ class AntRunner extends DirectRunner {
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 68688ff949..7aaa7bab00 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -12,6 +12,7 @@ import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
import scala.tools.nsc.interactive.RangePositions
import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
import scala.tools.nsc.util.{ ClassPath, FakePos }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
import scala.tools.util.PathResolver
import io.Path
import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
@@ -26,7 +27,7 @@ class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
deprecation.value = true
nowarnings.value = false
- encoding.value = "ISO-8859-1"
+ encoding.value = "UTF-8"
classpath.value = cp
}
@@ -111,6 +112,8 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
try {
NestUI.verbose("compiling "+toCompile)
+ NestUI.verbose("with classpath: "+global.classPath.toString)
+ NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
try new global.Run compile toCompile
catch {
case FatalError(msg) =>
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index b9b371d6cb..3d72227b04 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -157,9 +157,13 @@ class ConsoleFileManager extends FileManager {
}
LATEST_LIB = latestLibFile.getAbsolutePath
+ LATEST_COMP = latestCompFile.getAbsolutePath
+ LATEST_PARTEST = latestPartestFile.getAbsolutePath
}
var LATEST_LIB: String = ""
+ var LATEST_COMP: String = ""
+ var LATEST_PARTEST: String = ""
var latestFile: File = _
var latestLibFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 78184664c2..d3d50ca58c 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -52,8 +52,15 @@ trait DirectRunner {
val kindFiles = onlyValidTestPaths(_kindFiles)
val groupSize = (kindFiles.length / numActors) + 1
- val consFM = new ConsoleFileManager
- import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
+ // @partest maintainer: we cannot create a fresh file manager here
+ // since the FM must respect --buildpath and --classpath from the command line
+ // for example, see how it's done in ReflectiveRunner
+ //val consFM = new ConsoleFileManager
+ //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
+ val latestCompFile = new File(fileManager.LATEST_COMP);
+ val latestLibFile = new File(fileManager.LATEST_LIB);
+ val latestPartestFile = new File(fileManager.LATEST_PARTEST);
+
val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
List(scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestPartestFile.toURI.toURL)
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 780f7a35e5..a4a94fe93e 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -60,6 +60,8 @@ trait FileManager extends FileUtil {
var CLASSPATH: String
var LATEST_LIB: String
+ var LATEST_COMP: String
+ var LATEST_PARTEST: String
var showDiff = false
var updateCheck = false
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index f6353faa6f..e0a2f65b80 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -16,6 +16,9 @@ object PathSettings {
private def cwd = Directory.Current getOrElse sys.error("user.dir property not set")
private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
+ private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
+ private def findJar(files: Iterator[File], name: String): Option[File] =
+ files filter (_ hasExtension "jar") find { _.name startsWith name }
// Directory <root>/test
lazy val testRoot: Directory = testRootDir getOrElse {
@@ -33,10 +36,17 @@ object PathSettings {
// Directory <root>/test/files/speclib
lazy val srcSpecLibDir = Directory(srcDir / "speclib")
- lazy val srcSpecLib: File = srcSpecLibDir.files find (_.name startsWith "instrumented") getOrElse {
+ lazy val srcSpecLib: File = findJar(srcSpecLibDir, "instrumented") getOrElse {
sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
}
+ // Directory <root>/test/files/codelib
+ lazy val srcCodeLibDir = Directory(srcDir / "codelib")
+
+ lazy val srcCodeLib: File = findJar(srcCodeLibDir, "code") getOrElse {
+ sys.error("No code.jar found in %s".format(srcCodeLibDir))
+ }
+
// Directory <root>/build
lazy val buildDir: Directory = {
val bases = testRoot :: testRoot.parents
@@ -51,7 +61,7 @@ object PathSettings {
lazy val buildPackLibDir = Directory(buildDir / "pack" / "lib")
lazy val scalaCheck: File =
- buildPackLibDir.files ++ srcLibDir.files find (_.name startsWith "scalacheck") getOrElse {
+ findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index f39debf31d..5cde63dc81 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -53,15 +53,28 @@ class ReflectiveRunner {
Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
- val sepLoader = new URLClassLoader(sepUrls, null)
+ var sepLoader = new URLClassLoader(sepUrls, null)
+
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, this paragraph of code can be safely removed
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
- val paths = classPath match {
- case Some(cp) => Nil
- case _ => files.toList map (_.path)
- }
+ // @partest maintainer: it seems to me that commented lines are incorrect
+ // if classPath is not empty, then it has been provided by the --classpath option
+ // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
+ // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
+ //
+ //val paths = classPath match {
+ // case Some(cp) => Nil
+ // case _ => files.toList map (_.path)
+ //}
+ val paths = files.toList map (_.path)
+
val newClasspath = ClassPath.join(paths: _*)
setProp("java.class.path", newClasspath)
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index ae54e51761..4c6f417df5 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -7,16 +7,18 @@ import scala.util.Properties.setProp
object SBTRunner extends DirectRunner {
-
+
val fileManager = new FileManager {
var JAVACMD: String = "java"
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
-
+
def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.HashMap[String,Int] = {
def convert(scalaM:scala.collection.immutable.Map[String,Int]):java.util.HashMap[String,Int] = {
val javaM = new java.util.HashMap[String,Int]()
@@ -25,9 +27,9 @@ object SBTRunner extends DirectRunner {
}
def failedOnlyIfRequired(files:List[File]):List[File]={
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
+ if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
- convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
}
case class CommandLineOptions(classpath: Option[String] = None,
@@ -38,7 +40,7 @@ object SBTRunner extends DirectRunner {
def mainReflect(args: Array[String]): java.util.Map[String,Int] = {
setProp("partest.debug", "true")
setProperties()
-
+
val Argument = new scala.util.matching.Regex("-(.*)")
def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
@@ -50,10 +52,14 @@ object SBTRunner extends DirectRunner {
}
val config = parseArgs(args, CommandLineOptions())
fileManager.SCALAC_OPTS = config.scalacOptions
- fileManager.CLASSPATH = config.classpath getOrElse error("No classpath set")
+ fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
// Find scala library jar file...
val lib: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-library.*\\.jar")).headOption
- fileManager.LATEST_LIB = lib getOrElse error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_LIB = lib getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ val comp: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-compiler.*\\.jar")).headOption
+ fileManager.LATEST_COMP = comp getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
+ val partest: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-partest.*\\.jar")).headOption
+ fileManager.LATEST_PARTEST = partest getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
// TODO - Do something useful here!!!
fileManager.JAVAC_CMD = "javac"
fileManager.failed = config.justFailedTests
@@ -63,7 +69,7 @@ object SBTRunner extends DirectRunner {
val runs = config.tests.filterNot(_._2.isEmpty)
// This next bit uses java maps...
import collection.JavaConverters._
- (for {
+ (for {
(testType, files) <- runs
(path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
} yield (path, result)).seq asJava
@@ -71,14 +77,12 @@ object SBTRunner extends DirectRunner {
def main(args: Array[String]): Unit = {
import collection.JavaConverters._
- val failures = for {
- (path, result) <- mainReflect(args).asScala
- if result == 1 || result == 2
- val resultName = (if(result == 1) " [FAILED]" else " [TIMEOUT]")
- } yield path + resultName
+ val failures = (
+ for ((path, result) <- mainReflect(args).asScala ; if result == 1 || result == 2) yield
+ path + ( if (result == 1) " [FAILED]" else " [TIMEOUT]" )
+ )
// Re-list all failures so we can go figure out what went wrong.
failures foreach System.err.println
if(!failures.isEmpty) sys.exit(1)
}
}
-
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
index 9c61097cb0..fc5792e886 100644
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ b/src/partest/scala/tools/partest/nest/TestFile.scala
@@ -12,6 +12,7 @@ import java.io.{ File => JFile }
import scala.tools.nsc.Settings
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io._
+import scala.util.Properties.{ propIsSet, propOrElse, setProp }
trait TestFileCommon {
def file: JFile
@@ -34,6 +35,10 @@ abstract class TestFile(val kind: String) extends TestFileCommon {
if (setOutDir)
settings.outputDirs setSingleOutput setOutDirTo.path
+ // adding code.jar to the classpath (to provide Code.lift services for reification tests)
+ settings.classpath prepend PathSettings.srcCodeLib.toString
+ if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
+
// have to catch bad flags somewhere
(flags forall (f => settings.processArgumentString(f)._1)) && {
settings.classpath append fileManager.CLASSPATH
@@ -61,6 +66,10 @@ case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends Te
super.defineSettings(settings, setOutDir) && {
// add the instrumented library version to classpath
settings.classpath prepend PathSettings.srcSpecLib.toString
+ // @partest maintainer: if we use a custom Scala build (specified via --classpath)
+ // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath
+ // this entire classpath business needs a thorough solution
+ if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", ""))
true
}
}
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index f74182e81c..3f2cb16082 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -53,6 +53,8 @@ class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
var CLASSPATH: String = join(origmanager.CLASSPATH, PathSettings.scalaCheck.path)
var LATEST_LIB: String = origmanager.LATEST_LIB
+ var LATEST_COMP: String = origmanager.LATEST_COMP
+ var LATEST_PARTEST: String = origmanager.LATEST_PARTEST
}
object Output {
@@ -518,7 +520,9 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor
runTestCommon(file, expectFailure = false)((logFile, outDir) => {
val dir = file.getParentFile
- execTest(outDir, logFile) && diffCheck(compareOutput(dir, logFile))
+ // adding code.jar to the classpath (to provide Code.lift services for reification tests)
+ execTest(outDir, logFile, PathSettings.srcCodeLib.toString) &&
+ diffCheck(compareOutput(dir, logFile))
})
// Apache Ant 1.6 or newer
diff --git a/src/partest/scala/tools/partest/utils/CodeTest.scala b/src/partest/scala/tools/partest/utils/CodeTest.scala
deleted file mode 100644
index c90168a313..0000000000
--- a/src/partest/scala/tools/partest/utils/CodeTest.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package utils
-
-import scala.reflect.Code
-import reflect.runtime.Mirror.ToolBox
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.Settings
-
-/** Runner for testing code tree lifting
- */
-object CodeTest {
- def static[T](code: () => T, args: Array[String] = Array()) = {
- println("static: "+code())
- }
-
- def apply[T](code: Code[T], args: Array[String] = Array()) = {
- println("testing: "+code.tree)
- val reporter = new ConsoleReporter(new Settings)
- val toolbox = new ToolBox(reporter, args mkString " ")
- val ttree = toolbox.typeCheck(code.tree, code.manifest.tpe)
- println("result = " + toolbox.showAttributed(ttree))
- val evaluated = toolbox.runExpr(ttree)
- println("evaluated = "+evaluated)
- evaluated
- }
-}
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 73220d1048..466ec53c79 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -72,7 +72,7 @@ class ByteArrayReader(content: Array[Byte]) {
   /** read a UTF-8 encoded string
*/
def nextUTF8(len: Int): String = {
- val cs = scala.io.Codec.fromUTF8(buf.slice(bp, bp + len))
+ val cs = scala.io.Codec.fromUTF8(buf, bp, len)
bp += len
new String(cs)
}
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 72b3824157..2cbeaa945f 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -41,31 +41,5 @@ object Classfiles {
CONSTANT_INTFMETHODREF -> "InterfaceMethod",
CONSTANT_NAMEANDTYPE -> "NameAndType"
)
-
- final val BAD_ATTR = 0x00000
- final val SOURCEFILE_ATTR = 0x00001
- final val SYNTHETIC_ATTR = 0x00002
- final val DEPRECATED_ATTR = 0x00004
- final val CODE_ATTR = 0x00008
- final val EXCEPTIONS_ATTR = 0x00010
- final val CONSTANT_VALUE_ATTR = 0x00020
- final val LINE_NUM_TABLE_ATTR = 0x00040
- final val LOCAL_VAR_TABLE_ATTR = 0x00080
- final val INNERCLASSES_ATTR = 0x08000
- final val META_ATTR = 0x10000
- final val SCALA_ATTR = 0x20000
-
- final val SOURCEFILE_N = "SourceFile"
- final val SYNTHETIC_N = "Synthetic"
- final val DEPRECATED_N = "Deprecated"
- final val CODE_N = "Code"
- final val EXCEPTIONS_N = "Exceptions"
- final val CONSTANT_VALUE_N = "ConstantValue"
- final val LINE_NUM_TABLE_N = "LineNumberTable"
- final val LOCAL_VAR_TABLE_N = "LocalVariableTable"
- final val INNERCLASSES_N = "InnerClasses"
- final val META_N = "JacoMeta"
- final val SCALA_N = "ScalaSignature"
- final val CONSTR_N = "<init>"
}
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index db9d6c5ed9..02b940ab16 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -9,7 +9,7 @@
package scala.tools.scalap
import java.io._
-
+import scala.reflect.NameTransformer
class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer) {
@@ -32,22 +32,22 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
}
def nameToClass(str: String): String = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.Any" else res
}
def nameToClass0(str: String) = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.AnyRef" else res
}
def nameToSimpleClass(str: String) =
- Names.decode(str.substring(str.lastIndexOf('/') + 1))
+ NameTransformer.decode(str.substring(str.lastIndexOf('/') + 1))
def nameToPackage(str: String) = {
val inx = str.lastIndexOf('/')
val name = if (inx == -1) str else str.substring(0, inx).replace('/', '.')
- Names.decode(name)
+ NameTransformer.decode(name)
}
def sigToType(str: String): String =
@@ -119,9 +119,9 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
def printField(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) {
print(flagsToStr(false, flags))
if ((flags & 0x0010) != 0)
- print("val " + Names.decode(getName(name)))
+ print("val " + NameTransformer.decode(getName(name)))
else
- print("final var " + Names.decode(getName(name)))
+ print("final var " + NameTransformer.decode(getName(name)))
print(": " + getType(tpe) + ";").newline
}
@@ -139,20 +139,20 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
if (getName(name) == "<init>") {
print("def this" + getType(tpe) + ";").newline
} else {
- print("def " + Names.decode(getName(name)))
+ print("def " + NameTransformer.decode(getName(name)))
print(getType(tpe) + ";").newline
}
case Some(str) =>
if (getName(name) == "<init>")
print("def this" + str + ";").newline
else
- print("def " + Names.decode(getName(name)) + str + ";").newline
+ print("def " + NameTransformer.decode(getName(name)) + str + ";").newline
}
case None =>
if (getName(name) == "<init>") {
print("def this" + getType(tpe) + ";").newline
} else {
- print("def " + Names.decode(getName(name)))
+ print("def " + NameTransformer.decode(getName(name)))
print(getType(tpe) + ";").newline
}
}
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 7254b00480..a8a9c65f63 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -8,6 +8,7 @@
package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
+import scala.reflect.NameTransformer
import scalax.rules.scalasig._
import tools.nsc.util.{ ClassPath, JavaClassPath }
import tools.util.PathResolver
@@ -96,7 +97,7 @@ class Main {
*/
def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
// find the classfile
- val encName = Names.encode(
+ val encName = NameTransformer.encode(
if (classname == "scala.AnyRef") "java.lang.Object"
else classname)
val cls = path.findClass(encName)
diff --git a/src/scalap/scala/tools/scalap/Names.scala b/src/scalap/scala/tools/scalap/Names.scala
deleted file mode 100644
index 1d66b31ce3..0000000000
--- a/src/scalap/scala/tools/scalap/Names.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* ___ ____ ___ __ ___ ___
-** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
-** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
-**
-*/
-
-
-package scala.tools.scalap
-
-
-object Names {
-
- val operatorName = new Array[String](128)
- operatorName('$') = "$"
- operatorName('~') = "$tilde"
- operatorName('=') = "$eq"
- operatorName('<') = "$less"
- operatorName('>') = "$greater"
- operatorName('!') = "$bang"
- operatorName('#') = "$hash"
- operatorName('%') = "$percent"
- operatorName('^') = "$up"
- operatorName('&') = "$amp"
- operatorName('|') = "$bar"
- operatorName('*') = "$times"
- operatorName('/') = "$div"
- operatorName('\\') = "$bslash"
- operatorName('+') = "$plus"
- operatorName('-') = "$minus"
- operatorName(':') = "$colon"
-
- /** Replace operator symbols by corresponding "$op_name" in names.
- */
- def encode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c < 128) {
- val nop = operatorName(c)
- if (nop == null)
- res.append(c)
- else
- res.append(nop)
- } else
- res.append(c)
- i = i + 1
- }
- res.toString()
- }
-
- /** Replace "$op_name" by corresponding operator symbols in names.
- */
- def decode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c == '$') {
- var j = len
- while (j > i) {
- val prefix = name.substring(i, j)
- val c = lookup(prefix)
- if (c != null) {
- i = j
- res.append(c)
- } else
- j = j - 1
- }
- } else {
- i = i + 1
- res.append(c)
- }
- }
- res.toString()
- }
-
- /** Looks up the array entry for the operator name.
- */
- def lookup(string: String): String = {
- var i = 0
- var res: String = null
- while (i < 128) {
- if (string.equals(operatorName(i))) {
- res = String.valueOf(i.asInstanceOf[Char])
- i = 128
- }
- i = i + 1
- }
- res
- }
-
-}
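
The hand-rolled Names encoder/decoder deleted above is replaced throughout scalap by scala.reflect.NameTransformer (see the JavaWriter and Main changes earlier in this diff). A quick sketch of the equivalent round trip:

    object NameTransformerDemo extends App {
      import scala.reflect.NameTransformer

      val encoded = NameTransformer.encode("::")     // "$colon$colon"
      val decoded = NameTransformer.decode(encoded)  // "::"
      assert(encoded == "$colon$colon" && decoded == "::")
      println(encoded + " decodes back to " + decoded)
    }
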
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index d53d8e1fc1..84f28af7ce 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -65,8 +65,11 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
   * stores an array of bytes for the decompiler
*/
def fromUTF8StringAndBytes = {
- val chunk: Array[Byte] = bytes drop pos take length
- StringBytesPair(io.Codec.fromUTF8(chunk).mkString, chunk)
+ val chunk: Array[Byte] = new Array[Byte](length)
+ System.arraycopy(bytes, pos, chunk, 0, length)
+ val str = new String(io.Codec.fromUTF8(bytes, pos, length))
+
+ StringBytesPair(str, chunk)
}
def byte(i : Int) = bytes(pos) & 0xFF
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index df78bad25e..aa454934c1 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -13,9 +13,8 @@ package scalasig
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
-
import scala.tools.scalap.scalax.util.StringUtil
-import reflect.NameTransformer
+import scala.reflect.NameTransformer
import java.lang.String
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
diff --git a/src/swing/scala/swing/test/ButtonApp.scala b/src/swing/scala/swing/test/ButtonApp.scala
deleted file mode 100644
index dcf567d365..0000000000
--- a/src/swing/scala/swing/test/ButtonApp.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package scala.swing
-package test
-
-import java.awt.Dimension
-
-import swing._
-import swing.event._
-
-object ButtonApp extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "My Frame"
- contents = new GridPanel(2, 2) {
- hGap = 3
- vGap = 3
- contents += new Button {
- text = "Press Me!"
- reactions += {
- case ButtonClicked(_) => text = "Hello Scala"
- }
- }
- }
- size = new Dimension(300, 80)
- }
-}
-
diff --git a/src/swing/scala/swing/test/CelsiusConverter.scala b/src/swing/scala/swing/test/CelsiusConverter.scala
deleted file mode 100644
index 4ead632d7a..0000000000
--- a/src/swing/scala/swing/test/CelsiusConverter.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-
-/** A GUI app to convert Celsius to Fahrenheit
- */
-object CelsiusConverter extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Convert Celsius to Fahrenheit"
- val tempCelsius = new TextField
- val celsiusLabel = new Label {
- text = "Celsius"
- border = Swing.EmptyBorder(5, 5, 5, 5)
- }
- val convertButton = new Button {
- text = "Convert"//new javax.swing.ImageIcon("c:\\workspace\\gui\\images\\convert.gif")
- //border = Border.Empty(5, 5, 5, 5)
- }
- val fahrenheitLabel = new Label {
- text = "Fahrenheit "
- border = Swing.EmptyBorder(5, 5, 5, 5)
- listenTo(convertButton, tempCelsius)
-
- def convert() {
- val c = Integer.parseInt(tempCelsius.text)
- val f = c * 9 / 5 + 32
- text = "<html><font color = red>"+f+"</font> Fahrenheit</html>"
- }
-
- reactions += {
- case ButtonClicked(_) | EditDone(_) => convert()
- }
- }
- contents = new GridPanel(2,2) {
- contents.append(tempCelsius, celsiusLabel, convertButton, fahrenheitLabel)
- border = Swing.EmptyBorder(10, 10, 10, 10)
- }
- //defaultButton = Some(convertButton)
- }
-}
-
diff --git a/src/swing/scala/swing/test/CelsiusConverter2.scala b/src/swing/scala/swing/test/CelsiusConverter2.scala
deleted file mode 100644
index 5ce1b157fe..0000000000
--- a/src/swing/scala/swing/test/CelsiusConverter2.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-
-object CelsiusConverter2 extends SimpleSwingApplication {
- def newField = new TextField {
- text = "0"
- columns = 5
- horizontalAlignment = Alignment.Right
- }
- val celsius = newField
- val fahrenheit = newField
-
- listenTo(fahrenheit, celsius)
- reactions += {
- case EditDone(`fahrenheit`) =>
- val f = Integer.parseInt(fahrenheit.text)
- val c = (f - 32) * 5 / 9
- celsius.text = c.toString
- case EditDone(`celsius`) =>
- val c = Integer.parseInt(celsius.text)
- val f = c * 9 / 5 + 32
- fahrenheit.text = f.toString
- }
-
- lazy val ui = new FlowPanel(celsius, new Label(" Celsius = "),
- fahrenheit, new Label(" Fahrenheit")) {
- border = Swing.EmptyBorder(15, 10, 10, 10)
- }
- def top = new MainFrame {
- title = "Convert Celsius / Fahrenheit"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/ComboBoxes.scala b/src/swing/scala/swing/test/ComboBoxes.scala
deleted file mode 100644
index cf1a70d97b..0000000000
--- a/src/swing/scala/swing/test/ComboBoxes.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-import java.util.Date
-import java.awt.Color
-import java.text.SimpleDateFormat
-import javax.swing.{Icon, ImageIcon}
-
-/**
- * Demonstrates how to use combo boxes and custom item renderers.
- *
- * TODO: clean up layout
- */
-object ComboBoxes extends SimpleSwingApplication {
- import ComboBox._
- lazy val ui = new FlowPanel {
- contents += new ComboBox(List(1,2,3,4))
-
- val patterns = List("dd MMMMM yyyy",
- "dd.MM.yy",
- "MM/dd/yy",
- "yyyy.MM.dd G 'at' hh:mm:ss z",
- "EEE, MMM d, ''yy",
- "h:mm a",
- "H:mm:ss:SSS",
- "K:mm a,z",
- "yyyy.MMMMM.dd GGG hh:mm aaa")
- val dateBox = new ComboBox(patterns) { makeEditable() }
- contents += dateBox
- val field = new TextField(20) { editable = false }
- contents += field
-
- reactions += {
- case SelectionChanged(`dateBox`) => reformat()
- }
- listenTo(dateBox.selection)
-
- def reformat() {
- try {
- val today = new Date
- val formatter = new SimpleDateFormat(dateBox.selection.item)
- val dateString = formatter.format(today)
- field.foreground = Color.black
- field.text = dateString
- } catch {
- case e: IllegalArgumentException =>
- field.foreground = Color.red
- field.text = "Error: " + e.getMessage
- }
- }
-
-
- val icons = try {
- List(new ImageIcon(resourceFromClassloader("images/margarita1.jpg")),
- new ImageIcon(resourceFromClassloader("images/margarita2.jpg")),
- new ImageIcon(resourceFromClassloader("images/rose.jpg")),
- new ImageIcon(resourceFromClassloader("images/banana.jpg")))
- } catch {
- case _ =>
- println("Couldn't load images for combo box")
- List(Swing.EmptyIcon)
- }
-
- val iconBox = new ComboBox(icons) {
- renderer = new ListView.AbstractRenderer[Icon, Label](new Label) {
- def configure(list: ListView[_], isSelected: Boolean, focused: Boolean, icon: Icon, index: Int) {
- component.icon = icon
- component.xAlignment = Alignment.Center
- if(isSelected) {
- component.border = Swing.LineBorder(list.selectionBackground, 3)
- } else {
- component.border = Swing.EmptyBorder(3)
- }
- }
- }
- }
- contents += iconBox
- }
-
- def top = new MainFrame {
- title = "ComboBoxes Demo"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/CountButton.scala b/src/swing/scala/swing/test/CountButton.scala
deleted file mode 100644
index 373db785ba..0000000000
--- a/src/swing/scala/swing/test/CountButton.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.swing
-package test
-
-import scala.swing._
-import scala.swing.event._
-
-object CountButton extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "My Frame"
- contents = new GridPanel(2, 2) {
- hGap = 3
- vGap = 3
- val button = new Button {
- text = "Press Me!"
- }
- contents += button
- val label = new Label {
- text = "No button clicks registered"
- }
- contents += label
-
- listenTo(button)
- var nclicks = 0
- reactions += {
- case ButtonClicked(b) =>
- nclicks += 1
- label.text = "Number of button clicks: "+nclicks
- }
- }
- }
-}
diff --git a/src/swing/scala/swing/test/Dialogs.scala b/src/swing/scala/swing/test/Dialogs.scala
deleted file mode 100644
index 14fa2febf2..0000000000
--- a/src/swing/scala/swing/test/Dialogs.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-
-object Dialogs extends SimpleSwingApplication {
- import TabbedPane._
-
- lazy val label = new Label("No Result yet")
- lazy val tabs = new TabbedPane {
- pages += new Page("File", new GridBagPanel { grid =>
- import GridBagPanel._
- val buttonText = new TextField("Click Me")
-
- val c = new Constraints
- c.fill = Fill.Horizontal
- c.grid = (1,1)
-
- val chooser = new FileChooser
- layout(new Button(Action("Open") {
- chooser.showOpenDialog(grid)
- })) = c
-
- c.grid = (1,2)
- layout(new Button(Action("Save") {
- chooser.showSaveDialog(grid)
- })) = c
-
- c.grid = (1,3)
- layout(new Button(Action("Custom") {
- chooser.showDialog(grid, buttonText.text)
- })) = c
-
- c.grid = (2,3)
- layout(new Label(" with Text ")) = c
-
- c.grid = (3,3)
- c.ipadx = 50
- layout(buttonText) = c
-
- border = Swing.EmptyBorder(5, 5, 5, 5)
- })
- pages += new Page("Simple Modal Dialogs", new BorderPanel {
- import BorderPanel._
- val mutex = new ButtonGroup
- val ok = new RadioButton("OK (in the L&F's words)")
- val ynlf = new RadioButton("Yes/No (in the L&F's words)")
- val ynp = new RadioButton("Yes/No (in the programmer's words)")
- val yncp = new RadioButton("Yes/No/Cancel (in the programmer's words)")
- val radios = List(ok, ynlf, ynp, yncp)
- mutex.buttons ++= radios
- mutex.select(ok)
- val buttons = new BoxPanel(Orientation.Vertical) {
- contents ++= radios
- }
- layout(buttons) = Position.North
- layout(new Button(Action("Show It!") {
- import Dialog._
- mutex.selected.get match {
- case `ok` =>
- showMessage(buttons, "Eggs aren't supposed to be green.")
- case `ynlf` =>
- label.text = showConfirmation(buttons,
- "Would you like green eggs and ham?",
- "An Inane Question") match {
- case Result.Yes => "Ewww!"
- case Result.No => "Me neither!"
- case _ => "Come on -- tell me!"
- }
- case `ynp` =>
- val options = List("Yes, please",
- "No, thanks",
- "No eggs, no ham!")
- label.text = showOptions(buttons,
- "Would you like some green eggs to go with that ham?",
- "A Silly Question",
- entries = options,
- initial = 2) match {
- case Result.Yes => "You're kidding!"
- case Result.No => "I don't like them, either."
- case _ => "Come on -- 'fess up!"
- }
- case `yncp` =>
- val options = List("Yes, please",
- "No, thanks",
- "No eggs, no ham!")
- label.text = showOptions(buttons,
- message = "Would you like some green eggs to go with that ham?",
- title = "A Silly Question",
- entries = options,
- initial = 2) match {
- case Result.Yes => "Here you go: green eggs and ham!"
- case Result.No => "OK, just the ham, then."
- case Result.Cancel => "Well, I'm certainly not going to eat them!"
- case _ => "Please tell me what you want!"
- }
- }
- })) = Position.South
- })
- pages += new Page("More Dialogs", new BorderPanel {
- import BorderPanel._
- val mutex = new ButtonGroup
- val pick = new RadioButton("Pick one of several choices")
- val enter = new RadioButton("Enter some text")
- val custom = new RadioButton("Custom")
- val customUndec = new RadioButton("Custom undecorated")
- val custom2 = new RadioButton("2 custom dialogs")
- val radios = List(pick, enter, custom, customUndec, custom2)
- mutex.buttons ++= radios
- mutex.select(pick)
- val buttons = new BoxPanel(Orientation.Vertical) {
- contents ++= radios
- }
- layout(buttons) = Position.North
- layout(new Button(Action("Show It!") {
- import Dialog._
- mutex.selected.get match {
- case `pick` =>
- val possibilities = List("ham", "spam", "yam")
- val s = showInput(buttons,
- "Complete the sentence:\n\"Green eggs and...\"",
- "Customized Dialog",
- Message.Plain,
- Swing.EmptyIcon,
- possibilities, "ham")
-
- //If a string was returned, say so.
- label.text = if ((s != None) && (s.get.length > 0))
- "Green eggs and... " + s.get + "!"
- else
- "Come on, finish the sentence!"
- case `enter` =>
- val s = showInput(buttons,
- "Complete the sentence:\n\"Green eggs and...\"",
- "Customized Dialog",
- Message.Plain,
- Swing.EmptyIcon,
- Nil, "ham")
-
- //If a string was returned, say so.
- label.text = if ((s != None) && (s.get.length > 0))
- "Green eggs and... " + s.get + "!"
- else
- "Come on, finish the sentence!"
- case `custom` =>
- val dialog = new Dialog(top)
- dialog.open()
- dialog.contents = Button("Close Me!") { dialog.close() }
- case `customUndec` =>
- val dialog = new Dialog with RichWindow.Undecorated
- dialog.open()
- dialog.contents = Button("Close Me!") { dialog.close() }
- case `custom2` =>
- val d1 = new Dialog
- val d2 = new Dialog(d1)
- d1.open()
- d2.open()
- d1.contents = Button("Close Me! I am the owner and will automatically close the other one") { d1.close() }
- d2.contents = Button("Close Me!") { d2.close() }
- }
- })) = Position.South
- })
- }
-
- lazy val ui: Panel = new BorderPanel {
- layout(tabs) = BorderPanel.Position.Center
- layout(label) = BorderPanel.Position.South
- }
-
-
- lazy val top = new MainFrame {
- title = "Dialog Demo"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/GridBagDemo.scala b/src/swing/scala/swing/test/GridBagDemo.scala
deleted file mode 100644
index ebb538f1c0..0000000000
--- a/src/swing/scala/swing/test/GridBagDemo.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-import GridBagPanel._
-import java.awt.Insets
-
-object GridBagDemo extends SimpleSwingApplication {
- lazy val ui = new GridBagPanel {
- val c = new Constraints
- val shouldFill = true
- if (shouldFill) {
- c.fill = Fill.Horizontal
- }
-
- val button1 = new Button("Button 1")
-
- c.weightx = 0.5
-
- c.fill = Fill.Horizontal
- c.gridx = 0;
- c.gridy = 0;
- layout(button1) = c
-
- val button2 = new Button("Button 2")
- c.fill = Fill.Horizontal
- c.weightx = 0.5;
- c.gridx = 1;
- c.gridy = 0;
- layout(button2) = c
-
- val button3 = new Button("Button 3")
- c.fill = Fill.Horizontal
- c.weightx = 0.5;
- c.gridx = 2;
- c.gridy = 0;
- layout(button3) = c
-
- val button4 = new Button("Long-Named Button 4")
- c.fill = Fill.Horizontal
- c.ipady = 40; //make this component tall
- c.weightx = 0.0;
- c.gridwidth = 3;
- c.gridx = 0;
- c.gridy = 1;
- layout(button4) = c
-
- val button5 = new Button("5")
- c.fill = Fill.Horizontal
- c.ipady = 0; //reset to default
- c.weighty = 1.0; //request any extra vertical space
- c.anchor = Anchor.PageEnd
- c.insets = new Insets(10,0,0,0); //top padding
- c.gridx = 1; //aligned with button 2
- c.gridwidth = 2; //2 columns wide
- c.gridy = 2; //third row
- layout(button5) = c
- }
-
- def top = new MainFrame {
- title = "GridBag Demo"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/HelloWorld.scala b/src/swing/scala/swing/test/HelloWorld.scala
deleted file mode 100644
index 6014a14b2d..0000000000
--- a/src/swing/scala/swing/test/HelloWorld.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-
-/**
- * A simple swing demo.
- */
-object HelloWorld extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Hello, World!"
- contents = new Button("Click Me!")
- }
-} \ No newline at end of file
diff --git a/src/swing/scala/swing/test/LabelTest.scala b/src/swing/scala/swing/test/LabelTest.scala
deleted file mode 100644
index 47eedb84ac..0000000000
--- a/src/swing/scala/swing/test/LabelTest.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package scala.swing
-package test
-
-import scala.swing._
-import scala.swing.event._
-
-object LabelTest extends SimpleSwingApplication {
- def top = new MainFrame{
- contents = new Label {
- text = "Hello"
- import java.awt.event._
- listenTo(mouse.clicks)
- reactions += {
- case MousePressed(_,_,_,_,_) =>
- println("Mouse pressed2")
- }
- }
- }
-}
-
diff --git a/src/swing/scala/swing/test/LinePainting.scala b/src/swing/scala/swing/test/LinePainting.scala
deleted file mode 100644
index 8588665ddc..0000000000
--- a/src/swing/scala/swing/test/LinePainting.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.swing
-package test
-
-import scala.swing.Swing._
-import scala.swing.{MainFrame, Panel}
-import scala.swing.event._
-import java.awt.{Color, Graphics2D, Point, geom}
-
-/**
- * Dragging the mouse draws a simple graph
- *
- * @author Frank Teubler, Ingo Maier
- */
-object LinePainting extends SimpleSwingApplication {
- lazy val ui = new Panel {
- background = Color.white
- preferredSize = (200,200)
-
- focusable = true
- listenTo(mouse.clicks, mouse.moves, keys)
-
- reactions += {
- case e: MousePressed =>
- moveTo(e.point)
- requestFocusInWindow()
- case e: MouseDragged => lineTo(e.point)
- case e: MouseReleased => lineTo(e.point)
- case KeyTyped(_,'c',_,_) =>
- path = new geom.GeneralPath
- repaint()
- case _: FocusLost => repaint()
- }
-
- /* records the dragging */
- var path = new geom.GeneralPath
-
- def lineTo(p: Point) { path.lineTo(p.x, p.y); repaint() }
- def moveTo(p: Point) { path.moveTo(p.x, p.y); repaint() }
-
- override def paintComponent(g: Graphics2D) = {
- super.paintComponent(g)
- g.setColor(new Color(100,100,100))
- g.drawString("Press left mouse button and drag to paint." +
- (if(hasFocus) " Press 'c' to clear." else ""), 10, size.height-10)
- g.setColor(Color.black)
- g.draw(path)
- }
- }
-
- def top = new MainFrame {
- title = "Simple Line Painting Demo"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/ListViewDemo.scala b/src/swing/scala/swing/test/ListViewDemo.scala
deleted file mode 100644
index 2b8c8c0719..0000000000
--- a/src/swing/scala/swing/test/ListViewDemo.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.swing
-package test
-
-object ListViewDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- case class City(name: String, country: String, population: Int, capital: Boolean)
- val items = List(City("Lausanne", "Switzerland", 129273, false),
- City("Paris", "France", 2203817, true),
- City("New York", "USA", 8363710 , false),
- City("Berlin", "Germany", 3416300, true),
- City("Tokio", "Japan", 12787981, true))
- import ListView._
- contents = new FlowPanel(new ScrollPane(new ListView(items) {
- renderer = Renderer(_.name)
- }))
- //new ScrollPane(new Table(items)))
- }
-}
diff --git a/src/swing/scala/swing/test/SimpleApplet.scala b/src/swing/scala/swing/test/SimpleApplet.scala
deleted file mode 100644
index d5f17f8a40..0000000000
--- a/src/swing/scala/swing/test/SimpleApplet.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.swing
-package test
-
-import event._
-
-class SimpleApplet extends Applet {
- object ui extends UI with Reactor {
- def init() = {
- val button = new Button("Press here!")
- val text = new TextArea("Java Version: " + util.Properties.javaVersion + "\n")
- listenTo(button)
- reactions += {
- case ButtonClicked(_) => text.text += "Button Pressed!\n"
- case _ =>
- }
- contents = new BoxPanel(Orientation.Vertical) { contents.append(button, text) }
- }
- }
-}
diff --git a/src/swing/scala/swing/test/SwingApp.scala b/src/swing/scala/swing/test/SwingApp.scala
deleted file mode 100644
index b47d778d3a..0000000000
--- a/src/swing/scala/swing/test/SwingApp.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-
-object SwingApp extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "SwingApp"
- var numclicks = 0
- object label extends Label {
- val prefix = "Number of button clicks: "
- text = prefix + "0 "
- listenTo(button)
- reactions += {
- case ButtonClicked(button) =>
- numclicks = numclicks + 1
- text = prefix + numclicks
- }
- }
- object button extends Button {
- text = "I am a button"
- }
- contents = new FlowPanel {
- contents.append(button, label)
- border = Swing.EmptyBorder(5, 5, 5, 5)
- }
- }
-}
-
diff --git a/src/swing/scala/swing/test/TableSelection.scala b/src/swing/scala/swing/test/TableSelection.scala
deleted file mode 100644
index bbfef80277..0000000000
--- a/src/swing/scala/swing/test/TableSelection.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package scala.swing
-package test
-
-import java.awt.Dimension
-import swing.event._
-
-object TableSelection extends SimpleSwingApplication {
- val model = Array(List("Mary", "Campione", "Snowboarding", 5, false).toArray,
- List("Alison", "Huml", "Rowing", 5, false).toArray,
- List("Kathy", "Walrath", "Knitting", 5, false).toArray,
- List("Sharon", "Zakhour", "Speed reading", 5, false).toArray,
- List("Philip", "Milne", "Pool", 5, false).toArray)
- /*val model = Array.tabulate(10000) { i =>
- List("Mary", "Campione", "Snowboarding", i, false).toArray
- }*/
-
- lazy val ui = new BoxPanel(Orientation.Vertical) {
- val table = new Table(model, Array("First Name", "Last Name", "Sport", "# of Years", "Vegetarian")) {
- preferredViewportSize = new Dimension(500, 70)
- }
- //1.6:table.fillsViewportHeight = true
- listenTo(table.selection)
-
- contents += new ScrollPane(table)
- contents += new Label("Selection Mode")
-
- def radio(mutex: ButtonGroup, text: String): RadioButton = {
- val b = new RadioButton(text)
- listenTo(b)
- mutex.buttons += b
- contents += b
- b
- }
-
- val intervalMutex = new ButtonGroup
- val multiInterval = radio(intervalMutex, "Multiple Interval Selection")
- val elementInterval = radio(intervalMutex, "Single Selection")
- val singleInterval = radio(intervalMutex, "Single Interval Selection")
- intervalMutex.select(multiInterval)
-
- contents += new Label("Selection Options")
- val elemMutex = new ButtonGroup
- val rowSelection = radio(elemMutex, "Row Selection")
- val columnSelection = radio(elemMutex, "Column Selection")
- val cellSelection = radio(elemMutex, "Cell Selection")
- elemMutex.select(rowSelection)
-
- val output = new TextArea(5, 40) { editable = false }
- contents += new ScrollPane(output)
-
- def outputSelection() {
- output.append("Lead: " + table.selection.rows.leadIndex + "," +
- table.selection.columns.leadIndex + ". ")
- output.append("Rows:")
- for (c <- table.selection.rows) output.append(" " + c)
- output.append(". Columns:")
- for (c <- table.selection.columns) output.append(" " + c)
- output.append(".\n")
- }
-
- reactions += {
- case ButtonClicked(`multiInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.MultiInterval
- if (cellSelection.selected) {
- elemMutex.select(rowSelection)
- table.selection.elementMode = Table.ElementMode.None
- }
- cellSelection.enabled = false
- case ButtonClicked(`elementInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.Single
- cellSelection.enabled = true
- case ButtonClicked(`singleInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.SingleInterval
- cellSelection.enabled = true
- case ButtonClicked(`rowSelection`) =>
- if (rowSelection.selected)
- table.selection.elementMode = Table.ElementMode.Row
- case ButtonClicked(`columnSelection`) =>
- if (columnSelection.selected)
- table.selection.elementMode = Table.ElementMode.Column
- case ButtonClicked(`cellSelection`) =>
- if (cellSelection.selected)
- table.selection.elementMode = Table.ElementMode.Cell
- case TableRowsSelected(_, range, false) =>
- output.append("Rows selected, changes: " + range + "\n")
- outputSelection()
- case TableColumnsSelected(_, range, false) =>
- output.append("Columns selected, changes " + range + "\n")
- outputSelection()
- }
- }
-
- def top = new MainFrame {
- title = "Table Selection"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/UIDemo.scala b/src/swing/scala/swing/test/UIDemo.scala
deleted file mode 100644
index 9207c82948..0000000000
--- a/src/swing/scala/swing/test/UIDemo.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-import Swing._
-import ListView._
-
-object UIDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Scala Swing Demo"
-
- /*
- * Create a menu bar with a couple of menus and menu items and
- * set the result as this frame's menu bar.
- */
- menuBar = new MenuBar {
- contents += new Menu("A Menu") {
- contents += new MenuItem("An item")
- contents += new MenuItem(Action("An action item") {
- println("Action '"+ title +"' invoked")
- })
- contents += new Separator
- contents += new CheckMenuItem("Check me")
- contents += new CheckMenuItem("Me too!")
- contents += new Separator
- val a = new RadioMenuItem("a")
- val b = new RadioMenuItem("b")
- val c = new RadioMenuItem("c")
- val mutex = new ButtonGroup(a,b,c)
- contents ++= mutex.buttons
- }
- contents += new Menu("Empty Menu")
- }
-
- /*
- * The root component in this frame is a panel with a border layout.
- */
- contents = new BorderPanel {
- import BorderPanel.Position._
-
- var reactLive = false
-
- val tabs = new TabbedPane {
- import TabbedPane._
- val buttons = new FlowPanel {
- border = Swing.EmptyBorder(5,5,5,5)
-
- contents += new BoxPanel(Orientation.Vertical) {
- border = CompoundBorder(TitledBorder(EtchedBorder, "Radio Buttons"), EmptyBorder(5,5,5,10))
- val a = new RadioButton("Green Vegetables")
- val b = new RadioButton("Red Meat")
- val c = new RadioButton("White Tofu")
- val mutex = new ButtonGroup(a,b,c)
- contents ++= mutex.buttons
- }
- contents += new BoxPanel(Orientation.Vertical) {
- border = CompoundBorder(TitledBorder(EtchedBorder, "Check Boxes"), EmptyBorder(5,5,5,10))
- val paintLabels = new CheckBox("Paint Labels")
- val paintTicks = new CheckBox("Paint Ticks")
- val snapTicks = new CheckBox("Snap To Ticks")
- val live = new CheckBox("Live")
- contents.append(paintLabels, paintTicks, snapTicks, live)
- listenTo(paintLabels, paintTicks, snapTicks, live)
- reactions += {
- case ButtonClicked(`paintLabels`) =>
- slider.paintLabels = paintLabels.selected
- case ButtonClicked(`paintTicks`) =>
- slider.paintTicks = paintTicks.selected
- case ButtonClicked(`snapTicks`) =>
- slider.snapToTicks = snapTicks.selected
- case ButtonClicked(`live`) =>
- reactLive = live.selected
- }
- }
- contents += new Button(Action("Center Frame") { centerOnScreen() })
- }
- pages += new Page("Buttons", buttons)
- pages += new Page("GridBag", GridBagDemo.ui)
- pages += new Page("Converter", CelsiusConverter2.ui)
- pages += new Page("Tables", TableSelection.ui)
- pages += new Page("Dialogs", Dialogs.ui)
- pages += new Page("Combo Boxes", ComboBoxes.ui)
- pages += new Page("Split Panes",
- new SplitPane(Orientation.Vertical, new Button("Hello"), new Button("World")) {
- continuousLayout = true
- })
-
- val password = new FlowPanel {
- contents += new Label("Enter your secret password here ")
- val field = new PasswordField(10)
- contents += field
- val label = new Label(field.text)
- contents += label
- listenTo(field)
- reactions += {
- case EditDone(`field`) => label.text = field.password.mkString
- }
- }
-
- pages += new Page("Password", password)
- pages += new Page("Painting", LinePainting.ui)
- //pages += new Page("Text Editor", TextEditor.ui)
- }
-
- val list = new ListView(tabs.pages) {
- selectIndices(0)
- selection.intervalMode = ListView.IntervalMode.Single
- renderer = ListView.Renderer(_.title)
- }
- val center = new SplitPane(Orientation.Vertical, new ScrollPane(list), tabs) {
- oneTouchExpandable = true
- continuousLayout = true
- }
- layout(center) = Center
-
- /*
- * This slider is used above, so we need lazy initialization semantics.
- * Objects or lazy vals are the way to go, but objects give us better
- * type inference at times.
- */
- object slider extends Slider {
- min = 0
- value = tabs.selection.index
- max = tabs.pages.size-1
- majorTickSpacing = 1
- }
- layout(slider) = South
-
- /*
- * Establish connection between the tab pane, slider, and list view.
- */
- listenTo(slider)
- listenTo(tabs.selection)
- listenTo(list.selection)
- reactions += {
- case ValueChanged(`slider`) =>
- if(!slider.adjusting || reactLive) tabs.selection.index = slider.value
- case SelectionChanged(`tabs`) =>
- slider.value = tabs.selection.index
- list.selectIndices(tabs.selection.index)
- case SelectionChanged(`list`) =>
- if (list.selection.items.length == 1)
- tabs.selection.page = list.selection.items(0)
- }
- }
- }
-} \ No newline at end of file
diff --git a/src/swing/scala/swing/test/images/banana.jpg b/src/swing/scala/swing/test/images/banana.jpg
deleted file mode 100644
index 62267a4325..0000000000
--- a/src/swing/scala/swing/test/images/banana.jpg
+++ /dev/null
Binary files differ
diff --git a/src/swing/scala/swing/test/images/margarita1.jpg b/src/swing/scala/swing/test/images/margarita1.jpg
deleted file mode 100644
index d315f7c79f..0000000000
--- a/src/swing/scala/swing/test/images/margarita1.jpg
+++ /dev/null
Binary files differ
diff --git a/src/swing/scala/swing/test/images/margarita2.jpg b/src/swing/scala/swing/test/images/margarita2.jpg
deleted file mode 100644
index c8b076e5f9..0000000000
--- a/src/swing/scala/swing/test/images/margarita2.jpg
+++ /dev/null
Binary files differ
diff --git a/src/swing/scala/swing/test/images/rose.jpg b/src/swing/scala/swing/test/images/rose.jpg
deleted file mode 100644
index d4a2b58062..0000000000
--- a/src/swing/scala/swing/test/images/rose.jpg
+++ /dev/null
Binary files differ