summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorVojin Jovanovic <vojin.jovanovic@epfl.ch>2012-01-09 18:11:10 +0100
committerVojin Jovanovic <vojin.jovanovic@epfl.ch>2012-01-09 18:11:10 +0100
commit1126912bce5d098571c1bd29a04e4781e19c3d85 (patch)
tree79e753014c1adfa26e39e32911125bfc0ad8ee4c
parent5a4b555c378d79d57b59dfd6edafd2b9a59866bb (diff)
parent820491ed6376e9f8f8a8102387495113dce55444 (diff)
downloadscala-1126912bce5d098571c1bd29a04e4781e19c3d85.tar.gz
scala-1126912bce5d098571c1bd29a04e4781e19c3d85.tar.bz2
scala-1126912bce5d098571c1bd29a04e4781e19c3d85.zip
Merge branch 'master' into execution-context
-rw-r--r--.gitignore1
-rw-r--r--README.rst1
-rw-r--r--build.xml120
-rw-r--r--gitignore.SAMPLE23
-rw-r--r--project.SAMPLE (renamed from .project)0
-rw-r--r--project/Build.scala559
-rw-r--r--project/Layers.scala92
-rw-r--r--project/Partest.scala141
-rw-r--r--project/Release.scala115
-rw-r--r--project/Sametest.scala66
-rw-r--r--project/ShaResolve.scala117
-rw-r--r--project/VerifyClassLoad.scala46
-rw-r--r--project/build.properties11
-rw-r--r--project/build/AdditionalResources.scala81
-rw-r--r--project/build/BasicLayer.scala296
-rw-r--r--project/build/BuildInfoEnvironment.scala21
-rw-r--r--project/build/Comparator.scala72
-rw-r--r--project/build/Compilation.scala104
-rw-r--r--project/build/CompilationStep.scala39
-rw-r--r--project/build/ForkSBT.scala49
-rw-r--r--project/build/Packer.scala122
-rwxr-xr-xproject/build/Partest.scala370
-rw-r--r--project/build/PathConfig.scala43
-rw-r--r--project/build/SVN.scala36
-rw-r--r--project/build/ScalaBuildProject.scala36
-rw-r--r--project/build/ScalaSBTBuilder.scala362
-rw-r--r--project/build/ScalaTools.scala179
-rw-r--r--project/build/Scaladoc.scala48
-rw-r--r--project/plugins.sbt9
-rw-r--r--project/plugins/Plugins.scala6
-rw-r--r--project/project/Build.scala7
-rw-r--r--src/actors/scala/actors/ActorTask.scala13
-rw-r--r--src/actors/scala/actors/ReplyReactorTask.scala13
-rw-r--r--src/compiler/scala/reflect/internal/Chars.scala36
-rw-r--r--src/compiler/scala/reflect/internal/Constants.scala2
-rw-r--r--src/compiler/scala/reflect/internal/Definitions.scala386
-rw-r--r--src/compiler/scala/reflect/internal/Flags.scala33
-rw-r--r--src/compiler/scala/reflect/internal/Importers.scala10
-rw-r--r--src/compiler/scala/reflect/internal/Kinds.scala221
-rw-r--r--src/compiler/scala/reflect/internal/NameManglers.scala53
-rw-r--r--src/compiler/scala/reflect/internal/Names.scala211
-rw-r--r--src/compiler/scala/reflect/internal/StdNames.scala506
-rw-r--r--src/compiler/scala/reflect/internal/SymbolTable.scala6
-rw-r--r--src/compiler/scala/reflect/internal/Symbols.scala176
-rw-r--r--src/compiler/scala/reflect/internal/TreeGen.scala29
-rw-r--r--src/compiler/scala/reflect/internal/TreeInfo.scala66
-rw-r--r--src/compiler/scala/reflect/internal/Trees.scala21
-rw-r--r--src/compiler/scala/reflect/internal/Types.scala1053
-rw-r--r--src/compiler/scala/reflect/internal/pickling/UnPickler.scala24
-rw-r--r--src/compiler/scala/reflect/internal/util/Collections.scala158
-rw-r--r--src/compiler/scala/reflect/internal/util/Origins.scala (renamed from src/compiler/scala/tools/nsc/util/Origins.scala)16
-rw-r--r--src/compiler/scala/reflect/runtime/JavaToScala.scala48
-rw-r--r--src/compiler/scala/reflect/runtime/ScalaToJava.scala14
-rw-r--r--src/compiler/scala/reflect/runtime/ToolBoxes.scala66
-rw-r--r--src/compiler/scala/reflect/runtime/TreeBuildUtil.scala10
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl1
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala3
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala8
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala45
-rw-r--r--src/compiler/scala/tools/nsc/MacroContext.scala10
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ScalaDoc.scala8
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala18
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala16
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala116
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala44
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala156
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala20
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala17
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala64
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala43
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala32
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala67
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala17
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala10
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala44
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala4
-rw-r--r--src/compiler/scala/tools/nsc/doc/DocFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala18
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Dossiers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala31
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoop.scala69
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala113
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Imports.scala17
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala76
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Naming.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Power.scala122
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ReplVals.scala75
-rw-r--r--src/compiler/scala/tools/nsc/io/ZipArchive.scala12
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala10
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala23
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTable.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala74
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala19
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala200
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala51
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala25
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala68
-rw-r--r--src/compiler/scala/tools/nsc/transform/LiftCode.scala145
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala44
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala113
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala26
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala101
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala24
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala28
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala247
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala1282
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala106
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala11
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala105
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala15
-rw-r--r--src/compiler/scala/tools/nsc/util/ProxyReport.scala14
-rw-r--r--src/compiler/scala/tools/reflect/Mock.scala3
-rw-r--r--src/compiler/scala/tools/util/EditDistance.scala38
-rw-r--r--src/compiler/scala/tools/util/Javap.scala22
-rw-r--r--src/compiler/scala/tools/util/StringOps.scala7
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala45
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala2
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala20
-rw-r--r--src/library/scala/Either.scala8
-rw-r--r--src/library/scala/Enumeration.scala35
-rw-r--r--src/library/scala/MatchingStrategy.scala23
-rw-r--r--src/library/scala/Option.scala7
-rw-r--r--src/library/scala/Symbol.scala4
-rw-r--r--src/library/scala/annotation/migration.scala18
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala5
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala2
-rw-r--r--src/library/scala/collection/Iterator.scala17
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala6
-rw-r--r--src/library/scala/collection/MapLike.scala4
-rw-r--r--src/library/scala/collection/SetLike.scala2
-rw-r--r--src/library/scala/collection/TraversableLike.scala5
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala11
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/immutable/List.scala247
-rw-r--r--src/library/scala/collection/immutable/Map.scala1
-rw-r--r--src/library/scala/collection/immutable/Range.scala122
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala22
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala99
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala4
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala30
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala2
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala30
-rw-r--r--src/library/scala/collection/mutable/Stack.scala6
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala13
-rw-r--r--src/library/scala/io/Codec.scala21
-rw-r--r--src/library/scala/math/BigDecimal.scala1
-rw-r--r--src/library/scala/math/Ordered.scala4
-rw-r--r--src/library/scala/math/Ordering.scala42
-rw-r--r--src/library/scala/reflect/ClassManifest.scala4
-rw-r--r--src/library/scala/reflect/Manifest.scala66
-rw-r--r--src/library/scala/reflect/api/MacroContext.scala15
-rw-r--r--src/library/scala/reflect/api/Trees.scala11
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala4
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java18
-rw-r--r--src/library/scala/util/Properties.scala5
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala26
-rw-r--r--src/library/scala/xml/Elem.scala4
-rw-r--r--src/library/scala/xml/MetaData.scala4
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala15
-rw-r--r--src/library/scala/xml/UnprefixedAttribute.scala2
-rw-r--r--src/library/scala/xml/Utility.scala2
-rw-r--r--src/manual/scala/tools/docutil/EmitHtml.scala154
-rw-r--r--src/manual/scala/tools/docutil/EmitManPage.scala16
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala3
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala65
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala2
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala20
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Names.scala96
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala7
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala3
-rw-r--r--test/benchmarks/src/scala/collection/immutable/range-bench.scala61
-rw-r--r--test/disabled/properties.check (renamed from test/files/presentation/properties.check)0
-rw-r--r--test/disabled/properties/Runner.scala (renamed from test/files/presentation/properties/Runner.scala)0
-rw-r--r--test/disabled/properties/src/properties.scala (renamed from test/files/presentation/properties/src/properties.scala)0
-rw-r--r--test/files/continuations-neg/t2949.check2
-rwxr-xr-xtest/files/jvm/mkLibNatives.bat2
-rw-r--r--test/files/jvm/serialization.check8
-rw-r--r--test/files/jvm/xml03syntax.check2
-rw-r--r--test/files/neg/checksensible.check8
-rw-r--r--test/files/neg/logImplicits.check19
-rw-r--r--test/files/neg/logImplicits.flags1
-rw-r--r--test/files/neg/logImplicits.scala25
-rw-r--r--test/files/neg/migration28.check5
-rw-r--r--test/files/neg/names-defaults-neg.check17
-rw-r--r--test/files/neg/t1960.check2
-rw-r--r--test/files/neg/t5354.check7
-rw-r--r--test/files/neg/t5354.scala15
-rw-r--r--test/files/neg/t5357.check4
-rw-r--r--test/files/neg/t5357.scala9
-rwxr-xr-xtest/files/pos/t1459/App.scala2
-rw-r--r--test/files/pos/t4063.scala39
-rw-r--r--test/files/pos/t4070.scala37
-rw-r--r--test/files/pos/t4070b.scala35
-rw-r--r--test/files/pos/t4273.scala8
-rw-r--r--test/files/pos/t5020.scala19
-rw-r--r--test/files/pos/t5119.scala13
-rw-r--r--test/files/pos/t5175.flags1
-rw-r--r--test/files/pos/t5175.scala9
-rw-r--r--test/files/pos/t5317.scala12
-rw-r--r--test/files/pos/t5359.scala17
-rw-r--r--test/files/pos/virtpatmat_alts_subst.flags1
-rw-r--r--test/files/pos/virtpatmat_alts_subst.scala6
-rw-r--r--test/files/pos/virtpatmat_binding_opt.flags1
-rw-r--r--test/files/pos/virtpatmat_binding_opt.scala11
-rw-r--r--test/files/run/array-existential-bound.check4
-rw-r--r--test/files/run/array-existential-bound.scala17
-rw-r--r--test/files/run/mixin-bridge-methods.scala14
-rw-r--r--test/files/run/origins.scala2
-rw-r--r--test/files/run/repl-power.check22
-rw-r--r--test/files/run/repl-power.scala4
-rw-r--r--test/files/run/t3758.scala10
-rw-r--r--test/files/run/t4024.scala11
-rw-r--r--test/files/run/t4871.check2
-rw-r--r--test/files/run/t4871.scala12
-rw-r--r--test/files/run/t5053.check6
-rw-r--r--test/files/run/t5053.scala20
-rw-r--r--test/files/run/t5239.check13
-rw-r--r--test/files/run/t5266_1.check (renamed from test/pending/run/t5266_1.check)0
-rw-r--r--test/files/run/t5266_1.scala (renamed from test/files/run/t5239.scala)12
-rw-r--r--test/files/run/t5266_2.check (renamed from test/pending/run/t5266_2.check)0
-rw-r--r--test/files/run/t5266_2.scala (renamed from test/pending/run/t5266_2.scala)2
-rw-r--r--test/files/run/t5293.scala83
-rw-r--r--test/files/run/t5300.scala7
-rw-r--r--test/files/run/t5356.check6
-rw-r--r--test/files/run/t5356.scala12
-rw-r--r--test/files/run/treePrint.scala2
-rw-r--r--test/files/run/type-currying.check27
-rw-r--r--test/files/run/type-currying.scala58
-rw-r--r--test/files/run/virtpatmat_literal.scala3
-rw-r--r--test/files/run/virtpatmat_opt_sharing.check1
-rw-r--r--test/files/run/virtpatmat_opt_sharing.flags1
-rw-r--r--test/files/run/virtpatmat_opt_sharing.scala10
-rw-r--r--test/files/run/virtpatmat_unapplyprod.check4
-rw-r--r--test/files/run/virtpatmat_unapplyprod.flags1
-rw-r--r--test/files/run/virtpatmat_unapplyprod.scala23
-rw-r--r--test/files/run/xml-attribute.scala33
-rw-r--r--test/files/scalacheck/CheckEither.scala8
-rw-r--r--test/files/scalacheck/nan-ordering.scala130
-rw-r--r--test/files/scalacheck/range.scala81
-rwxr-xr-xtest/partest.bat1
-rw-r--r--test/pending/run/reify_closure1.check2
-rw-r--r--test/pending/run/reify_closure1.scala20
-rw-r--r--test/pending/run/reify_closure2a.check2
-rw-r--r--test/pending/run/reify_closure2a.scala20
-rw-r--r--test/pending/run/reify_closure2b.check2
-rw-r--r--test/pending/run/reify_closure2b.scala22
-rw-r--r--test/pending/run/reify_closure3a.check2
-rw-r--r--test/pending/run/reify_closure3a.scala22
-rw-r--r--test/pending/run/reify_closure3b.check2
-rw-r--r--test/pending/run/reify_closure3b.scala24
-rw-r--r--test/pending/run/reify_closure4a.check2
-rw-r--r--test/pending/run/reify_closure4a.scala22
-rw-r--r--test/pending/run/reify_closure4b.check2
-rw-r--r--test/pending/run/reify_closure4b.scala24
-rw-r--r--test/pending/run/reify_closure5a.check2
-rw-r--r--test/pending/run/reify_closure5a.scala20
-rw-r--r--test/pending/run/reify_closure5b.check2
-rw-r--r--test/pending/run/reify_closure5b.scala22
-rw-r--r--test/pending/run/reify_closure6.check3
-rw-r--r--test/pending/run/reify_closure6.scala26
-rw-r--r--test/pending/run/t5266_1.scala23
-rw-r--r--test/pending/run/t5334_1.scala15
-rw-r--r--test/pending/run/t5334_2.scala15
-rw-r--r--test/scaladoc/resources/SI_5054.scala10
-rw-r--r--test/scaladoc/resources/SI_5054_q1.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q2.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q3.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q4.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q5.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q6.scala9
-rw-r--r--test/scaladoc/resources/SI_5054_q7.scala22
-rw-r--r--test/scaladoc/resources/SI_5287.scala17
-rw-r--r--test/scaladoc/scala/html.flags1
-rw-r--r--test/scaladoc/scala/html/HtmlFactoryTest.flags1
-rw-r--r--test/scaladoc/scala/html/HtmlFactoryTest.scala96
-rwxr-xr-xtools/binary-repo-lib.sh4
-rwxr-xr-xtools/epfl-build28
-rwxr-xr-xtools/epfl-publish50
-rwxr-xr-xtools/get-scala-revision29
317 files changed, 8219 insertions, 5526 deletions
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index d392f0e82c..0000000000
--- a/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-*.jar
diff --git a/README.rst b/README.rst
index 951e1ce17c..940d948dd5 100644
--- a/README.rst
+++ b/README.rst
@@ -24,7 +24,6 @@ build script or user-created if needed. This is not a complete listing. ::
| +--scala-library.jar The stable reference ('starr') library jar
| +--scala-library-src.jar A snapshot of the source used to build starr.
| ---ant/ Support libraries for ant.
- +--project/ The (already legacy) 0.7 sbt build.
+--pull-binary-libs.sh Pulls binary artifacts from remote repository.
+--push-binary-libs.sh Pushes new binary artifacts and creates sha.
+--README.rst The file you are currently reading.
diff --git a/build.xml b/build.xml
index 6a708755a3..592036c67b 100644
--- a/build.xml
+++ b/build.xml
@@ -123,6 +123,18 @@ END-USER TARGETS
<antcall target="palo.done"/>
</target>
+ <target name="fastlocker"
+ description="Buildlocker without extra fuss">
+ <antcall target="locker.unlock"/>
+ <antcall target="locker.done"/>
+ </target>
+
+ <target name="buildlocker"
+ description="Does the same for locker as build does for quick">
+ <antcall target="locker.unlock"/>
+ <antcall target="palo.bin"/>
+ </target>
+
<target name="newlibs"
description="Requires libraries (MSIL, FJBG) to be rebuilt. Add this target before any other if class file format is incompatible.">
<property name="libs.outdated" value="yes"/>
@@ -189,9 +201,6 @@ PROPERTIES
<!-- if ANT_OPTS is already set by the environment, it will be unaltered,
but if it is unset it will take this default value. -->
<property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
-
- <!-- to find max heap usage: -Xaprof ; currently at 980M for locker.comp -->
- <echo message="Forking with JVM opts: ${env.ANT_OPTS} ${jvm.opts}" />
<property
name="scalacfork.jvmargs"
@@ -216,15 +225,14 @@ INITIALISATION
<property name="scalac.args.optimise" value=""/>
<!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
This is to facilitate testing new command line options which do not yet exist in starr. -->
- <property name="scalac.args.quickonly" value=""/>
-
+ <property name="scalac.args.quickonly" value=""/>
<property name="scalac.args.all" value="${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
<!-- Setting-up Ant contrib tasks -->
<taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
<!-- This is the start time for the distribution -->
<tstamp prefix="time">
- <format property="human" pattern="d MMMM yyyy, HH:mm:ss"/>
+ <format property="human" pattern="d MMMM yyyy, HH:mm:ss" locale="en,US"/>
<format property="short" pattern="yyyyMMddHHmmss"/>
</tstamp>
<!-- Find out whether we are running on Windows -->
@@ -236,7 +244,6 @@ INITIALISATION
<exec osfamily="windows" executable="tools/get-scala-revision.bat" outputproperty="git.describe" failifexecutionfails="false" />
<!-- some default in case something went wrong getting the revision -->
<property name="git.describe" value="-unknown-"/>
-
<property name="init.avail" value="yes"/>
<!-- Generating version number -->
@@ -244,10 +251,14 @@ INITIALISATION
<property
name="version.number"
value="${version.major}.${version.minor}.${version.patch}.${git.describe}"/>
-
+
<!-- And print-out what we are building -->
- <echo level="info" message="Build number is '${version.number}'"/>
- <echo level="info" message="Built ${time.human} from revision ${git.describe} with ${java.vm.name} ${java.version}"/>
+ <echo message=" build time: ${time.human}" />
+ <echo message=" java version: ${java.vm.name} ${java.version}" />
+ <echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
+ <echo message=" javac args: ${javac.args}" />
+ <echo message=" scalac args: ${scalac.args}" />
+ <echo message=" build number: ${version.number}" />
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
@@ -412,12 +423,34 @@ LOCAL REFERENCE BUILD (LOCKER)
<delete dir="${build-locker.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
- <target name="locker.unlock">
- <delete file="${build-locker.dir}/all.complete"/>
+ <target name="locker.unlock.pre-lib">
+ <uptodate property="locker.lib.available" targetfile="${build-locker.dir}/library.complete">
+ <srcfiles dir="${src.dir}">
+ <include name="library/**"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="locker.unlock.lib" depends="locker.unlock.pre-lib" unless="locker.lib.available">
<delete file="${build-locker.dir}/library.complete"/>
+ </target>
+
+ <target name="locker.unlock.pre-comp" depends="locker.unlock.lib">
+ <uptodate property="locker.comp.available" targetfile="${build-locker.dir}/compiler.complete">
+ <srcfiles dir="${src.dir}">
+ <include name="compiler/**"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="locker.unlock.comp" depends="locker.unlock.pre-comp" unless="locker.comp.available">
<delete file="${build-locker.dir}/compiler.complete"/>
</target>
+ <target name="locker.unlock" depends="locker.unlock.comp">
+ <delete file="${build-locker.dir}/all.complete" />
+ </target>
+
<!-- ===========================================================================
PACKED LOCKER BUILD (PALO)
============================================================================ -->
@@ -450,7 +483,10 @@ PACKED LOCKER BUILD (PALO)
<jar destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
<fileset dir="${build-locker.dir}/classes/compiler"/>
<!-- filemode / dirmode: see trac ticket #1294 -->
+ <zipfileset dirmode="755" filemode="644" src="${lib.dir}/fjbg.jar"/>
+ <zipfileset dirmode="755" filemode="644" src="${lib.dir}/msil.jar"/>
</jar>
+ <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
</target>
<target name="palo.done" depends="palo.comp">
@@ -460,6 +496,51 @@ PACKED LOCKER BUILD (PALO)
<delete dir="${build-palo.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
+ <target name="palo.pre-bin" depends="palo.comp">
+ <uptodate property="palo.bin.available" targetfile="${build-locker.dir}/bin.complete">
+ <srcfiles dir="${src.dir}">
+ <include name="compiler/scala/tools/ant/templates/**"/>
+ </srcfiles>
+ </uptodate>
+ </target>
+
+ <target name="palo.bin" depends="palo.pre-bin" unless="palo.bin.available">
+ <taskdef name="palo-bin" classname="scala.tools.ant.ScalaTool">
+ <classpath>
+ <pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-palo.dir}/lib/jline.jar"/>
+ </classpath>
+ </taskdef>
+ <mkdir dir="${build-palo.dir}/bin"/>
+ <palo-bin
+ file="${build-palo.dir}/bin/scala"
+ class="scala.tools.nsc.MainGenericRunner"
+ javaFlags="${java.flags}"/>
+ <palo-bin
+ file="${build-palo.dir}/bin/scalac"
+ class="scala.tools.nsc.Main"
+ javaFlags="${java.flags}"/>
+ <palo-bin
+ file="${build-palo.dir}/bin/scaladoc"
+ class="scala.tools.nsc.ScalaDoc"
+ javaFlags="${java.flags}"/>
+ <palo-bin
+ file="${build-palo.dir}/bin/fsc"
+ class="scala.tools.nsc.CompileClient"
+ javaFlags="${java.flags}"/>
+ <palo-bin
+ file="${build-palo.dir}/bin/scalap"
+ class="scala.tools.scalap.Main"
+ javaFlags="${java.flags}"/>
+ <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scala"/>
+ <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scalac"/>
+ <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scaladoc"/>
+ <chmod perm="ugo+rx" file="${build-palo.dir}/bin/fsc"/>
+ <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scalap"/>
+ <touch file="${build-locker.dir}/bin.complete" verbose="no"/>
+ </target>
+
<!-- ===========================================================================
QUICK BUILD (QUICK)
============================================================================ -->
@@ -662,7 +743,14 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.plugins.timer" action="total"/>
</target>
- <target name="quick.scalacheck" depends="quick.plugins">
+ <target name="quick.pre-scalacheck" depends="quick.plugins">
+ <uptodate property="quick.scalacheck.available" targetfile="${build-quick.dir}/scalacheck.complete">
+ <srcfiles dir="${src.dir}/scalacheck"/>
+ </uptodate>
+ </target>
+
+ <target name="quick.scalacheck" depends="quick.pre-scalacheck" unless="quick.scalacheck.available">
+ <stopwatch name="quick.scalacheck.timer"/>
<mkdir dir="${build-quick.dir}/classes/scalacheck"/>
<scalacfork
destdir="${build-quick.dir}/classes/scalacheck"
@@ -676,6 +764,8 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalacheck"/>
</compilationpath>
</scalacfork>
+ <touch file="${build-quick.dir}/scalacheck.complete" verbose="no"/>
+ <stopwatch name="quick.scalacheck.timer" action="total"/>
</target>
<target name="quick.pre-scalap" depends="quick.scalacheck">
@@ -1688,7 +1778,9 @@ DISTRIBUTION
</copy>
<mkdir dir="${dist.dir}/doc/scala-devel-docs/examples"/>
<copy toDir="${dist.dir}/doc/scala-devel-docs/examples">
- <fileset dir="${docs.dir}/examples"/>
+ <fileset dir="${docs.dir}/examples">
+ <exclude name="**/*.desired.sha1"/>
+ </fileset>
</copy>
<mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
<copy toDir="${dist.dir}/doc/scala-devel-docs/tools">
diff --git a/gitignore.SAMPLE b/gitignore.SAMPLE
index 3c6d8733ea..3c15a5de9e 100644
--- a/gitignore.SAMPLE
+++ b/gitignore.SAMPLE
@@ -2,24 +2,29 @@
/.gitignore
/test/files/.gitignore
-/.scala_dependencies
+*.jar
+*~
+
+#sbt
+/project/target/
+/project/project/target
-# "a" and "a/" to get both file (i.e. symlink) and folder
-/build
-/build/
-/target
/target/
+/src/jline/target/
+
+# target directories for ant build
+/build/
/dists/
+
+# other
/out/
/bin/
-
/sandbox/
+# eclipse, intellij
/.classpath
-
+/.project
/src/intellij/*.iml
/src/intellij/*.ipr
/src/intellij/*.iws
-/project/boot/
-/project/build/target/
diff --git a/.project b/project.SAMPLE
index b1f7386a4a..b1f7386a4a 100644
--- a/.project
+++ b/project.SAMPLE
diff --git a/project/Build.scala b/project/Build.scala
new file mode 100644
index 0000000000..abab775666
--- /dev/null
+++ b/project/Build.scala
@@ -0,0 +1,559 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+
+object ScalaBuild extends Build with Layers {
+ // New tasks/settings specific to the scala build.
+ lazy val lockerLock: TaskKey[Unit] = TaskKey("locker-lock",
+ "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ lazy val lockerUnlock: TaskKey[Unit] = TaskKey("locker-unlock",
+ "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ lazy val lockFile: SettingKey[File] = SettingKey("lock-file",
+ "Location of the lock file compiling this project.")
+ // New tasks/settings specific to the scala build.
+ lazy val lock: TaskKey[Unit] = TaskKey("lock", "Locks this project so it won't be recompiled.")
+ lazy val unlock: TaskKey[Unit] = TaskKey("unlock", "Unlocks this project so it will be recompiled.")
+ lazy val makeDist: TaskKey[File] = TaskKey("make-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ lazy val makeExplodedDist: TaskKey[File] = TaskKey("make-exploded-dist",
+ "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ lazy val makeDistMappings: TaskKey[Map[File, String]] = TaskKey("make-dist-mappings",
+ "Creates distribution mappings for creating zips,jars,directorys,etc.")
+ lazy val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+
+ // Build wide settings:
+ override lazy val settings = super.settings ++ Seq(
+ autoScalaLibrary := false,
+ resolvers += Resolver.url(
+ "Typesafe nightlies",
+ url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
+ )(Resolver.ivyStylePatterns),
+ resolvers ++= Seq(
+ "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
+ ScalaToolsSnapshots
+ ),
+ organization := "org.scala-lang",
+ version := "2.10.0-SNAPSHOT",
+ pomExtra := <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>,
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO -fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(`scalacheck`, scalaInstance.key, setting) => fullQuickScalaReference mapKey Project.mapScope(_ => s.key.scope)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(`scalacheck`, _, s) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false, crossPaths := false)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+
+ // Collections of projects to run 'compile' on.
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, dbc, forkjoin, fjbg)
+ // Collection of projects to 'package' and 'publish' together.
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, dbc, continuationsPlugin, jline, scalap)
+ lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
+
+ private def epflPomExtra = (
+ <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>
+ )
+
+ // Settings used to make sure publishing goes smoothly.
+ def publishSettings: Seq[Setting[_]] = Seq(
+ ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
+ pomIncludeRepository := (_ => false),
+ publishMavenStyle := true,
+ makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
+ pomExtra := epflPomExtra
+ )
+
+ // Settings for root project. These are aggregate tasks against the rest of the build.
+ def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
+ doc in Compile <<= (doc in documentation in Compile).identity,
+ // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
+ compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
+ // TODO - just clean target? i.e. target map IO.deleteRecursively
+ clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
+ packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
+ // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
+ publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
+ publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
+ lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
+ Seq(lib,comp).foreach(f => IO.touch(f))
+ },
+ lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
+ Seq(lib,comp).foreach(IO.delete)
+ },
+ genBinQuick <<= (genBinQuick in scaladist).identity,
+ makeDist <<= (makeDist in scaladist).identity,
+ makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
+ // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
+ Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
+ },
+ // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
+ commands += Release.pushStarr
+ //commands += Release.setStarrHome
+ )
+ // Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
+ lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
+
+ // External dependencies used for various projects
+ lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
+ Seq(
+ "org.apache.ant" % "ant" % "1.8.2",
+ "org.scala-tools.sbt" % "compiler-interface" % v % "provided"
+ )
+ )
+
+ // These are setting overrides for most artifacts in the Scala build file.
+ def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
+ crossPaths := false,
+ publishArtifact in packageDoc := false,
+ publishArtifact in packageSrc := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ (classDirectory in Compile) <<= target(_ / "classes"),
+ javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ javaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ // Most libs in the compiler use this order to build.
+ compileOrder in Compile := CompileOrder.JavaThenScala,
+ lockFile <<= target(_ / "compile.lock"),
+ skip in Compile <<= lockFile.map(_ exists),
+ lock <<= lockFile map { f => IO.touch(f) },
+ unlock <<= lockFile map IO.delete
+ )
+
+ // --------------------------------------------------------------
+ // Libraries used by Scalac that change infrequently
+ // (or hopefully so).
+ // --------------------------------------------------------------
+
+ // Jline nested project. Compile this sucker once and be done.
+ lazy val jline = Project("jline", file("src/jline"))
+ // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
+ lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
+ // Forkjoin backport
+ lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
+
+ // --------------------------------------------------------------
+ // The magic kingdom.
+ // Layered compilation of Scala.
+ // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
+ // --------------------------------------------------------------
+
+ // Need a report on this...
+ // TODO - Resolve STARR from a repo..
+ lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
+ val launcher = app.provider.scalaProvider.launcher
+ val library = file("lib/scala-library.jar")
+ val compiler = file("lib/scala-compiler.jar")
+ val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
+ ScalaInstance("starr", library, compiler, launcher, libJars: _*)
+ }
+
+ // Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
+ lazy val (lockerLib, lockerComp) = makeLayer("locker", STARR)
+ lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerComp)
+
+ // Quick is the general purpose project layer for the Scala compiler.
+ lazy val (quickLib, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerComp, fjbg))
+ lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickComp)
+
+ // Reference to quick scala instance.
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickComp, fjbg)
+ def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
+
+ // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
+ lazy val (strappLib, strappComp) = makeLayer("strapp", quickScalaInstance)
+
+ // --------------------------------------------------------------
+ // Projects dependent on layered compilation (quick)
+ // --------------------------------------------------------------
+ def addCheaterDependency(projectName: String): Setting[_] =
+ pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
+ val dependency: scala.xml.Node =
+ <dependency>
+ <groupId>{o}</groupId>
+ <artifactid>{projectName}</artifactid>
+ <version>{v}</version>
+ </dependency>
+ def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
+ case <dependencies>{nested@_*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
+ case x => x
+ }
+ // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
+ def hasDependencies(root: scala.xml.Node): Boolean =
+ (root.child collectFirst {
+ case n: scala.xml.Elem if n.label == "dependencies" => n
+ } isEmpty)
+ // TODO - Keep namespace on project...
+ k andThen {
+ case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
+ case <project>{ nested@_*}</project> =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
+ }
+ }
+
+ // TODO - in sabbus, these all use locker to build... I think tihs way is better, but let's farm this idea around.
+ // TODO - Actors + swing separate jars...
+ lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
+ lazy val actors = Project("actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val dbc = Project("dbc", file(".")) settings(dependentProjectSettings:_*)
+ // TODO - Remove actors dependency from pom...
+ lazy val swing = Project("swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ // This project will generate man pages (in man1 and html) for scala.
+ lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
+ lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
+
+ // Things that compile against the compiler.
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+ lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
+ lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap)
+ lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
+ name := "scalap",
+ exportJars := true
+ )
+ lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
+
+ // --------------------------------------------------------------
+ // Continuations plugin + library
+ // --------------------------------------------------------------
+ lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ exportJars := true,
+ name := "continuations" // Note: This artifact is directly exported.
+
+ )
+ lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
+ lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
+ scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ }
+ )
+ lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
+
+ // TODO - OSGi Manifest
+
+ // --------------------------------------------------------------
+ // Real Library Artifact
+ // --------------------------------------------------------------
+ val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
+ def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin, actors).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
+ name := "scala-library",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
+
+ // --------------------------------------------------------------
+ // Real Compiler Artifact
+ // --------------------------------------------------------------
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaLibrary)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaCompiler, fjbg)
+
+ // --------------------------------------------------------------
+ // Testing
+ // --------------------------------------------------------------
+ /* lazy val scalacheckSettings: Seq[Setting[_]] = Seq(fullQuickScalaReference, crossPaths := false)*/
+ lazy val scalacheck = uri("git://github.com/rickynils/scalacheck.git")
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
+ autoScalaLibrary := false
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (swing, scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, swing, scalaLibrary, scalaCompiler, fjbg)
+ )
+
+ // --------------------------------------------------------------
+ // Generating Documentation.
+ // --------------------------------------------------------------
+
+ // TODO - Migrate this into the dist project.
+ // Scaladocs
+ def distScalaInstance = makeScalaReference("dist", scalaLibrary, scalaCompiler, fjbg)
+ lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
+ // TODO - Make these work for realz.
+ defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
+ "reflect/Print.scala" ||
+ "reflect/Symbol.scala" ||
+ "reflect/Tree.scala" ||
+ "reflect/Type.scala" ||
+ "runtime/*$.scala" ||
+ "runtime/ScalaRuntime.scala" ||
+ "runtime/StringAdd.scala" ||
+ "scala/swing/test/*"),
+ sourceFilter in Compile := ("*.scala"),
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
+ Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
+ },
+ compile := inc.Analysis.Empty,
+ scaladocOptions in Compile in doc <++= (baseDirectory) map (bd =>
+ Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
+ "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
+ "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
+ "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
+ )),
+ classpathOptions in Compile := ClasspathOptions.manual
+ )
+ lazy val documentation = (
+ Project("documentation", file("."))
+ settings (documentationSettings: _*)
+ dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
+ )
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+
+ class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+ }
+
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Map[File,String]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Map(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Map[File,String] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Map( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled).toMap
+ }
+ }
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Map[File,String]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ } toMap
+ }
+
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner",
+ "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Map[File,String]]("gen-bin",
+ "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory",
+ "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Map[File,String]]("gen-quick-bin",
+ "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Map[File,String]]("make-man",
+ "Runs the man maker project to generate man pages")
+ val runManmakerHtml = TaskKey[Map[File,String]]("make-html",
+ "Runs the man maker project to generate html pages")
+
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,dbc,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ ) toMap
+ },
+ // Add in some more dependencies
+ makeDistMappings <<= (makeDistMappings,
+ packageBin in swing in Compile,
+ packageBin in dbc in Compile) map {
+ (dist, s, d) =>
+ dist ++ Seq(s -> "lib/scala-swing.jar", d -> "lib/scala-dbc.jar")
+ },
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps.values filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+}
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
diff --git a/project/Layers.scala b/project/Layers.scala
new file mode 100644
index 0000000000..d39e58014c
--- /dev/null
+++ b/project/Layers.scala
@@ -0,0 +1,92 @@
+import sbt._
+import Keys._
+import com.jsuereth.git.GitKeys.gitRunner
+
+/** This trait stores all the helper methods to generate layers in Scala's layered build. */
+trait Layers extends Build {
+ // TODO - Clean this up or use a self-type.
+
+ /** Default SBT overrides needed for layered compilation. */
+ def settingOverrides: Seq[Setting[_]]
+ /** Reference to the jline project */
+ def jline: Project
+ /** Reference to forkjoin library */
+ def forkjoin: Project
+ /** Reference to Fast-Java-Bytecode-Generator library */
+ def fjbg: Project
+ /** A setting that adds some external dependencies. */
+ def externalDeps: Setting[_]
+
+ /** Creates a reference Scala version that can be used to build other projects. This takes in the raw
+ * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
+ */
+ def makeScalaReference(layer : String, library: Project, compiler: Project, fjbg: Project) =
+ scalaInstance <<= (appConfiguration in library,
+ version in library,
+ (exportedProducts in library in Compile),
+ (exportedProducts in compiler in Compile),
+ (exportedProducts in fjbg in Compile),
+ (fullClasspath in jline in Runtime)) map {
+ (app, version: String, lib: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath) =>
+ val launcher = app.provider.scalaProvider.launcher
+ (lib,comp) match {
+ case (Seq(libraryJar), Seq(compilerJar)) =>
+ ScalaInstance(
+ version + "-" + layer + "-",
+ libraryJar.data,
+ compilerJar.data,
+ launcher,
+ ((fjbg.files++jline.files):_*))
+ case _ => error("Cannot build a ScalaReference with more than one classpath element")
+ }
+ }
+
+ /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
+ * Returns the library project and compiler project from the next layer.
+ * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
+ */
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]]) : (Project, Project) = {
+ val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(
+ version := layer,
+ // TODO - use depends on.
+ unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
+ managedClasspath in Compile := Seq(),
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
+ defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
+ // TODO - Allow other scalac option settings.
+ scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
+ classpathOptions := ClasspathOptions.manual,
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged, gitRunner, streams) map Release.generatePropertiesFile("library.properties"),
+ referenceScala
+ )
+
+ // Define the compiler
+ val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(
+ version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
+ unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"),
+ defaultExcludes := ("tests"),
+ javacOptions ++= Seq("-source", "1.4"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (baseDirectory, version, resourceManaged, gitRunner, streams) map Release.generatePropertiesFile("compiler.properties"),
+ // Note, we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up, but we'll
+ // stick with that for now.
+ unmanagedResources in Compile <<= (baseDirectory) map {
+ (bd) =>
+ val dirs = Seq(bd / "src" / "compiler")
+ dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
+ },
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, fjbg, jline).map(exportedProducts in Compile in _).join.map(_.flatten),
+ classpathOptions := ClasspathOptions.manual,
+ externalDeps,
+ referenceScala
+ )
+
+ // Return the generated projects.
+ (library, compiler)
+ }
+
+}
diff --git a/project/Partest.scala b/project/Partest.scala
new file mode 100644
index 0000000000..6fc5e11958
--- /dev/null
+++ b/project/Partest.scala
@@ -0,0 +1,141 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import complete._
+import scala.collection.{ mutable, immutable }
+
+/** Defines the sbt keys, tasks, and settings used to run the partest test suite from the build. */
+object partest {
+
+ /** The key for the run-partest task that exists in Scala's test suite. */
+ lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against the quick.")
+ lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
+ lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
+ lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
+ lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
+ lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directoryies to test.")
+ lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
+
+ lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
+ javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
+ partestDirs <<= baseDirectory apply { bd =>
+ partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
+ },
+ partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
+ partestTests <<= partestTestsTask(partestDirs),
+ runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
+ runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
+ runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
+ )
+
+ // What's fun here is that we want "*.scala" files *and* directories in the base directory...
+ def partestResources(base: File, testType: String): PathFinder = testType match {
+ case "res" => base ** "*.res"
+ case "buildmanager" => base * "*"
+ // TODO - Only allow directories that have "*.scala" children...
+ case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
+ }
+ lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
+
+ // TODO - Figure out how to specify only a subset of resources...
+ def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
+ testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
+
+ // TODO - Split partest task into Configurations and build a Task for each Configuration.
+ // *then* mix all of them together for run-testsuite or something clever like this.
+ def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
+ (runner, testRuns, scalacOptions, streams) map {
+ (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
+ }
+ }
+ private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
+ val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
+ val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
+
+ import collection.JavaConverters._
+ val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
+ // TODO - save results
+ val failures = results collect {
+ case (path, 1) => path + " [FAILED]"
+ case (path, 2) => path + " [TIMEOUT]"
+ }
+
+ if (failures.isEmpty)
+ s.log.info(""+results.size+" tests passed.")
+ else {
+ failures foreach (s.log error _)
+ error("Test Failures! ("+failures.size+" of "+results.size+")")
+ }
+ }
+
+ def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
+ (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
+
+  /** Takes a test file, as sent to Partest, and cleans it up for auto-complete */
+ def cleanFileName(file: File): String = {
+ // TODO - Something intelligent here
+ val TestPattern = ".*/test/(.*)".r
+ file.getCanonicalPath match {
+ case TestPattern(n) => n
+ case _ => file.getName
+ }
+ }
+
+ // TODO - Allow a filter for the second part of this...
+ def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
+ import DefaultParsers._
+ state => {
+ Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
+ val files: Set[String] = testDirs get kind match {
+ case Some(dir) =>
+ partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
+ case _ =>
+ Set()
+ }
+ Space ~> token(NotSpace examples files) map (kind -> _)
+ }
+ }
+ }
+
+ def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
+ import sbinary.DefaultProtocol._
+
+ InputTask(testDirs apply runSingleTestParser) { result =>
+ (runner, result, testDirs, scalacOptions, streams) map {
+ case (r, (kind, filter), dirs, o, s) =>
+ // TODO - Use partest resources somehow to filter the filter correctly....
+ val files: Seq[File] =
+ if (filter == "*") partestResources(dirs(kind), kind).get
+ else (dirs(kind) * filter).get
+
+ runPartestImpl(r, Map(kind -> files), o, s)
+ }
+ }
+ }
+
+ def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: ScopedSetting[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
+ (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
+}
+
+class PartestRunner(classpath: Seq[File], javaOpts: String) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
+ val m = c.getMethod("mainReflect", classOf[Array[String]])
+ (c,m)
+ }
+ lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
+ def run(args: Array[String]): java.util.Map[String,Int] = try {
+    // TODO - undo these settings after running. Also globals are bad.
+ System.setProperty("partest.java_opts", javaOpts)
+ val allArgs = (classPathArgs ++ args).toArray
+ mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,Int]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+}
diff --git a/project/Release.scala b/project/Release.scala
new file mode 100644
index 0000000000..5ed77548fc
--- /dev/null
+++ b/project/Release.scala
@@ -0,0 +1,115 @@
+import sbt._
+import Keys._
+import _root_.com.jsuereth.git.GitRunner
+
+object Release {
+
+ // TODO - move more of the dist project over here...
+
+
+ lazy val pushStarr = Command.command("push-starr") { (state: State) =>
+ def f(s: Setting[_]): Setting[_] = s.key.key match {
+ case version.key => // TODO - use full version
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => timeFormat format (new java.util.Date))
+ case organization.key =>
+ s.asInstanceOf[Setting[String]].mapInit( (_,_) => "org.scala-lang.bootstrapp")
+ // TODO - Switch publish repo to be typesafe starr repo.
+ case publishTo.key =>
+ s.asInstanceOf[Setting[Option[Resolver]]].mapInit((_,_) => Some("Starr Repo" at "http://typesafe.artifactoryonline.com/typesafe/starr-releases/"))
+ case _ => s
+ }
+ val extracted = Project.extract(state)
+ import extracted._
+ // Swap version on projects
+ val transformed = session.mergeSettings map ( s => f(s) )
+ val newStructure = Load.reapply(transformed, structure)
+ val newState = Project.setProject(session, newStructure, state)
+ // TODO - Run tasks. Specifically, push scala-compiler + scala-library. *Then* bump the STARR version locally.
+ // The final course of this command should be:
+ // publish-local
+ // Project.evaluateTask(publishLocal, newState)
+ // bump STARR version setting
+ // TODO - Define Task
+ // Rebuild quick + test to ensure it works
+ // Project.evaluateTask(test, newState)
+ // push STARR remotely
+ Project.evaluateTask(publish, newState)
+ // Revert to previous project state.
+ Project.setProject(session, structure, state)
+ }
+
+ // TODO - Autocomplete
+ /*lazy val setStarrHome = Command.single("set-starr-home") { (state: State, homeDir: String) =>
+ def f(s: Setting[_]): Setting[_] =
+ if(s.key.key == scalaInstance.key) {
+ s.asInstanceOf[Setting[ScalaInstance]] mapInit { (key, value) =>
+ if(value.version == "starr")
+ scalaInstance <<= appConfiguration map { app =>
+ val launcher = app.provider.scalaProvider.launcher
+ ScalaInstance("starr", new File(homeDir), launcher)
+ }
+ else value
+ }
+ } else s
+ val extracted = Project.extract(state)
+ import extracted._
+ val transformed = session.mergeSettings map f
+ val newStructure = Load.reapply(transformed, structure)
+ Project.setProject(session, newStructure, state)
+ }*/
+
+ lazy val timeFormat = {
+ val formatter = new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")
+ formatter.setTimeZone(java.util.TimeZone.getTimeZone("GMT"))
+ formatter
+ }
+
+ /** This generates a properties file, if it does not already exist, with the maximum lastmodified timestamp
+ * of any source file. */
+ def generatePropertiesFile(name: String)(baseDirectory: File, version: String, dir: File, git: GitRunner, s: TaskStreams): Seq[File] = {
+ // TODO - We can probably clean this up by moving caching bits elsewhere perhaps....
+ val target = dir / name
+ // TODO - Regenerate on triggers, like recompilation or something...
+ val fullVersion = makeFullVersionString(baseDirectory, version, git, s)
+ def hasSameVersion: Boolean = {
+ val props = new java.util.Properties
+ val in = new java.io.FileInputStream(target)
+ try props.load(in) finally in.close()
+ def withoutDate(s: String): String = s.reverse.dropWhile (_ != '.').reverse
+ withoutDate(fullVersion) == withoutDate(props getProperty "version.number")
+ }
+ if (!target.exists || !hasSameVersion) {
+ makePropertiesFile(target, fullVersion)
+ }
+ target :: Nil
+ }
+
+ // This creates the *.properties file used to determine the current version of scala at runtime. TODO - move these somewhere utility like.
+ def makePropertiesFile(f: File, version: String): Unit =
+ IO.write(f, "version.number = "+version+"\ncopyright.string = Copyright 2002-2011, LAMP/EPFL")
+
+ def makeFullVersionString(baseDirectory: File, baseVersion: String, git: GitRunner, s: TaskStreams) = baseVersion+"."+getGitRevision(baseDirectory, git, currentDay, s)
+
+ // TODO - do we want this in the build number?
+ def currentDay = (new java.text.SimpleDateFormat("yyyyMMdd'T'HHmmss")) format (new java.util.Date)
+
+
+
+ def getGitRevision(baseDirectory: File, git: GitRunner, date: String, s: TaskStreams) = {
+
+ val mergeBase = {
+ // TODO - Cache this value.
+ // git("merge-base","v2.8.2","v2.9.1","master")(baseDirectory, s.log)
+ "df13e31bbb"
+ }
+ // current commit sha
+ val sha =
+ git("rev-list", "-n", "1", "HEAD")(baseDirectory, s.log)
+
+ val commits =
+ git("--no-pager", "log", "--pretty=oneline", mergeBase +"..HEAD")(baseDirectory, s.log) split "[\r\n]+" size
+
+ "rdev-%d-%s-g%s" format (commits, date, sha.substring(0,7))
+ }
+
+}
diff --git a/project/Sametest.scala b/project/Sametest.scala
new file mode 100644
index 0000000000..f44fe8ec65
--- /dev/null
+++ b/project/Sametest.scala
@@ -0,0 +1,66 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
+object SameTest {
+ lazy val checkSame: TaskKey[Unit] = TaskKey("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
+ lazy val checkSameLibrary: TaskKey[Unit] = TaskKey("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
+ lazy val checkSameCompiler: TaskKey[Unit] = TaskKey("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
+
+ def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
+ (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
+ compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
+ // Now we generate a complete set of relative files and then
+ def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
+ // This code adapted from SameTask in the compiler.
+ def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
+ filePairs exists { case (a,b) =>
+ if (!a.canRead || !b.canRead) {
+ s.log.error("Either ["+a+"] or ["+b+"] is missing.")
+ true
+ } else {
+ s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
+ val diff = !checkSingleFilePair(a,b)
+ if(diff) s.log.error("["+a+"] differs from ["+b+"]")
+ diff
+ }
+ }
+ }
+ val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
+ val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
+ val result = hasDifferentFiles(comparisons)
+ if (result) error("Binary artifacts differ.")
+ }
+
+ val bufferSize = 1024
+
+ // Tests whether two files are binary equivalents of each other.
+ def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
+ Using.fileInputStream(originFile) { originStream =>
+ Using.fileInputStream(destFile) { destStream =>
+ val originBuffer = new Array[Byte](bufferSize)
+ val destBuffer = new Array[Byte](bufferSize)
+ var equalNow = true
+ var originRemaining = originStream.read(originBuffer)
+ var destRemaining = destStream.read(destBuffer)
+ while (originRemaining > 0 && equalNow) {
+ if (originRemaining == destRemaining) {
+ for (idx <- 0 until originRemaining) {
+ equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
+ }
+ } else {
+ equalNow = false
+ }
+ originRemaining = originStream.read(originBuffer)
+ destRemaining = destStream.read(destBuffer)
+ }
+ if (destRemaining > 0) equalNow = false
+ equalNow
+ }
+ }
+ }
+
+
+}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
new file mode 100644
index 0000000000..f54e96c0c6
--- /dev/null
+++ b/project/ShaResolve.scala
@@ -0,0 +1,117 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import scala.collection.{ mutable, immutable }
+import scala.collection.parallel.CompositeThrowable
+import java.security.MessageDigest
+
+
+/** Helpers to resolve SHA artifacts from typesafe repo. */
+object ShaResolve {
+ import dispatch.{Http,url}
+ val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+
+ val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
+ val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
+ val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
+
+ def settings: Seq[Setting[_]] = Seq(
+ binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
+ pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs
+ )
+
+ def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ sha = getShaFromShafile(file)
+ } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
+ }
+
+ @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
+ case t: CompositeThrowable =>
+ s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
+ t.throwables foreach (t2 => s.log.trace(t2))
+ throw t
+ }
+
+ def getShaFromShafile(file: File): String = parseShaFile(file)._2
+
+ // This should calculate the SHA sum of a file the same as the linux process.
+ def calculateSha(file: File): String = {
+ val digest = MessageDigest.getInstance("SHA1")
+ val in = new java.io.FileInputStream(file);
+ val buffer = new Array[Byte](8192)
+ try {
+ def read(): Unit = in.read(buffer) match {
+ case x if x <= 0 => ()
+ case size => digest.update(buffer, 0, size); read()
+ }
+ read()
+ } finally in.close()
+ val sha = convertToHex(digest.digest())
+ sha
+ }
+
+ // TODO - Prettier way of doing this...
+ private def convertToHex(data: Array[Byte]): String = {
+ val buf = new StringBuffer
+ for (i <- 0 until data.length) {
+ var halfbyte = (data(i) >>> 4) & 0x0F;
+ var two_halfs = 0;
+ while(two_halfs < 2) {
+ if ((0 <= halfbyte) && (halfbyte <= 9))
+ buf.append(('0' + halfbyte).toChar)
+ else
+ buf.append(('a' + (halfbyte - 10)).toChar);
+ halfbyte = data(i) & 0x0F;
+ two_halfs += 1
+ }
+ }
+ return buf.toString
+ }
+
+ // Parses a sha file into a file and a sha.
+ def parseShaFile(file: File): (File, String) =
+ IO.read(file).split("\\s") match {
+ case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
+ case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
+ case _ => error(file.getAbsolutePath + " is an invalid sha file")
+ }
+
+
+ def isValidSha(file: File): Boolean =
+ try {
+ val (jar, sha) = parseShaFile(file)
+ jar.exists && calculateSha(jar) == sha
+ } catch {
+ case t: Exception => false
+ }
+
+
+ def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
+ val cachedFile = cacheDir / uri
+ if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
+ // Ensure the directory for the cache exists.
+ cachedFile.getParentFile.mkdirs()
+ val url = remote_urlbase + "/" + uri
+ val fous = new java.io.FileOutputStream(cachedFile)
+ s.log.info("Pulling [" + cachedFile + "] to cache")
+ try Http(dispatch.url(url) >>> fous) finally fous.close()
+ }
+ s.log.info("Pulling [" + file + "] from local cache")
+ IO.copyFile(cachedFile, file)
+ }
+
+ def pushFile(file: File, uri: String, user: String, pw: String): Unit = {
+ val url = remote_urlbase + "/" + uri
+ val sender = dispatch.url(url).PUT.as(user,pw) <<< (file, "application/java-archive")
+ // TODO - output to logger.
+ Http(sender >>> System.out)
+ }
+}
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
new file mode 100644
index 0000000000..c8eebb1159
--- /dev/null
+++ b/project/VerifyClassLoad.scala
@@ -0,0 +1,46 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
+object VerifyClassLoad {
+ lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
+ lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
+
+
+ def settings: Seq[Setting[_]] = Seq(
+ checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
+ fullClasspath in checkClassLoad := Seq(),
+ checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
+ import collection.JavaConverters._
+ val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
+
+ s.log.info("Processed " + results.size + " classes.")
+ val errors = results.filter(_._2 != null)
+ for( (name, result) <- results; if result != null) {
+ s.log.error(name + " had error: " + result)
+ }
+ if(errors.size > 0) error("Classload validation errors encountered")
+ ()
+ }
+ )
+
+ // TODO - Use
+ class ClassVerifyRunner(classpath: Seq[File]) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.util.VerifyClass")
+ val m = c.getMethod("run", classOf[Array[String]])
+ (c,m)
+ }
+ def run(args: Array[String]): java.util.Map[String,String] = try {
+ mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+ }
+}
diff --git a/project/build.properties b/project/build.properties
deleted file mode 100644
index 4775404a76..0000000000
--- a/project/build.properties
+++ /dev/null
@@ -1,11 +0,0 @@
-#Project properties
-#Sun Apr 11 14:24:47 CEST 2010
-project.name=scala
-def.scala.version=2.7.7
-sbt.version=0.7.7
-copyright=Copyright 2002-2011, LAMP/EPFL
-build.scala.versions=2.7.7
-project.initialize=false
-project.organization=ch.epfl.lamp
-partest.version.number=0.9.2
-project.version=2.8.1
diff --git a/project/build/AdditionalResources.scala b/project/build/AdditionalResources.scala
deleted file mode 100644
index d83d45b218..0000000000
--- a/project/build/AdditionalResources.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-import sbt._
-import java.util.jar.{Manifest}
-import java.io.{FileInputStream}
-import AdditionalResources._
-/**
- * Additional tasks that are required to obtain a complete compiler and library pair, but that are not part of the
- * compilation task. It copies additional files and generates the properties files
- * @author Grégory Moix
- */
-trait AdditionalResources {
- self : BasicLayer =>
-
- def writeProperties: Option[String] = {
- def write0(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: PropertiesToWrite => {
- c.writeProperties orElse write0(xs)
- }
- case _ => write0(xs)
- }
- case Nil => None
- }
- write0(allSteps.topologicalSort)
- }
-}
-
-object AdditionalResources {
- /**
- * A FileFilter that defines what are the files that will be copied
- */
- lazy val basicFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.properties" | "*.swf" | "*.png"
- implicit def stringToGlob(s: String): NameFilter = GlobFilter(s)
-}
-
-trait ResourcesToCopy {
- self : CompilationStep =>
-
- def getResources(from: Path, filter: FileFilter): PathFinder = (from ##)** filter
- def getResources(from: Path): PathFinder = getResources(from, AdditionalResources.basicFilter)
-
- def copyDestination: Path
- def filesToCopy: PathFinder
-
- def copy = {
- log.info("Copying files for "+name)
- try { FileUtilities.copy(filesToCopy.get, copyDestination, log) }
- catch { case e => Some(e.toString) }
-
- None
- }
-}
-
-trait PropertiesToWrite {
- self : CompilationStep =>
-
- def propertyList: List[(String, String)]
- def propertyDestination: Path
-
- def writeProperties: Option[String] ={
- import java.io._
- import java.util.Properties
-
- val properties = new Properties
-
- def insert(list: List[(String, String)]): Unit =
- list foreach { case (k, v) => properties.setProperty(k, v) }
-
- try {
- insert(propertyList)
- val destFile = propertyDestination.asFile
- val stream = new FileOutputStream(destFile)
- properties.store(stream, null)
- }
- catch {
- case e: Exception => Some(e.toString)
- }
- None
- }
-
-}
-
diff --git a/project/build/BasicLayer.scala b/project/build/BasicLayer.scala
deleted file mode 100644
index b333131d51..0000000000
--- a/project/build/BasicLayer.scala
+++ /dev/null
@@ -1,296 +0,0 @@
-import sbt._
-import xsbt.ScalaInstance
-import ScalaBuildProject._
-
-/**
- * Basic tasks and configuration shared by all layers. This class regroups the configuration and behaviour
- * shared by all layers.
- * @author Grégory Moix
- */
-abstract class BasicLayer(val info: ProjectInfo, val versionNumber: String, previousLayer: Option[BasicLayer])
- extends ScalaBuildProject
- with ReflectiveProject
- with AdditionalResources
- with LayerCompilation
- with BuildInfoEnvironment
- with ForkSBT {
- layer =>
-
- // All path values must be lazy in order to avoid initialization issues (sbt way of doing things)
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- val forkProperty = "scala.sbt.forked"
- def isDebug = info.logger atLevel Level.Debug
- def isForked = System.getProperty(forkProperty) != null
-
- // Support of triggered execution at project level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
- override def dependencies = info.dependencies
-
- lazy val copyright = property[String]
- lazy val partestVersionNumber = property[Version]
-
- lazy val nextLayer: Option[BasicLayer] = None
- def packingDestination : Path = layerOutput / "pack"
- lazy val libsDestination = packingDestination/ "lib"
- lazy val packedStarrOutput = outputRootPath / "pasta"
- lazy val requiredPluginsDirForCompilation = layerOutput / "misc" / "scala-devel" / "plugins"
-
- def compilerAdditionalJars: List[Path] = Nil
- def libraryAdditionalJars: List[Path] = Nil
-
- // TASKS
-
- /**
- * Before compiling the layer, we need to check that the previous layer
- * was created correctly and compile it if necessary
- */
- lazy val startLayer = previousLayer match {
- case Some(previous) => task(None) dependsOn previous.finishLayer
- case _ => task(None)
- }
-
- def buildLayer = externalCompilation orElse writeProperties
-
- lazy val build = compile
-
- lazy val compile = task(buildLayer) dependsOn startLayer
-
- /**
- * Finish the compilation and ressources copy and generation
- * It does nothing in itself. As sbt doesn't support conditional dependencies,
- * it permit locker to override it in order to lock the layer when the compilation
- * is finished.
- */
- lazy val finishLayer: ManagedTask = task(None) dependsOn compile
-
- def cleaningList = List(
- layerOutput,
- layerEnvironment.envBackingPath,
- packingDestination
- )
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- // We use super.task, so cleaning is done in every case, even when locked
- lazy val clean: Task = nextLayer match {
- case Some(next) => super.task(cleanFiles) dependsOn next.clean
- case _ => super.task(cleanFiles)
- }
- lazy val cleanBuild = task(cleanFiles orElse buildLayer) dependsOn startLayer
-
- // Utility methods (for quick access)
- def actorsOutput = actorsConfig.outputDirectory
- def actorsSrcDir = actorsConfig.srcDir
- def compilerOutput = compilerConfig.outputDirectory
- def compilerSrcDir = compilerConfig.srcDir
- def dbcOutput = dbcConfig.outputDirectory
- def libraryOutput = libraryConfig.outputDirectory
- def librarySrcDir = libraryConfig.srcDir
- def outputCompilerJar = compilerConfig.packagingConfig.jarDestination
- def outputLibraryJar = libraryWS.packagingConfig.jarDestination
- def outputPartestJar = partestConfig.packagingConfig.jarDestination
- def outputScalapJar = scalapConfig.packagingConfig.jarDestination
- def scalapOutput = scalapConfig.outputDirectory
- def swingOutput = swingConfig.outputDirectory
- def swingSrcDir = swingConfig.srcDir
-
- // CONFIGURATION OF THE COMPILATION STEPS
-
- /**
- * Configuration of the core library compilation
- */
- lazy val libraryConfig = new CompilationStep("library", pathLayout , log) with ResourcesToCopy with PropertiesToWrite {
- def label = "["+layer.name+"] library"
- def options: Seq[String] = Seq("-sourcepath", pathConfig.sources.absolutePath.toString)
- def dependencies = Nil
- override def classpath = super.classpath +++ forkJoinJar
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "library.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
- }
-
- /**
- * Configuration of the compiler
- */
- lazy val compilerConfig = new CompilationStep("compiler", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] compiler"
- private def bootClassPath : String = {
- System.getProperty("sun.boot.class.path")
- }
- override def classpath: PathFinder = super.classpath +++ fjbgJar +++ msilJar +++ jlineJar +++ antJar +++ forkJoinJar
- def options = Seq("-bootclasspath", bootClassPath)
- def dependencies = if (minimalCompilation) libraryConfig :: Nil else libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "compiler.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
-
- lazy val packagingConfig = {
- import java.util.jar.Manifest
- import java.io.FileInputStream
- val manifest = new Manifest(new FileInputStream(manifestPath.asFile))
- new PackagingConfiguration(libsDestination / compilerJarName, List(outputDirectory ##), manifest , compilerAdditionalJars)
- }
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/compilerJarName, List(outputDirectory ##))
-
- }
-
- //// ADDTIONNAL LIBRARIES ////
-
- /**
- * Config of the actors library
- */
- lazy val actorsConfig = new CompilationStep ("actors", pathLayout, log){
- def label = "["+layer.name+"] actors library"
- override def classpath: PathFinder = super.classpath +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
- }
-
- /**
- * Config of the dbc library
- */
- lazy val dbcConfig = new CompilationStep("dbc", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] dbc library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / dbcJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Config of the swing library
- */
- lazy val swingConfig = new CompilationStep("swing", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] swing library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / swingJarName,
- List(outputDirectory ##)
- )
- }
-
- ///// TOOLS CONFIGURATION ////////
-
- /**
- * Configuration of scalacheck
- */
- lazy val scalacheckConfig = new CompilationStep("scalacheck", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalacheck"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalacheckJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Configuration of scalap tool
- */
- lazy val scalapConfig = new CompilationStep("scalap", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalap"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: Nil
-
- val decoderProperties = (srcDir ## ) / "decoder.properties"
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalapJarName,
- List(outputDirectory ##, decoderProperties)
- )
- }
-
- /**
- * Configuration of the partest tool
- */
- lazy val partestConfig = new CompilationStep("partest", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] partest"
- override def classpath: PathFinder = super.classpath +++ antJar +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: scalapConfig :: actorsConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "partest.properties"
- def propertyList = List(
- ("version.number", partestVersionNumber.value.toString),
- ("copyright.string", copyright.value)
- )
-
- lazy val packagingConfig = new PackagingConfiguration(libsDestination / partestJarName, List(outputDirectory ##))
-
- }
-
- ///// PLUGINS CONFIGURATION ////////
-
- lazy val continuationPluginConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "plugin"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "plugin"
- def output: Path = pathLayout.classesOutput / "continuations" / "plugin"
- }
-
- new CompilationStep("continuation-plugin", config, log) with ResourcesToCopy with EarlyPackaging {
- def label = "["+layer.name+"] continuation plugin"
- def dependencies = libraryConfig :: compilerConfig :: Nil
- def options = Seq()
-
- def filesToCopy = (sourceRoots ##) / "scalac-plugin.xml"
- def copyDestination = outputDirectory
- def jarContent = List(outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(
- requiredPluginsDirForCompilation/"continuations.jar",
- List(outputDirectory ##)
- )
- lazy val earlyPackagingConfig = new PackagingConfiguration(
- pathLayout.outputDir / "misc" / "scala-devel" / "plugins" / "continuations.jar",
- List(outputDirectory ##)
- )
- }
- }
-
- lazy val continuationLibraryConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "library"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "library"
- def output: Path = pathLayout.classesOutput / "continuations" / "library"
- }
-
- new CompilationStep("continuation-library", config, log) {
- def label = "["+layer.name+"] continuation library"
- def dependencies = libraryConfig :: compilerConfig :: continuationPluginConfig :: Nil
- def options = Seq(
- "-Xpluginsdir",
- requiredPluginsDirForCompilation.absolutePath,
- "-Xplugin-require:continuations",
- "-P:continuations:enable"
- )
- }
- }
-
- // Grouping compilation steps
- def minimalCompilation = false // It must be true for locker because we do not need to compile everything
-
- def libraryWS: WrapperStep with Packaging
- def toolsWS: WrapperStep
-
- lazy val pluginsWS = new WrapperStep(continuationPluginConfig :: continuationLibraryConfig :: Nil)
- lazy val allSteps = new WrapperStep(libraryWS :: compilerConfig :: pluginsWS :: toolsWS :: Nil)
-}
diff --git a/project/build/BuildInfoEnvironment.scala b/project/build/BuildInfoEnvironment.scala
deleted file mode 100644
index fc1c436c33..0000000000
--- a/project/build/BuildInfoEnvironment.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-trait BuildInfoEnvironment {
- self : Project =>
- def buildInfoEnvironmentLocation: Path
- /**
- * Environment for storing properties that
- * 1) need to be saved across sbt session
- * 2) Are local to a layer
- * Used to save the last version of the compiler used to build the layer (for discarding it's product if necessary)
- */
- lazy val layerEnvironment = new BasicEnvironment {
- // use the project's Logger for any properties-related logging
- def log = self.log
-
- // the properties file will be read/stored
- def envBackingPath = buildInfoEnvironmentLocation
- // define some properties
- lazy val lastCompilerVersion: Property[String] = propertyOptional[String]("")
- }
-
-}
diff --git a/project/build/Comparator.scala b/project/build/Comparator.scala
deleted file mode 100644
index 7400788ba9..0000000000
--- a/project/build/Comparator.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-
-// Based on scala.tools.ant.Same
-object Comparator {
-
- private def getMappedPath(path: Path, baseDirectory: Path): Path = {
- Path.fromString(baseDirectory, path.relativePath)
- }
-
-
- def compare(origin: Path, dest: Path, filter: Path => PathFinder, log: Logger): Option[String] = {
- log.info("Comparing the contents of "+origin.absolutePath+ " with "+dest.absolutePath)
- var allEqualNow = true
-
- def reportDiff(f1: File, f2: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' is different from correspondant.")
- }
-
- def reportMissing(f1: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' has no correspondant.")
- }
-
-
-
- val originPaths = filter(origin).get
-
- val bufferSize = 1024
- val originBuffer = new Array[Byte](bufferSize)
- val destBuffer = new Array[Byte](bufferSize)
-
- for (originPath <- originPaths.filter(! _.isDirectory)){
- log.debug("origin :" + originPath.absolutePath)
- val destPath = getMappedPath(originPath, dest)
- log.debug("dest :" + destPath.absolutePath)
- var equalNow = true
- val originFile = originPath.asFile
- val destFile = destPath.asFile
-
- if (originFile.canRead && destFile.canRead) {
-
- val originStream = new FileInputStream(originFile)
- val destStream = new FileInputStream(destFile)
- var originRemaining = originStream.read(originBuffer)
- var destRemaining = destStream.read(destBuffer)
- while (originRemaining > 0 && equalNow) {
- if (originRemaining == destRemaining)
- for (idx <- 0 until originRemaining) {
- equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))}
- else
- equalNow = false
- originRemaining = originStream.read(originBuffer)
- destRemaining = destStream.read(destBuffer)
- }
- if (destRemaining > 0) equalNow = false
-
- if (!equalNow) reportDiff(originFile, destFile)
-
- originStream.close
- destStream.close
-
- }
- else reportMissing(originFile)
-
- }
- if(allEqualNow) None else Some("There were differences between "+origin.absolutePath+ " and "+ dest.absolutePath)
- }
-
-
-}
diff --git a/project/build/Compilation.scala b/project/build/Compilation.scala
deleted file mode 100644
index d581b2b736..0000000000
--- a/project/build/Compilation.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-import sbt._
-import xsbt.{AnalyzingCompiler, ScalaInstance}
-import FileUtilities._
-
-/**
- * This trait define the compilation task.
-* @author Grégory Moix
- */
-trait Compilation {
- self : ScalaBuildProject with BuildInfoEnvironment =>
-
- def lastUsedCompilerVersion = layerEnvironment.lastCompilerVersion
-
- def instantiationCompilerJar: Path
- def instantiationLibraryJar: Path
-
- def instanceScope[A](action: ScalaInstance => A): A = {
- val instance = ScalaInstance(instantiationLibraryJar.asFile, instantiationCompilerJar.asFile, info.launcher, msilJar.asFile, fjbgJar.asFile)
- log.debug("Compiler will be instantiated by :" +instance.compilerJar +" and :" +instance.libraryJar )
- action(instance)
- }
-
- def compile(stepList: Step, clean:() => Option[String]): Option[String] = compile(stepList, Some(clean))
- def compile(stepList: Step): Option[String] = compile(stepList, None)
- /**
- * Execute the different compilation parts one after the others.
- */
- def compile(stepsList: Step, clean: Option[() => Option[String]]): Option[String] ={
-
- instanceScope[Option[String]]{ scala =>
- lazy val analyzing = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
-
- def compilerVersionHasChanged = lastUsedCompilerVersion.value != scala.actualVersion
-
- def checkAndClean(cleanFunction:() => Option[String]): Option[String] ={
- if (compilerVersionHasChanged) {
- log.info("The compiler version used to build this layer has changed since last time or this is a clean build.")
- lastUsedCompilerVersion.update(scala.actualVersion)
- layerEnvironment.saveEnvironment
- cleanFunction()
- } else {
- log.debug("The compiler version is unchanged. No need for cleaning.")
- None
- }
- }
-
- def compile0(steps: List[Step]): Option[String] = {
- steps foreach {
- case c: CompilationStep =>
- val conditional = new CompileConditional(c, analyzing)
- log.info("")
- val res = conditional.run orElse copy(c) orElse earlyPackaging(c)
- if (res.isDefined)
- return res
- case _ => ()
- }
- None
- }
-
- /**
- * When we finishe to compile a step we want to jar if necessary in order to
- * be able to load plugins for the associated library
- */
- def earlyPackaging(step: CompilationStep): Option[String] = step match {
- case s: EarlyPackaging => {
- val c = s.earlyPackagingConfig
- log.debug("Creating jar for plugin")
- jar(c.content.flatMap(Packer.jarPattern(_)), c.jarDestination, c.manifest, false, log)
- }
- case _ => None
- }
-
- def copy(step: CompilationStep): Option[String] = step match {
- case s: ResourcesToCopy => s.copy
- case _ => None
- }
-
- def cleanIfNecessary: Option[String] = clean match {
- case None => None
- case Some(f) => checkAndClean(f)
- }
- cleanIfNecessary orElse compile0(stepsList.topologicalSort)
- }
- }
-
-
-}
-
-trait LayerCompilation extends Compilation {
- self : BasicLayer =>
-
- protected def cleanCompilation: Option[String] = {
- log.info("Cleaning the products of the compilation.")
- FileUtilities.clean(layerOutput :: Nil, true, log)
- }
-
- /**
- * Run the actual compilation. Should not be called directly because it is executed on the same jvm and that
- * it could lead to memory issues. It is used only when launching a new sbt process to do the compilation.
- */
- lazy val compilation = task {compile(allSteps, cleanCompilation _)}
-
- def externalCompilation: Option[String] = maybeFork(compilation)
-}
diff --git a/project/build/CompilationStep.scala b/project/build/CompilationStep.scala
deleted file mode 100644
index 000dca0234..0000000000
--- a/project/build/CompilationStep.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import sbt._
-import AdditionalResources._
-
-trait Step extends Dag[Step] {
- def dependencies: Iterable[Step]
-}
-
-class WrapperStep(contents: List[Step]) extends Step {
- def dependencies = contents
-}
-
-abstract class CompilationStep(val name: String, val pathConfig: PathConfig, logger: Logger) extends CompileConfiguration with Step {
- def this(name: String, layout: PathLayout, logger: Logger) = this(name, layout / name, logger)
-
- // Utility methods (for quick access, ...)
- final def srcDir = pathConfig.sources
-
- // Methods required for the compilation
- def log: Logger = logger
- final def sourceRoots : PathFinder = pathConfig.sources
- def sources: PathFinder = sourceRoots.descendentsExcept("*.java" | "*.scala", ".svn")
- final def projectPath: Path = pathConfig.projectRoot
- final def analysisPath: Path = pathConfig.analysis
- final def outputDirectory: Path = pathConfig.output
- def classpath = {
- def addDependenciesOutputTo(list: List[Step], acc: PathFinder): PathFinder = list match {
- case Nil => acc
- case x :: xs => x match {
- case c: CompilationStep => addDependenciesOutputTo(xs, acc +++ c.outputDirectory)
- case w: WrapperStep => addDependenciesOutputTo(xs, addDependenciesOutputTo(dependencies.toList, acc))
- }
- }
- addDependenciesOutputTo(dependencies.toList, outputDirectory)
- }
- def javaOptions: Seq[String] = "-target 1.5 -source 1.5 -g:none" split ' '
- def maxErrors: Int = 100
- def compileOrder = CompileOrder.JavaThenScala
- def fingerprints = Fingerprints(Nil, Nil)
-}
diff --git a/project/build/ForkSBT.scala b/project/build/ForkSBT.scala
deleted file mode 100644
index b30e35e61f..0000000000
--- a/project/build/ForkSBT.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/** Scala SBT build
- * Copyright 2005-2010 LAMP/EPFL
- * @author Paul Phillips
- */
-
-import sbt._
-
-/** Worked out a way to fork sbt tasks, preserving all sbt command line
- * options and without hardcoding anything.
- */
-trait ForkSBT {
- self: BasicLayer =>
-
- def jvmArguments: List[String] = {
- import scala.collection.jcl.Conversions._
- import java.lang.management.ManagementFactory
- ManagementFactory.getRuntimeMXBean().getInputArguments().toList
- }
-
- private var extraJVMArgs: List[String] = Nil
- def withJVMArgs[T](args: String*)(body: => T): T = {
- val saved = extraJVMArgs
- extraJVMArgs = args.toList
- try { body }
- finally extraJVMArgs = saved
- }
-
- // Set a property in forked sbts to inhibit possible forking cycles.
- def markForked = "-D" + forkProperty + "=true"
-
- /** Forks a new process to run "sbt task task ...":
- */
- def forkTasks(tasks: String*): Boolean = {
- require (!isForked, "Tried to fork but sbt is already forked: " + tasks.mkString(" "))
-
- val sbtJar = System.getProperty("java.class.path")
- val sbtMain = "xsbt.boot.Boot" // ok, much of anything.
- val args = jvmArguments ++ Seq(markForked, "-classpath", sbtJar, sbtMain) ++ tasks
-
- log.info("Forking: " + args.mkString("java ", " ", ""))
- Fork.java(None, args, StdoutOutput) == 0
- }
- def maybeFork(task: TaskManager#Task): Option[String] = maybeFork(task, "Error during external compilation.")
- def maybeFork(task: TaskManager#Task, errorMsg: String): Option[String] = {
- if (isForked) task.run
- else if (forkTasks("project " + this.name, task.name)) None
- else Some(errorMsg)
- }
-}
diff --git a/project/build/Packer.scala b/project/build/Packer.scala
deleted file mode 100644
index 73db5567b6..0000000000
--- a/project/build/Packer.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-import java.util.jar.Manifest
-import AdditionalResources._
-import FileUtilities._
-
-
-
-object Packer {
-
- /**
- * A filter that exclude files that musn't be in a jar file.
- */
- // We must exclude the manifest because we generate it automatically, and when we add multiples other jars, they could have
- // also a manifest files each, resulting in conflicts for the FileUtilities.jar(..) method
- def jarPattern(path: PathFinder) = path.descendentsExcept(AllPassFilter, (".*" - ".") || HiddenFileFilter || new ExactFilter("MANIFEST.MF")).get
-
- def createJar(j: Packaging, log: Logger): Option[String] = createJar(j.packagingConfig, log, jarPattern _, true)
- def createJar(j: PackagingConfiguration, log: Logger): Option[String] = createJar(j, log, jarPattern _, true)
-
-
- /**
- * Create a jar from the packaging trait. Is able to add directly others jars to it
- */
- def createJar(j: PackagingConfiguration, log: Logger, filter:(PathFinder) => Iterable[Path], addIncludedLibs: Boolean): Option[String] = {
- def pack0(content: Iterable[Path])= jar(content.flatMap(filter(_)), j.jarDestination, j.manifest, false, log)
-
- j.jarsToInclude match {
- case Nil => pack0(j.content)
- case list if addIncludedLibs => {
- withTemporaryDirectory(log) { tmp: File =>
- val tmpPath = Path.fromFile(tmp)
- log.debug("List of jars to be added : " +list)
- def unzip0(l: List[Path]): Option[String] = l match {
- case x :: xs => {unzip(x, tmpPath, log);unzip0(xs)} //TODO properly handle failing of unzip
- case Nil => None
- }
- unzip0(list)
- log.debug("Content of temp folder"+ tmpPath.##.**( GlobFilter("*")))
- pack0(j.content ++ Set(tmpPath ##))
- }
- }
- case _ => pack0(j.content)
-
- }
- }
-
-}
-
-/**
- * Create the jars of pack
- * @author Grégory Moix
- */
-trait Packer {
- self: BasicLayer =>
-
- def libraryToCopy: List[Path] = Nil
-
- /**
- * The actual pack task.
- */
- def packF = {
- import Packer._
- def iterate(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: Packaging => {
- createJar(c, log) orElse iterate(xs)
- }
- case _ => iterate(xs)
- }
- case Nil => None
- }
-
- def copy0 ={
- copyFile(manifestPath,packingDestination/"META-INF"/"MANIFEST.MF", log) orElse {
- copy(libraryToCopy, packingDestination , true, true, log) match {
- case Right(_) => None
- case Left(e) => Some(e)
- }
- }
- }
- iterate(allSteps.topologicalSort) orElse copy0
- }
- lazy val pack = task {packF}.dependsOn(finishLayer)
-}
-
-
-class PackagingConfiguration(val jarDestination: Path, val content: Iterable[Path], val manifest: Manifest, val jarsToInclude: List[Path]){
- def this(jarDestination: Path, content: Iterable[Path])= this(jarDestination, content, new Manifest, Nil)
- def this(jarDestination: Path, content: Iterable[Path], jarsToInclude: List[Path])= this(jarDestination, content, new Manifest, jarsToInclude)
-}
-
-trait Packaging extends Step {
- def packagingConfig: PackagingConfiguration
-}
-
-trait WrapperPackaging extends Packaging {
- self : WrapperStep =>
-
- def jarContent = {
- def getContent(list: List[Step], acc: List[Path]): List[Path] = list match {
- case Nil => acc
- case x :: xs => x match {
- case w: WrapperStep => getContent(xs, getContent(w.dependencies.toList, acc))
- case c: CompilationStep => getContent(xs, (c.outputDirectory ##) :: acc)
- }
- }
- getContent(dependencies.toList, Nil)
- }
-}
-
-/**
- * This trait is here to add the possiblity to have a different packing destination that is used right after the
- * compilation of the step has finished. It permits to have use libraries that are build using a plugin. (The plugin must
- * be a jar in order to be recognised by the compiler.
- */
-trait EarlyPackaging extends Packaging {
- self: CompilationStep =>
- //def earlyPackagingDestination: Path
- //def earlyJarDestination = earlyPackagingDestination / jarName
- def earlyPackagingConfig: PackagingConfiguration
-}
diff --git a/project/build/Partest.scala b/project/build/Partest.scala
deleted file mode 100755
index 7771c6f208..0000000000
--- a/project/build/Partest.scala
+++ /dev/null
@@ -1,370 +0,0 @@
-import sbt._
-import java.io.File
-import java.net.URLClassLoader
-import TestSet.{filter}
-
-class TestSet(val SType: TestSetType.Value, val kind: String, val description: String, val files: Array[File]){
- /**
- * @param a list of file that we want to know wheter they are members of the test set or not
- * @return two lists : the first contains files that are member of the test set, the second contains the files that aren't
- */
- def splitContent(f: List[File]):(List[File], List[File]) = {
- f.partition((f: File) => files.elements.exists((e: File) => f == e))
- }
-}
-
-object TestSet {
- def apply(sType: TestSetType.Value, kind: String, description: String, files: PathFinder)= new TestSet(sType, kind, description, filter(files))
- def filter(p: PathFinder): Array[File] =( p --- p **(HiddenFileFilter || GlobFilter("*.obj")||GlobFilter("*.log"))).getFiles.toArray
-}
-
-object TestSetType extends Enumeration {
- val Std, Continuations = Value
-}
-
-class TestConfiguration(val library: Path, val classpath: Iterable[Path], val testRoot: Path,
- val tests: List[TestSet], val junitReportDir: Option[Path]){
-}
-
-trait PartestRunner {
- self: BasicLayer with Packer =>
-
- import Partest.runTest
- import TestSetType._
-
- lazy val testRoot = projectRoot / "test"
- lazy val testFiles = testRoot / "files" ##
- lazy val testLibs = testFiles / "lib"
-
- lazy val posFilesTest = TestSet(Std,"pos", "Compiling files that are expected to build", testFiles / "pos" * ("*.scala" || DirectoryFilter))
- lazy val negFilesTest = TestSet(Std,"neg", "Compiling files that are expected to fail", testFiles / "neg" * ("*.scala" || DirectoryFilter))
- lazy val runFilesTest = TestSet(Std,"run", "Compiling and running files", testFiles / "run" * ("*.scala" || DirectoryFilter))
- lazy val jvmFilesTest = TestSet(Std,"jvm", "Compiling and running files", testFiles / "jvm" *("*.scala" || DirectoryFilter))
- lazy val resFilesTest = TestSet(Std,"res", "Running resident compiler scenarii", testFiles / "res" * ("*.res"))
- lazy val buildmanagerFilesTest = TestSet(Std,"buildmanager", "Running Build Manager scenarii", testFiles / "buildmanager" * DirectoryFilter)
- // lazy val scalacheckFilesTest = TestSet(Std,"scalacheck", "Running scalacheck tests", testFiles / "scalacheck" * ("*.scala" || DirectoryFilter))
- lazy val scriptFilesTest = TestSet(Std,"script", "Running script files", testFiles / "script" * ("*.scala"))
- lazy val shootoutFilesTest = TestSet(Std,"shootout", "Running shootout tests", testFiles / "shootout" * ("*.scala"))
- lazy val scalapFilesTest = TestSet(Std,"scalap", "Running scalap tests", testFiles / "scalap" * ("*.scala"))
- lazy val specializedFilesTest = TestSet(Std,"specialized", "Running specialized tests", testFiles / "specialized" * ("*.scala"))
-
- // lazy val negContinuationTest = TestSet(Continuations,"neg", "Compiling continuations files that are expected to fail", testFiles / "continuations-neg" * ("*.scala" || DirectoryFilter))
- // lazy val runContinuationTest = TestSet(Continuations,"run", "Compiling and running continuations files", testFiles / "continuations-run" ** ("*.scala" ))
- //
- // lazy val continuationScalaOpts = (
- // "-Xpluginsdir " +
- // continuationPluginConfig.packagingConfig.jarDestination.asFile.getParent +
- // " -Xplugin-require:continuations -P:continuations:enable"
- // )
-
- lazy val testSuiteFiles: List[TestSet] = List(
- posFilesTest, negFilesTest, runFilesTest, jvmFilesTest, resFilesTest,
- buildmanagerFilesTest,
- //scalacheckFilesTest,
- shootoutFilesTest, scalapFilesTest,
- specializedFilesTest
- )
- lazy val testSuiteContinuation: List[TestSet] = Nil // List(negContinuationTest, runContinuationTest)
-
- private lazy val filesTestMap: Map[String, TestSet] =
- Map(testSuiteFiles.map(s => (s.kind,s) ):_*)
- // + (("continuations-neg",negContinuationTest),("continuations-run", runContinuationTest))
-
- private lazy val partestOptions = List("-failed")
-
- private lazy val partestCompletionList: Seq[String] = {
- val len = testFiles.asFile.toString.length + 1
-
- filesTestMap.keys.toList ++ partestOptions ++
- (filesTestMap.values.toList flatMap (_.files) map (_.toString take len))
- }
-
- private def runPartest(tests: List[TestSet], scalacOpts: Option[String], failedOnly: Boolean) = {
-
- val config = new TestConfiguration(
- outputLibraryJar,
- (outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar +++ antJar +++ jlineJar +++ (testLibs * "*.jar")).get,
- testRoot,
- tests,
- None
- )
-
- val javaHome = Path.fromFile(new File(System.getProperty("java.home")))
- val java = Some(javaHome / "bin" / "java" asFile)
- val javac = Some(javaHome / "bin" / "javac" asFile)
- val timeout = Some("2400000")
- val loader = info.launcher.topLoader
-
- log.debug("Ready to run tests")
-
- if (tests.isEmpty) {
- log.debug("Empty test list")
- None
- }
- else runTest(
- loader, config, java, javac,
- scalacOpts, timeout, true, true,
- failedOnly, true, isDebug, log
- )
- }
-
- def partestDebugProp =
- if (isDebug) List("-Dpartest.debug=true")
- else Nil
-
- lazy val externalPartest = task { args =>
- task {
- if (isForked) partest(args).run
- else withJVMArgs(partestDebugProp ++ args: _*) {
- if (forkTasks("partest")) None
- else Some("Some tests failed.")
- }
- } dependsOn pack
- } completeWith partestCompletionList
-
- lazy val partest = task { args =>
- var failedOnly = false
-
- def setOptions(options: List[String], acc: List[String]): List[String] = options match {
- case "-failed" :: xs =>
- failedOnly = true
- log.info("Only tests that failed previously will be run")
- setOptions(xs, acc)
- case x :: xs =>
- setOptions(xs, x :: acc)
- case _ => acc
- }
-
- def resolveSets(l: List[String], rem: List[String], acc: List[TestSet]): (List[String], List[TestSet]) = {
- def searchSet(arg: String): Option[TestSet] = filesTestMap get arg
-
- l match {
- case x :: xs => searchSet(x) match {
- case Some(s) => resolveSets(xs, rem, s :: acc)
- case None => resolveSets(xs, x :: rem, acc)
- }
- case Nil => (rem, acc)
- }
- }
-
- def resolveFiles(l: List[String], sets: List[TestSet]):(List[String], List[TestSet]) = {
- def resolve0(filesToResolve: List[File], setsToSearchIn: List[TestSet], setAcc: List[TestSet]):(List[String], List[TestSet])= {
- filesToResolve match {
- case Nil => (Nil, setAcc) // If we have no files left to resolve, we can return the list of the set we have
- case list => {
- setsToSearchIn match {
- case Nil => (list.map(_.toString), setAcc)// If we already had search all sets to find a match, we return the list of the files that where problematic and the set we have
- case x :: xs => {
- val (found, notFound)= x.splitContent(list)
- if(!found.isEmpty){
- val newSet = new TestSet(x.SType, x.kind, x.description, found.toArray)
- resolve0(notFound, xs, newSet :: setAcc)
- } else {
- resolve0(notFound, xs, setAcc)
- }
- }
- }
- }
- }
-
- }
-
- resolve0(l.map(Path.fromString(testFiles, _).asFile), filesTestMap.values.toList, sets)
- }
-
- val keys = setOptions(args.toList, Nil)
-
- if (keys.isEmpty) {
- task { runPartest(testSuiteFiles, None, failedOnly) }
- }
- else {
- val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- val (notFound, allSets) = resolveFiles(fileNames, sets)
- if (!notFound.isEmpty)
- log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
-
- task { runPartest(allSets, None, failedOnly) }
- }
- // if (keys.length == 0) task {
- // runPartest(testSuiteFiles, None, failedOnly) orElse {
- // runPartest(testSuiteContinuation, None, failedOnly)
- // } // this is the case where there were only config options, we will run the standard test suite
- // }
- // else {
- // val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- // val (notFound, allSets) = resolveFiles(fileNames, sets)
- // if (!notFound.isEmpty)
- // log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
- //
- // val (std, continuations) = allSets partition (_.SType == TestSetType.Std)
- // task {
- // runPartest(std, None, failedOnly) orElse {
- // runPartest(continuations, Some(continuationScalaOpts), failedOnly)
- // }
- // }
- // }
- }.completeWith(partestCompletionList)
-
-}
-
-object Partest {
- def runTest(
- parentLoader: ClassLoader,
- config: TestConfiguration,
- javacmd: Option[File],
- javaccmd: Option[File],
- scalacOpts: Option[String],
- timeout: Option[String],
- showDiff: Boolean,
- showLog: Boolean,
- runFailed: Boolean,
- errorOnFailed: Boolean,
- debug: Boolean,
- log: Logger
- ): Option[String] = {
-
- if (debug)
- log.setLevel(Level.Debug)
-
- if (config.classpath.isEmpty)
- return Some("The classpath is empty")
-
- log.debug("Classpath is "+ config.classpath)
-
- val classloader = new URLClassLoader(
- Array(config.classpath.toSeq.map(_.asURL):_*),
- ClassLoader.getSystemClassLoader.getParent
- )
- val runner: AnyRef =
- classloader.loadClass("scala.tools.partest.nest.SBTRunner").newInstance().asInstanceOf[AnyRef]
- val fileManager: AnyRef =
- runner.getClass.getMethod("fileManager", Array[Class[_]](): _*).invoke(runner, Array[Object](): _*)
-
- val runMethod =
- runner.getClass.getMethod("reflectiveRunTestsForFiles", Array(classOf[Array[File]], classOf[String]): _*)
-
- def runTestsForFiles(kindFiles: Array[File], kind: String) = {
- val result = runMethod.invoke(runner, Array(kindFiles, kind): _*).asInstanceOf[java.util.HashMap[String, Int]]
- scala.collection.jcl.Conversions.convertMap(result)
- }
-
- def setFileManagerBooleanProperty(name: String, value: Boolean) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[Boolean]): _*)
- setMethod.invoke(fileManager, Array(java.lang.Boolean.valueOf(value)).asInstanceOf[Array[Object]]: _*)
- }
-
- def setFileManagerStringProperty(name: String, value: String) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[String]): _*)
- setMethod.invoke(fileManager, Array(value).asInstanceOf[Array[Object]]: _*)
- }
-
- // System.setProperty("partest.srcdir", "files")
-
- setFileManagerBooleanProperty("showDiff", showDiff)
- setFileManagerBooleanProperty("showLog", showLog)
- setFileManagerBooleanProperty("failed", runFailed)
- if (!javacmd.isEmpty)
- setFileManagerStringProperty("JAVACMD", javacmd.get.getAbsolutePath)
- if (!javaccmd.isEmpty)
- setFileManagerStringProperty("JAVAC_CMD", "javac")
- setFileManagerStringProperty("CLASSPATH", (config.classpath.map(_.absolutePath).mkString(File.pathSeparator)))
- setFileManagerStringProperty("LATEST_LIB", config.library.absolutePath)
- setFileManagerStringProperty("SCALAC_OPTS", scalacOpts getOrElse "")
-
- if (!timeout.isEmpty)
- setFileManagerStringProperty("timeout", timeout.get)
-
- type TFSet = (Array[File], String, String)
-
- val testFileSets = config.tests
-
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == 0) partition (_ == true)
- def count(i: Iterable[_]): Int ={
- var c = 0
- for (elem <-i) yield {
- c = c+1
- }
- c
- }
- (count(files), count(failures))
- }
-
-
- def runSet(set: TestSet): (Int, Int, Iterable[String]) = {
- val (files, name, msg) = (set.files, set.kind, set.description)
- log.debug("["+name+"] "+ msg+files.mkString(", files :\n","\n",""))
- if (files.isEmpty) {
- log.debug("No files !")
- (0, 0, List())
- }
- else {
- log.info(name +" : "+ msg)
- val results: Iterable[(String, Int)] = runTestsForFiles(files, name)
- val (succs, fails) = resultsToStatistics(results)
-
- val failed: Iterable[String] = results.filter( _._2!=0) map(_ match {
- case (path, 1) => path + " [FAILED]"
- case (path, 2) => path + " [TIMOUT]"
- })
-
- val r =(succs, fails, failed)
-
- config.junitReportDir match {
- case Some(d) => {
- val report = testReport(name, results, succs, fails)
- scala.xml.XML.save(d/name+".xml", report)
- }
- case None =>
- }
-
- r
- }
- }
-
- val _results = testFileSets map runSet
- val allSuccesses = _results.map (_._1).foldLeft(0)( _ + _ )
- val allFailures = _results.map (_._2).foldLeft(0)( _ + _ )
- val allFailedPaths = _results flatMap (_._3)
-
- def f(msg: String): Option[String] =
- if (errorOnFailed && allFailures > 0) {
- Some(msg)
- }
- else {
- log.info(msg)
- None
- }
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0) "Test suite finished with %d case%s failing.\n".format(allFailures, s)+ allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
-
- }
-
- private def oneResult(res: (String, Int)) =
- <testcase name ={res._1}>{
- res._2 match {
- case 0 => scala.xml.NodeSeq.Empty
- case 1 => <failure message="Test failed"/>
- case 2 => <failure message="Test timed out"/>
- }
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
- <testsuite name ={kind} tests ={(succs + fails).toString} failures ={fails.toString}>
- <properties/>
- {
- results.map(oneResult(_))
- }
- </testsuite>
-
-
-}
diff --git a/project/build/PathConfig.scala b/project/build/PathConfig.scala
deleted file mode 100644
index 3ed56846f9..0000000000
--- a/project/build/PathConfig.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import sbt._
-
-/**
- * An abstract class for grouping all different paths that are needed to
- * compile the a CompilationStep
- * @author Grégory Moix
- */
-abstract class PathConfig {
- def projectRoot: Path
- def sources: Path
- def analysis: Path
- def output: Path
-}
-
-object PathConfig {
- val classes = "classes"
- val analysis = "analysis"
-}
-
-trait SimpleOutputLayout {
- def outputDir: Path
- lazy val classesOutput = outputDir / PathConfig.classes
- lazy val analysisOutput = outputDir / PathConfig.analysis
-
-}
-
-class PathLayout(val projectRoot: Path, val outputDir: Path) extends SimpleOutputLayout {
- lazy val srcDir = projectRoot / "src"
- /**
- * An utility method to easily create StandardPathConfig from a given path layout
- */
- def /(name: String)= new StandardPathConfig(this, name)
-}
-
-/**
- *
- */
-class StandardPathConfig(layout: PathLayout, name: String) extends PathConfig {
- lazy val projectRoot = layout.projectRoot
- lazy val sources = layout.srcDir / name
- lazy val analysis = layout.analysisOutput / name
- lazy val output = layout.classesOutput / name
-}
diff --git a/project/build/SVN.scala b/project/build/SVN.scala
deleted file mode 100644
index 427469eb64..0000000000
--- a/project/build/SVN.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-
-/**
- * @param root the root of an svn repository
- * @author Moix Grégory
- */
-class SVN(root: Path) {
- /** Location of tool which parses svn revision in git-svn repository. */
- val GitSvnRevTool = root / "tools" / "get-scala-revision"
- val GitSvnRegex = """^Revision:\s*(\d+).*""".r
-
- /**
- * Gets the revision number of the repository given through the constructor of the class
- * It assumes that svn or git is installed on the running computer. Return 0 if it was not
- * able to found the revision number
- */
- def getRevisionNumber: Int = getSvn orElse getGit getOrElse 0
- def getSvn: Option[Int] = {
- /** Doing this the hard way trying to suppress the svn error message
- * on stderr. Could not figure out how to do it simply in sbt.
- */
- val pb = new java.lang.ProcessBuilder("svn", "info")
- pb directory root.asFile
- pb redirectErrorStream true
-
- Process(pb).lines_! foreach {
- case GitSvnRegex(rev) => return Some(rev.toInt)
- case _ => ()
- }
- None
- }
-
- def getGit: Option[Int] =
- try { Some(Process(GitSvnRevTool.toString, root).!!.trim.toInt) }
- catch { case _: Exception => None }
-}
diff --git a/project/build/ScalaBuildProject.scala b/project/build/ScalaBuildProject.scala
deleted file mode 100644
index 250ad7a429..0000000000
--- a/project/build/ScalaBuildProject.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-
-
-abstract class ScalaBuildProject extends Project {
- lazy val projectRoot = info.projectPath
- lazy val layerOutput = outputRootPath / name
- lazy val pathLayout = new PathLayout(projectRoot, layerOutput)
-
- lazy val manifestPath = projectRoot/"META-INF"/"MANIFEST.MF"
-
- lazy val lib = projectRoot / "lib"
- lazy val forkJoinJar = lib / forkjoinJarName
- lazy val jlineJar = lib / jlineJarName
- lazy val antJar = lib / "ant" / "ant.jar"
- lazy val fjbgJar = lib / fjbgJarName
- lazy val msilJar = lib / msilJarName
-
-}
-
-object ScalaBuildProject {
- // Some path definitions related strings
- val compilerJarName = "scala-compiler.jar"
- val libraryJarName = "scala-library.jar"
- val scalacheckJarName = "scalacheck.jar"
- val scalapJarName = "scalap.jar"
- val dbcJarName = "scala-dbc.jar"
- val swingJarName = "scala-swing.jar"
- val partestJarName = "scala-partest.jar"
- val fjbgJarName = "fjbg.jar"
- val msilJarName = "msil.jar"
- val jlineJarName = "jline.jar"
- val forkjoinJarName = "forkjoin.jar"
-
-
-}
diff --git a/project/build/ScalaSBTBuilder.scala b/project/build/ScalaSBTBuilder.scala
deleted file mode 100644
index 881e8c8452..0000000000
--- a/project/build/ScalaSBTBuilder.scala
+++ /dev/null
@@ -1,362 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-import ScalaSBTBuilder._
-
-/**
- * This class is the entry point for building scala with SBT.
- * @author Grégory Moix
- */
-class ScalaSBTBuilder(val info: ProjectInfo)
- extends Project
- with ReflectiveProject
- with BasicDependencyProject
- // with IdeaProject
- with MavenStyleScalaPaths {
- /** This secret system property turns off transitive dependencies during change
- * detection. It's a short term measure. BE AWARE! That means you can no longer
- * trust sbt to recompile everything: it's only recompiling changed files.
- * (The alternative is that adding a space to TraversableLike incurs a 10+ minute
- * incremental build, which means sbt doesn't get used at all, so this is better.)
- */
- System.setProperty("sbt.intransitive", "true")
-
- // Required by BasicDependencyProject
- def fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath
-
- override def dependencies: Iterable[Project] = (
- info.dependencies ++
- locker.dependencies ++
- quick.dependencies ++
- strap.dependencies ++
- libs.dependencies
- )
- override def shouldCheckOutputDirectories = false
-
- // Support of triggered execution at top level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
-
- // Top Level Tasks
- lazy val buildFjbg = libs.buildFjbg.describedAs(buildFjbgTaskDescription)
- lazy val buildForkjoin = libs.buildForkjoin.describedAs(buildForkjoinTaskDescription)
- lazy val buildMsil = libs.buildMsil.describedAs(buildMislTaskDescription)
- lazy val clean = quick.clean.dependsOn(libs.clean).describedAs(cleanTaskDescription)
- lazy val cleanAll = locker.clean.dependsOn(libs.clean).describedAs(cleanAllTaskDescription)
- lazy val compile = task {None}.dependsOn(quick.binPack, quick.binQuick).describedAs(buildTaskDescription)
- lazy val docs = quick.scaladoc.describedAs(docsTaskDescription)
- lazy val newFjbg = libs.newFjbg.describedAs(newFjbgTaskDescription)
- lazy val newForkjoin = libs.newForkjoin.describedAs(newForkjoinTaskDescription)
- lazy val newLocker = locker.newLocker.describedAs(newLockerTaskDescription)
- lazy val newMsil = libs.newMsil.describedAs(newMsilTaskDescription)
- lazy val newStarr = quick.newStarr.describedAs(newStarrTaskDescription)
- lazy val palo = locker.pack.describedAs(paloTaskDescription)
- lazy val pasta = quick.pasta.describedAs(pastaTaskDescription)
- lazy val stabilityTest = strap.stabilityTest.describedAs(stabilityTestTaskDescription)
- lazy val test = quick.externalPartest.describedAs(partestTaskDescription)
-
- // Non-standard names for tasks chosen earlier which I point at the standard ones.
- lazy val build = compile
- lazy val partest = test
-
- // Top level variables
-
- /**
- * The version number of the compiler that will be created by the run of sbt. It is initialised once
- * the first time it is needed, meaning that this number will be kept
- * until sbt quit.
- */
- lazy val versionNumber: String ={
- def getTimeString: String = {
- import java.util.Calendar;
- import java.text.SimpleDateFormat;
- val formatString = "yyyyMMddHHmmss"
- new SimpleDateFormat(formatString) format Calendar.getInstance.getTime
- }
- def getVersion: String = projectVersion.value.toString takeWhile (_ != '-') mkString
- def getRevision: Int = new SVN(info.projectPath) getRevisionNumber
-
- getVersion+".r"+getRevision+"-b"+getTimeString
- }
-
- /* LAYER DEFINITIONS
- * We define here what's specific to each layer are they differ.
- * The common behavior is defined in the BasicLayer class
- * It is important that the class that extends BasicLayer are inner classes of ScalaSBTBuilder. If not, SBT will
- * not know what the main project definition is, as it will find many classes that extends Project
- */
-
- lazy val locker = project(info.projectPath,"locker", new LockerLayer(_))
- lazy val quick = project(info.projectPath,"quick", new QuickLayer(_, locker))
- lazy val strap = project(info.projectPath,"strap", new StrapLayer(_, quick))
- lazy val libs = project(info.projectPath,"libs", new LibsBuilder(_))
-
-
- /**
- * Definition of what is specific to the locker layer. It implements SimplePacker in order to
- * be able to create palo (packed locker)
- */
- class LockerLayer(info: ProjectInfo) extends BasicLayer(info, versionNumber, None) with Packer {
-
-
- override lazy val nextLayer = Some(quick)
- lazy val instantiationCompilerJar = lib / compilerJarName
- lazy val instantiationLibraryJar = lib / libraryJarName
- lazy val lockFile = layerOutput / "locker.lock"
-
- /**
- * We override the definition of the task method in order to make the tasks of this layer
- * be executed only if the layer is not locked. Task of this layer that should be executed
- * whether the layer is locked or not should call super.task instead
- */
- override def task(action : => Option[String])=
- super.task {
- if (lockFile.exists) {
- log.info(name +" is locked")
- None
- }
- else action
- }
-
- def deleteLock = FileUtilities.clean(lockFile, log)
- def createLock = {
- log.info("locking "+name)
- FileUtilities.touch(lockFile, log)
- }
-
- /**
- * Task for locking locker
- */
- lazy val lock = super.task {
- createLock
- }
-
- /**
- * Task for unlocking locker
- */
- lazy val unlock = super.task {
- deleteLock
- }
-
- lazy val newLocker = super.task {
- createNewLocker
- }
- def createNewLocker = {
- deleteLock orElse
- buildLayer orElse
- createLock
- }
-
-
- /**
- * Making locker being locked when it has finished building
- */
- override lazy val finishLayer = lock.dependsOn(build)
-
- override lazy val pack = super.task {packF}.dependsOn(finishLayer)
-
-
- override lazy val packingDestination: Path = outputRootPath /"palo"
-
- override lazy val libraryWS = {
- new WrapperStep(libraryConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent)
- }
- }
- override val minimalCompilation = true
- override lazy val pluginsWS: WrapperStep = new WrapperStep(Nil)
- override lazy val toolsWS = new WrapperStep(Nil)
- }
-
-
- /**
- * Definition of what is specific to the quick layer. It implements Packer in order to create pack, ScalaTools
- * for creating the binaries and Scaladoc to generate the documentation
- */
- class QuickLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) with PartestRunner
- with Packer with ScalaTools with Scaladoc {
-
- override lazy val nextLayer = Some(strap)
-
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
-
-
- override lazy val packingDestination: Path = outputRootPath/ "pack"
-
- override def libraryToCopy = jlineJar :: Nil
- override def compilerAdditionalJars = msilJar :: fjbgJar :: Nil
- override def libraryAdditionalJars = forkJoinJar :: Nil
-
- override def cleaningList = packedStarrOutput :: super.cleaningList
-
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with Packaging {
- def jarContent = List(libraryConfig , actorsConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val starrJarContent = List(libraryConfig , actorsConfig, dbcConfig, swingConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent, libraryAdditionalJars)
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/libraryJarName, starrJarContent)
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
- // An additional task for building only the library of quick
- // Used for compiling msil
- lazy val compileLibraryOnly = task {
- compile(libraryConfig, cleanCompilation _)
- }
- lazy val externalCompileLibraryOnly = task(maybeFork(compileLibraryOnly)) dependsOn startLayer
-
- def createNewStarrJar: Option[String] ={
- import Packer._
- createJar(libraryWS.starrPackagingConfig, log) orElse
- createJar(compilerConfig.starrPackagingConfig, log)
- }
- lazy val pasta = task {
- createNewStarrJar
- }.dependsOn(build)
-
- lazy val newStarr = task {
- val files = (packedStarrOutput ##) * "*.jar"
- FileUtilities.copy(files.get, lib, true, log) match {
- case Right(_) => None
- case Left(_) => Some("Error occured when copying the new starr to its destination")
- }
-
- }.dependsOn(pasta)
-
- /*
- * Defining here the creation of the binaries for quick and pack
- */
- private lazy val quickBinClasspath = libraryOutput :: actorsOutput :: dbcOutput :: swingOutput :: compilerOutput :: scalapOutput :: forkJoinJar :: fjbgJar :: msilJar :: jlineJar :: Nil
- private lazy val packBinClasspath = Nil
- lazy val binQuick = tools(layerOutput / "bin", quickBinClasspath).dependsOn(finishLayer)
- lazy val binPack = tools(packingDestination / "bin", packBinClasspath).dependsOn(pack)
- }
-
-
- /**
- * Definition of what is specific to the strap layer
- */
- class StrapLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) {
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
- private val quick = previous
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, Set())
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
-
- def compare = {
- import PathConfig.classes
- def filter(path: Path)= path.descendentsExcept(AllPassFilter, HiddenFileFilter || "*.properties")
- Comparator.compare(quick.pathLayout.outputDir/classes ##, this.pathLayout.outputDir/classes ##, filter _ , log)
- }
-
- lazy val stabilityTest = task {
- log.warn("Stability test must be run on a clean build in order to yield correct results.")
- compare
- }.dependsOn(finishLayer)
- }
-
- /**
- * An additional subproject used to build new version of forkjoin, fjbg and msil
- */
- class LibsBuilder(val info: ProjectInfo) extends ScalaBuildProject with ReflectiveProject with Compilation with BuildInfoEnvironment {
- override def dependencies = info.dependencies
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" ||AdditionalResources.basicFilter) // Support of triggered execution at project level
-
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- def instantiationCompilerJar: Path = locker.compilerOutput
- def instantiationLibraryJar: Path = locker.libraryOutput
-
- def libsDestination = layerOutput
-
- lazy val checkJavaVersion = task {
- val version = System.getProperty("java.version")
- log.debug("java.version ="+version)
- val required = "1.6"
- if (version.startsWith(required)) None else Some("Incompatible java version : required "+required)
- }
-
-
- private def simpleBuild(step: CompilationStep with Packaging)= task {
- import Packer._
- compile(step) orElse createJar(step, log)
- }.dependsOn(locker.finishLayer)
-
- private def copyJar(step: CompilationStep with Packaging, name: String) = task {
- FileUtilities.copyFile(step.packagingConfig.jarDestination, lib/name, log)
- }
-
- lazy val newForkjoin = copyJar(forkJoinConfig, forkjoinJarName).dependsOn(buildForkjoin)
- lazy val buildForkjoin = simpleBuild(forkJoinConfig).dependsOn(checkJavaVersion)
- lazy val newFjbg = copyJar(fjbgConfig, fjbgJarName).dependsOn(buildFjbg)
- lazy val buildFjbg = simpleBuild(fjbgConfig)
- lazy val newMsil = copyJar(msilConfig, msilJarName).dependsOn(buildMsil)
- // TODO As msil contains scala files, maybe needed compile it with an ExternalSBTRunner
- lazy val buildMsil = simpleBuild(msilConfig).dependsOn(quick.externalCompileLibraryOnly)
-
- lazy val forkJoinConfig = new CompilationStep("forkjoin", pathLayout, log) with Packaging {
- def label = "new forkjoin library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.5","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/forkjoinJarName, List(outputDirectory ##))
- }
-
- lazy val fjbgConfig = new CompilationStep("fjbg", pathLayout, log) with Packaging {
- def label = "new fjbg library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/fjbgJarName, List(outputDirectory ##))
-
- }
-
- lazy val msilConfig = new CompilationStep("msil", pathLayout, log) with Packaging {
- def label = "new msil library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java" |"*.scala", ".svn" |"tests")
- def dependencies = Seq()
- override def classpath = super.classpath +++ quick.libraryOutput
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/msilJarName, List(outputDirectory ##))
-
- }
-
- def cleaningList = layerOutput :: layerEnvironment.envBackingPath :: Nil
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- lazy val clean: Task = task {cleanFiles}// We use super.task, so cleaning is done in every case, even when locked
-
- }
-}
-object ScalaSBTBuilder {
- val buildTaskDescription = "build locker, lock it, build quick and create pack. It is the equivalent command to 'ant build'."
- val cleanTaskDescription = "clean the outputs of quick and strap. locker remains untouched."
- val cleanAllTaskDescription = "same as clean, but in addition clean locker too."
- val docsTaskDescription = "generate the scaladoc"
- val partestTaskDescription = "run partest"
- val stabilityTestTaskDescription = "run stability testing. It is required to use a clean build (for example, execute the clean-all action) in order to ensure correctness of the result."
- val paloTaskDescription = "create palo"
- val pastaTaskDescription = "create all the jar needed to make a new starr from quick (pasta = packed starr). It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/pasta"
- val newStarrTaskDescription = "create a new starr and replace the library and compiler jars in the libs folder. It will keep locker locked, meaning that if you want to update locker after updating starr, you must run the 'new-locker' command. It will not automatically run partest and stability testing before replacing."
- val newLockerTaskDescription = "replace locker. It will build a new locker. It does not automatically rebuild quick."
- val buildForkjoinTaskDescription = "create all the jar needed to make a new forkjoin. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newForkjoinTaskDescription = "create a new forkjoin and replace the corresponding jar in the libs folder."
- val buildFjbgTaskDescription = "create all the jar needed to make a new fjbg. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newFjbgTaskDescription = "create a new fjbg and replace the corresponding jar in the libs folder."
- val buildMislTaskDescription = "create all the jar needed to make a new msil. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newMsilTaskDescription = "create a msil and replace the corresponding jar in the libs folder."
-}
diff --git a/project/build/ScalaTools.scala b/project/build/ScalaTools.scala
deleted file mode 100644
index d74639d63a..0000000000
--- a/project/build/ScalaTools.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-import java.io.{FileInputStream, File, InputStream, FileWriter}
-import sbt._
-import scala.io._
-
-/**
- * Create the scala binaries
- * Based on scala.tools.ant.ScalaTool
- * @author Grégory Moix (for the sbt adaptation)
- */
-trait ScalaTools {
- self: BasicLayer =>
-
- lazy val templatesLocation = compilerConfig.srcDir/ "scala" / "tools" / "ant" / "templates"
- lazy val unixTemplate = templatesLocation / "tool-unix.tmpl"
- lazy val winTemplate = templatesLocation / "tool-windows.tmpl"
-
-
- // XXX encoding and generalize
- private def getResourceAsCharStream(resource: Path): Stream[Char] = {
- val stream = new FileInputStream(resource.asFile)
- def streamReader(): Stream[Char] = stream.read match {
- case -1 => Stream.empty
- case value => Stream.cons(value.asInstanceOf[Char], streamReader())
-
- }
- if (stream == null) {
- log.debug("Stream was null")
- Stream.empty
- }
-
- //else Stream continually stream.read() takeWhile (_ != -1) map (_.asInstanceOf[Char]) // Does not work in scala 2.7.7
- else streamReader
- }
-
-
- // Converts a variable like @SCALA_HOME@ to ${SCALA_HOME} when pre = "${" and post = "}"
- private def transposeVariableMarkup(text: String, pre: String, post: String) : String = {
- val chars = Source.fromString(text)
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (token.toString == "")
- builder.append('@')
- else
- builder.append(pre + token.toString + post)
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def readAndPatchResource(resource: Path, tokens: Map[String, String]): String = {
- val chars = getResourceAsCharStream(resource).elements
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (tokens.contains(token.toString))
- builder.append(tokens(token.toString))
- else if (token.toString == "")
- builder.append('@')
- else
- builder.append("@" + token.toString + "@")
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def writeFile(file: File, content: String, makeExecutable: Boolean): Option[String] =
- if (file.exists() && !file.canWrite())
- Some("File " + file + " is not writable")
- else {
- val writer = new FileWriter(file, false)
- writer.write(content)
- writer.close()
- file.setExecutable(makeExecutable)
- None
- }
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getUnixclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}")
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getWinclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%")
-
- /** Performs the tool creation of a tool with for a given os
- * @param file
- * @param mainClas
- * @param properties
- * @param javaFlags
- * @param toolFlags
- * @param classPath
- * @param template
- * @param classpathFormater
- */
- private def tool(template: Path, classpathFormater: List[String] => String, file: Path, mainClass: String,
- properties: String, javaFlags: String, toolFlags: String, classPath: List[Path], makeExecutable: Boolean): Option[String] = {
- val patches = Map (
- ("class", mainClass),
- ("properties", properties),
- ("javaflags", javaFlags),
- ("toolflags", toolFlags),
- ("classpath", classpathFormater(classPath.map(_.absolutePath)))
- )
-
- val result = readAndPatchResource(template, patches)
- writeFile(file.asFile, result, makeExecutable)
-
- }
- private def generateTool(config: ToolConfiguration): Option[String] =
- generateTool(config.toolName, config.destination, config.mainClass, config.properties, config.javaFlags, config.toolFlags, config.classPath)
-
- private def generateTool(toolName: String, destination: Path, mainClass: String, properties: String, javaFlags: String, toolFlags: String, classPath: List[Path]): Option[String] ={
- val unixFile = destination / toolName
- val winFile = destination /(toolName + ".bat")
- tool(unixTemplate, getUnixclasspath, unixFile, mainClass, properties, javaFlags, toolFlags, classPath, true) orElse
- tool(winTemplate, getWinclasspath, winFile, mainClass, properties, javaFlags, toolFlags, classPath, false)
- }
-
-
- /*============================================================================*\
- ** Definition of the different tools **
- \*============================================================================*/
- private val defaultJavaFlags = "-Xmx256M -Xms32M"
-
- /**
- * A class that holds the different parameters of a tool
- */
- class ToolConfiguration(val toolName: String, val destination: Path, val mainClass: String, val properties: String, val javaFlags: String, val toolFlags: String, val classPath: List[Path])
-
- /**
- * Generate all tools
- * @param destination Root folder where all the binaries will be written
- * @param classpath Should be specified when you want to use a specific classpath, could be Nil if you want
- * to make the bin use what is in the lib folder of the distribution.
- */
- def tools(destination: Path, classpath: List[Path]) = task {
- val scala = new ToolConfiguration("scala", destination, "scala.tools.nsc.MainGenericRunner", "",defaultJavaFlags, "", classpath)
- val scalac = new ToolConfiguration("scalac", destination, "scala.tools.nsc.Main", "",defaultJavaFlags, "", classpath)
- val scaladoc = new ToolConfiguration("scaladoc",destination,"scala.tools.nsc.ScalaDoc", "",defaultJavaFlags,"", classpath)
- val fsc = new ToolConfiguration("fsc", destination,"scala.tools.nsc.CompileClient", "",defaultJavaFlags, "", classpath)
- val scalap = new ToolConfiguration("scalap",destination, "scala.tools.scalap.Main", "",defaultJavaFlags, "", classpath)
-
-
- val toolList = scala :: scalac :: scaladoc :: fsc :: scalap :: Nil
-
- def process(list: List[ToolConfiguration]): Option[String] = list match {
- case x :: xs => {
- log.debug("Generating "+x.toolName+" bin")
- generateTool(x) orElse process(xs)
- }
- case Nil => None
-
- }
- FileUtilities.createDirectory(destination, log)
- process(toolList)
-
- }
-}
-
-
diff --git a/project/build/Scaladoc.scala b/project/build/Scaladoc.scala
deleted file mode 100644
index 39bcb5226e..0000000000
--- a/project/build/Scaladoc.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-import sbt._
-import xsbt.AnalyzingCompiler
-
-trait Scaladoc {
- self: BasicLayer with Packer =>
-
- lazy val documentationDestination = outputRootPath / "scaladoc"
- lazy val libraryDocumentationDestination = documentationDestination / "library"
- lazy val compilerDocumentationDestination = documentationDestination / "compiler"
- lazy val libraryDoc = {
- val reflect = librarySrcDir / "scala" / "reflect"
- val runtime = librarySrcDir / "scala" / "runtime"
-
- ((librarySrcDir +++ actorsSrcDir +++ swingSrcDir)**("*.scala")---
- reflect / "Code.scala" ---
- reflect / "Manifest.scala" ---
- reflect / "Print.scala" ---
- reflect / "Symbol.scala" ---
- reflect / "Tree.scala" ---
- reflect / "Type.scala" ---
- reflect / "TypedCode.scala" ---
- runtime /"ScalaRunTime.scala" ---
- runtime / "StreamCons.scala" ---
- runtime / "StringAdd.scala" ---
- runtime * ("*$.scala") ---
- runtime *("*Array.scala")
- )
-
- }
- lazy val compilerDoc = {
- compilerSrcDir **("*.scala")
- }
- lazy val classpath ={
- (antJar +++ jlineJar +++ msilJar +++ fjbgJar +++ forkJoinJar +++ outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar ).get
-
- }
- lazy val scaladoc = task(maybeFork(generateScaladoc, "Error generating scaladoc")) dependsOn pack
-
- lazy val generateScaladoc = task {
- instanceScope[Option[String]]{ scala =>
- lazy val compiler = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
- val docGenerator = new sbt.Scaladoc(50, compiler)
- docGenerator("Scala "+ versionNumber+" API", libraryDoc.get, classpath, libraryDocumentationDestination, Seq(), log) orElse
- docGenerator("Scala Compiler"+ versionNumber+" API", compilerDoc.get, classpath, compilerDocumentationDestination, Seq(), log)
- }
- }
-
-}
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000000..b49ece7527
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,9 @@
+resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
+
+resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
+
+resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
+
+libraryDependencies += "net.databinder" %% "dispatch-http" % "0.8.6"
+
+
diff --git a/project/plugins/Plugins.scala b/project/plugins/Plugins.scala
deleted file mode 100644
index 15ee162329..0000000000
--- a/project/plugins/Plugins.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import sbt._
-
-class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
- val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
- val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.2.0"
-} \ No newline at end of file
diff --git a/project/project/Build.scala b/project/project/Build.scala
new file mode 100644
index 0000000000..1ceb7e2ef2
--- /dev/null
+++ b/project/project/Build.scala
@@ -0,0 +1,7 @@
+import sbt._
+object PluginDef extends Build {
+ override def projects = Seq(root)
+ lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
+ lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git")
+ lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
+}
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index 8d0379c095..090d0448f0 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -12,12 +12,16 @@ package scala.actors
/**
* @author Philipp Haller
+ * @note This class inherits a public var called 'msg' from ReactorTask,
+ * and also defines a constructor parameter which shadows it (which makes any
+ * changes to the underlying var invisible.) I can't figure out what's supposed
+ * to happen, so I renamed the constructor parameter to at least be less confusing.
*/
private[actors] class ActorTask(actor: Actor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
- msg: Any)
- extends ReplyReactorTask(actor, fun, handler, msg) {
+ initialMsg: Any)
+ extends ReplyReactorTask(actor, fun, handler, initialMsg) {
protected override def beginExecution() {
super.beginExecution()
@@ -31,8 +35,11 @@ private[actors] class ActorTask(actor: Actor,
val senderInfo = try { Some(actor.sender) } catch {
case _: Exception => None
}
+ // !!! If this is supposed to be setting the current contents of the
+ // inherited mutable var rather than always the value given in the constructor,
+ // then it should be changed from initialMsg to msg.
val uncaught = UncaughtException(actor,
- if (msg != null) Some(msg) else None,
+ if (initialMsg != null) Some(initialMsg) else None,
senderInfo,
Thread.currentThread,
e)
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
index 1db722f89b..cb63d7e000 100644
--- a/src/actors/scala/actors/ReplyReactorTask.scala
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -12,18 +12,25 @@ package scala.actors
/**
* @author Philipp Haller
+ * @note This class inherits a public var called 'reactor' from ReactorTask,
+ * and also defines a constructor parameter which shadows it (which makes any
+ * changes to the underlying var invisible.) I can't figure out what's supposed
+ * to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ReplyReactorTask(reactor: ReplyReactor,
+private[actors] class ReplyReactorTask(replyReactor: ReplyReactor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
msg: Any)
- extends ReactorTask(reactor, fun, handler, msg) {
+ extends ReactorTask(replyReactor, fun, handler, msg) {
var saved: ReplyReactor = _
protected override def beginExecution() {
saved = Actor.tl.get
- Actor.tl set reactor
+ // !!! If this is supposed to be setting the current contents of the
+ // inherited mutable var rather than always the value given in the constructor,
+ // then it should be changed to "set reactor".
+ Actor.tl set replyReactor
}
protected override def suspendExecution() {
diff --git a/src/compiler/scala/reflect/internal/Chars.scala b/src/compiler/scala/reflect/internal/Chars.scala
index 7bd37618ed..f2c90a6721 100644
--- a/src/compiler/scala/reflect/internal/Chars.scala
+++ b/src/compiler/scala/reflect/internal/Chars.scala
@@ -21,27 +21,31 @@ trait Chars {
final val SU = '\u001A'
/** Convert a character digit to an Int according to given base,
- * -1 if no success */
+ * -1 if no success
+ */
def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
+ val num = (
+ if (ch <= '9') ch - '0'
+ else if ('a' <= ch && ch <= 'z') ch - 'a' + 10
+ else if ('A' <= ch && ch <= 'Z') ch - 'A' + 10
+ else -1
+ )
+ if (0 <= num && num < base) num else -1
}
+ /** Buffer for creating '\ u XXXX' strings. */
+ private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0)
/** Convert a character to a backslash-u escape */
def char2uescape(c: Char): String = {
- var rest = c.toInt
- val buf = new StringBuilder
- for (i <- 1 to 4) {
- buf ++= (rest % 16).toHexString
- rest = rest / 16
- }
- "\\u" + buf.toString.reverse
+ @inline def hexChar(ch: Int): Char =
+ ( if (ch < 10) '0' else 'A' - 10 ) + ch toChar
+
+ char2uescapeArray(2) = hexChar((c >> 12) )
+ char2uescapeArray(3) = hexChar((c >> 8) % 16)
+ char2uescapeArray(4) = hexChar((c >> 4) % 16)
+ char2uescapeArray(5) = hexChar((c ) % 16)
+
+ new String(char2uescapeArray)
}
/** Is character a line break? */
diff --git a/src/compiler/scala/reflect/internal/Constants.scala b/src/compiler/scala/reflect/internal/Constants.scala
index 2edb0d1fe6..9c4b2b2245 100644
--- a/src/compiler/scala/reflect/internal/Constants.scala
+++ b/src/compiler/scala/reflect/internal/Constants.scala
@@ -217,7 +217,7 @@ trait Constants extends api.Constants {
}
def escapedStringValue: String = {
- def escape(text: String): String = (text map escapedChar) mkString ""
+ def escape(text: String): String = text flatMap escapedChar
tag match {
case NullTag => "null"
case StringTag => "\"" + escape(stringValue) + "\""
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 4d71d2a769..d3af8e2623 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -6,6 +6,7 @@
package scala.reflect
package internal
+import annotation.{ switch }
import scala.collection.{ mutable, immutable }
import Flags._
import PartialFunction._
@@ -60,8 +61,8 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
- lazy val refClass = classesMap(x => getClass("scala.runtime." + x + "Ref"))
- lazy val volatileRefClass = classesMap(x => getClass("scala.runtime.Volatile" + x + "Ref"))
+ lazy val refClass = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
+ lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
lazy val boxMethod = classesMap(x => valueModuleMethod(x, nme.box))
lazy val unboxMethod = classesMap(x => valueModuleMethod(x, nme.unbox))
@@ -85,12 +86,6 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isGetClass(sym: Symbol) =
(sym.name == nme.getClass_) && (sym.paramss.isEmpty || sym.paramss.head.isEmpty)
- private[Definitions] def fullNameStrings: List[String] = nme.ScalaValueNames map ("scala." + _)
- private[Definitions] lazy val fullValueName: Set[Name] = {
- val values = nme.ScalaValueNames flatMap (x => List(newTypeName("scala." + x), newTermName("scala." + x)))
- values.toSet + newTypeName("scala.AnyVal")
- }
-
lazy val AnyValClass = valueCache(tpnme.AnyVal)
lazy val UnitClass = valueCache(tpnme.Unit)
lazy val ByteClass = valueCache(tpnme.Byte)
@@ -101,9 +96,9 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val FloatClass = valueCache(tpnme.Float)
lazy val DoubleClass = valueCache(tpnme.Double)
lazy val BooleanClass = valueCache(tpnme.Boolean)
- def Boolean_and = getMember(BooleanClass, nme.ZAND)
- def Boolean_or = getMember(BooleanClass, nme.ZOR)
- def Boolean_not = getMember(BooleanClass, nme.UNARY_!)
+ lazy val Boolean_and = getMember(BooleanClass, nme.ZAND)
+ lazy val Boolean_or = getMember(BooleanClass, nme.ZOR)
+ lazy val Boolean_not = getMember(BooleanClass, nme.UNARY_!)
def ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
def ScalaValueClasses: List[Symbol] = List(
@@ -151,8 +146,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val ScalaPackage = getModule(nme.scala_)
lazy val ScalaPackageClass = ScalaPackage.moduleClass
- lazy val RuntimePackage = getModule("scala.runtime")
+ lazy val RuntimePackage = getRequiredModule("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass
+
+ lazy val JavaLangEnumClass = getRequiredClass("java.lang.Enum")
// convenient one-argument parameter lists
lazy val anyparam = List(AnyClass.typeConstructor)
@@ -163,17 +160,45 @@ trait Definitions extends reflect.api.StandardDefinitions {
private def booltype = BooleanClass.typeConstructor
private def inttype = IntClass.typeConstructor
private def stringtype = StringClass.typeConstructor
+
+ // Java types
+ def javaTypeName(jclazz: Class[_]): TypeName = newTypeName(jclazz.getName)
+
+ def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
+ case java.lang.Void.TYPE => UnitClass
+ case java.lang.Byte.TYPE => ByteClass
+ case java.lang.Character.TYPE => CharClass
+ case java.lang.Short.TYPE => ShortClass
+ case java.lang.Integer.TYPE => IntClass
+ case java.lang.Long.TYPE => LongClass
+ case java.lang.Float.TYPE => FloatClass
+ case java.lang.Double.TYPE => DoubleClass
+ case java.lang.Boolean.TYPE => BooleanClass
+ case _ => NoSymbol
+ }
+ def valueClassToJavaType(sym: Symbol): Class[_] = sym match {
+ case UnitClass => java.lang.Void.TYPE
+ case ByteClass => java.lang.Byte.TYPE
+ case CharClass => java.lang.Character.TYPE
+ case ShortClass => java.lang.Short.TYPE
+ case IntClass => java.lang.Integer.TYPE
+ case LongClass => java.lang.Long.TYPE
+ case FloatClass => java.lang.Float.TYPE
+ case DoubleClass => java.lang.Double.TYPE
+ case BooleanClass => java.lang.Boolean.TYPE
+ case _ => null
+ }
// top types
lazy val AnyClass = newClass(ScalaPackageClass, tpnme.Any, Nil) setFlag (ABSTRACT)
lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.typeConstructor)
lazy val ObjectClass = getClass(sn.Object)
- lazy val AnyCompanionClass = getClass("scala.AnyCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
- lazy val AnyValCompanionClass = getClass("scala.AnyValCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
+ lazy val AnyCompanionClass = getRequiredClass("scala.AnyCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
+ lazy val AnyValCompanionClass = getRequiredClass("scala.AnyValCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
// bottom types
- lazy val RuntimeNothingClass = getClass(ClassfileConstants.SCALA_NOTHING)
- lazy val RuntimeNullClass = getClass(ClassfileConstants.SCALA_NULL)
+ lazy val RuntimeNothingClass = getClass(fulltpnme.RuntimeNothing)
+ lazy val RuntimeNullClass = getClass(fulltpnme.RuntimeNull)
sealed abstract class BottomClassSymbol(name: TypeName, parent: Symbol) extends ClassSymbol(ScalaPackageClass, NoPosition, name) {
locally {
@@ -194,25 +219,25 @@ trait Definitions extends reflect.api.StandardDefinitions {
}
// exceptions and other throwables
- lazy val ClassCastExceptionClass = getClass("java.lang.ClassCastException")
+ lazy val ClassCastExceptionClass = getRequiredClass("java.lang.ClassCastException")
lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
lazy val InvocationTargetExceptionClass = getClass(sn.InvTargetException)
- lazy val MatchErrorClass = getClass("scala.MatchError")
- lazy val NonLocalReturnControlClass = getClass("scala.runtime.NonLocalReturnControl")
+ lazy val MatchErrorClass = getRequiredClass("scala.MatchError")
+ lazy val NonLocalReturnControlClass = getRequiredClass("scala.runtime.NonLocalReturnControl")
lazy val NullPointerExceptionClass = getClass(sn.NPException)
lazy val ThrowableClass = getClass(sn.Throwable)
- lazy val UninitializedErrorClass = getClass("scala.UninitializedFieldError")
+ lazy val UninitializedErrorClass = getRequiredClass("scala.UninitializedFieldError")
// fundamental reference classes
lazy val ScalaObjectClass = getMember(ScalaPackageClass, tpnme.ScalaObject)
- lazy val PartialFunctionClass = getClass("scala.PartialFunction")
- lazy val AbstractPartialFunctionClass = getClass("scala.runtime.AbstractPartialFunction")
- lazy val SymbolClass = getClass("scala.Symbol")
+ lazy val PartialFunctionClass = getRequiredClass("scala.PartialFunction")
+ lazy val AbstractPartialFunctionClass = getRequiredClass("scala.runtime.AbstractPartialFunction")
+ lazy val SymbolClass = getRequiredClass("scala.Symbol")
lazy val StringClass = getClass(sn.String)
lazy val StringModule = StringClass.linkedClassOfClass
lazy val ClassClass = getClass(sn.Class)
def Class_getMethod = getMember(ClassClass, nme.getMethod_)
- lazy val DynamicClass = getClass("scala.Dynamic")
+ lazy val DynamicClass = getRequiredClass("scala.Dynamic")
// fundamental modules
lazy val SysPackage = getPackageObject("scala.sys")
@@ -223,7 +248,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Those modules and their module classes
lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
- lazy val PredefModule: Symbol = getModule("scala.Predef")
+ lazy val PredefModule: Symbol = getRequiredModule("scala.Predef")
lazy val PredefModuleClass = PredefModule.moduleClass
// Note: this is not the type alias AnyRef, it's a val defined in Predef
// used by the @specialize annotation.
@@ -232,26 +257,37 @@ trait Definitions extends reflect.api.StandardDefinitions {
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray)
- lazy val ConsoleModule: Symbol = getModule("scala.Console")
- lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime")
- lazy val SymbolModule: Symbol = getModule("scala.Symbol")
- lazy val Symbol_apply = getMember(SymbolModule, nme.apply)
+
+ /** Is `sym` a member of Predef with the given name?
+ * Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
+ * which does a member lookup (it can't be a lazy val because we might reload Predef
+ * during resident compilations).
+ */
+ def isPredefMemberNamed(sym: Symbol, name: Name) = (
+ (sym.name == name) && (sym.owner == PredefModule.moduleClass)
+ )
+
+ lazy val ConsoleModule: Symbol = getRequiredModule("scala.Console")
+ lazy val ScalaRunTimeModule: Symbol = getRequiredModule("scala.runtime.ScalaRunTime")
+ lazy val SymbolModule: Symbol = getRequiredModule("scala.Symbol")
+ lazy val Symbol_apply = SymbolModule.info decl nme.apply
+
def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
- def arrayApplyMethod = getMember(ScalaRunTimeModule, "array_apply")
- def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
- def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
- def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
- def ensureAccessibleMethod = getMember(ScalaRunTimeModule, "ensureAccessible")
+ def arrayApplyMethod = getMember(ScalaRunTimeModule, nme.array_apply)
+ def arrayUpdateMethod = getMember(ScalaRunTimeModule, nme.array_update)
+ def arrayLengthMethod = getMember(ScalaRunTimeModule, nme.array_length)
+ def arrayCloneMethod = getMember(ScalaRunTimeModule, nme.array_clone)
+ def ensureAccessibleMethod = getMember(ScalaRunTimeModule, nme.ensureAccessible)
def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
// classes with special meanings
- lazy val StringAddClass = getClass("scala.runtime.StringAdd")
- lazy val ArrowAssocClass = getClass("scala.Predef.ArrowAssoc")
+ lazy val StringAddClass = getRequiredClass("scala.runtime.StringAdd")
+ lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc")
lazy val StringAdd_+ = getMember(StringAddClass, nme.PLUS)
- lazy val NotNullClass = getClass("scala.NotNull")
- lazy val ScalaNumberClass = getClass("scala.math.ScalaNumber")
- lazy val TraitSetterAnnotationClass = getClass("scala.runtime.TraitSetter")
- lazy val DelayedInitClass = getClass("scala.DelayedInit")
+ lazy val NotNullClass = getRequiredClass("scala.NotNull")
+ lazy val ScalaNumberClass = getRequiredClass("scala.math.ScalaNumber")
+ lazy val TraitSetterAnnotationClass = getRequiredClass("scala.runtime.TraitSetter")
+ lazy val DelayedInitClass = getRequiredClass("scala.DelayedInit")
def delayedInitMethod = getMember(DelayedInitClass, nme.delayedInit)
// a dummy value that communicates that a delayedInit call is compiler-generated
// from phase UnCurry to phase Constructors
@@ -259,14 +295,14 @@ trait Definitions extends reflect.api.StandardDefinitions {
// def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
// .setInfo(UnitClass.tpe)
- lazy val TypeConstraintClass = getClass("scala.annotation.TypeConstraint")
+ lazy val TypeConstraintClass = getRequiredClass("scala.annotation.TypeConstraint")
lazy val SingletonClass = newClass(ScalaPackageClass, tpnme.Singleton, anyparam) setFlag (ABSTRACT | TRAIT | FINAL)
- lazy val SerializableClass = getClass("scala.Serializable")
+ lazy val SerializableClass = getRequiredClass("scala.Serializable")
lazy val JavaSerializableClass = getClass(sn.JavaSerializable)
- lazy val ComparableClass = getClass("java.lang.Comparable")
- lazy val JavaCloneableClass = getClass("java.lang.Cloneable")
- lazy val RemoteInterfaceClass = getClass("java.rmi.Remote")
- lazy val RemoteExceptionClass = getClass("java.rmi.RemoteException")
+ lazy val ComparableClass = getRequiredClass("java.lang.Comparable")
+ lazy val JavaCloneableClass = getRequiredClass("java.lang.Cloneable")
+ lazy val RemoteInterfaceClass = getRequiredClass("java.rmi.Remote")
+ lazy val RemoteExceptionClass = getRequiredClass("java.rmi.RemoteException")
lazy val RepeatedParamClass = newCovariantPolyClass(
ScalaPackageClass,
@@ -286,6 +322,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
+ def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
def isJavaVarArgs(params: List[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe)
def isScalaVarArgs(params: List[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
def isVarArgsList(params: List[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
@@ -315,66 +352,92 @@ trait Definitions extends reflect.api.StandardDefinitions {
val clazz = newClass(ScalaPackageClass, tpnme.EQUALS_PATTERN_NAME, Nil)
clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, new Scope, clazz))
}
+ lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
// collections classes
- lazy val ConsClass = getClass("scala.collection.immutable.$colon$colon")
- lazy val IterableClass = getClass("scala.collection.Iterable")
- lazy val IteratorClass = getClass("scala.collection.Iterator")
- lazy val ListClass = getClass("scala.collection.immutable.List")
- lazy val SeqClass = getClass("scala.collection.Seq")
- lazy val StringBuilderClass = getClass("scala.collection.mutable.StringBuilder")
- lazy val TraversableClass = getClass("scala.collection.Traversable")
-
- lazy val ListModule = getModule("scala.collection.immutable.List")
+ lazy val ConsClass = getRequiredClass("scala.collection.immutable.$colon$colon")
+ lazy val IterableClass = getRequiredClass("scala.collection.Iterable")
+ lazy val IteratorClass = getRequiredClass("scala.collection.Iterator")
+ lazy val ListClass = getRequiredClass("scala.collection.immutable.List")
+ lazy val SeqClass = getRequiredClass("scala.collection.Seq")
+ lazy val StringBuilderClass = getRequiredClass("scala.collection.mutable.StringBuilder")
+ lazy val TraversableClass = getRequiredClass("scala.collection.Traversable")
+
+ lazy val ListModule = getRequiredModule("scala.collection.immutable.List")
lazy val List_apply = getMember(ListModule, nme.apply)
- lazy val NilModule = getModule("scala.collection.immutable.Nil")
- lazy val SeqModule = getModule("scala.collection.Seq")
+ lazy val NilModule = getRequiredModule("scala.collection.immutable.Nil")
+ lazy val SeqModule = getRequiredModule("scala.collection.Seq")
+ lazy val IteratorModule = getRequiredModule("scala.collection.Iterator")
+ lazy val Iterator_apply = getMember(IteratorModule, nme.apply)
// arrays and their members
- lazy val ArrayModule = getModule("scala.Array")
- def ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
- lazy val ArrayClass = getClass("scala.Array")
- def Array_apply = getMember(ArrayClass, nme.apply)
- def Array_update = getMember(ArrayClass, nme.update)
- def Array_length = getMember(ArrayClass, nme.length)
- lazy val Array_clone = getMember(ArrayClass, nme.clone_)
+ lazy val ArrayModule = getRequiredModule("scala.Array")
+ lazy val ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
+ lazy val ArrayClass = getRequiredClass("scala.Array")
+ lazy val Array_apply = getMember(ArrayClass, nme.apply)
+ lazy val Array_update = getMember(ArrayClass, nme.update)
+ lazy val Array_length = getMember(ArrayClass, nme.length)
+ lazy val Array_clone = getMember(ArrayClass, nme.clone_)
// reflection / structural types
- lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
- lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
+ lazy val SoftReferenceClass = getRequiredClass("java.lang.ref.SoftReference")
+ lazy val WeakReferenceClass = getRequiredClass("java.lang.ref.WeakReference")
lazy val MethodClass = getClass(sn.MethodAsObject)
def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
- lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
- lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
+ lazy val EmptyMethodCacheClass = getRequiredClass("scala.runtime.EmptyMethodCache")
+ lazy val MethodCacheClass = getRequiredClass("scala.runtime.MethodCache")
def methodCache_find = getMember(MethodCacheClass, nme.find_)
def methodCache_add = getMember(MethodCacheClass, nme.add_)
// scala.reflect
- lazy val ReflectApiUniverse = getClass("scala.reflect.api.Universe")
- lazy val ReflectRuntimeMirror = getModule("scala.reflect.runtime.Mirror")
- def freeValueMethod = getMember(ReflectRuntimeMirror, "freeValue")
+ lazy val ReflectApiUniverse = getRequiredClass("scala.reflect.api.Universe")
+ lazy val ReflectRuntimeMirror = getRequiredModule("scala.reflect.runtime.Mirror")
+ def freeValueMethod = getMember(ReflectRuntimeMirror, nme.freeValue)
lazy val ReflectPackage = getPackageObject("scala.reflect")
- def Reflect_mirror = getMember(ReflectPackage, "mirror")
-
-
- lazy val PartialManifestClass = getClass("scala.reflect.ClassManifest")
- lazy val PartialManifestModule = getModule("scala.reflect.ClassManifest")
- lazy val FullManifestClass = getClass("scala.reflect.Manifest")
- lazy val FullManifestModule = getModule("scala.reflect.Manifest")
- lazy val OptManifestClass = getClass("scala.reflect.OptManifest")
- lazy val NoManifest = getModule("scala.reflect.NoManifest")
+ def Reflect_mirror = getMember(ReflectPackage, nme.mirror)
+
+ lazy val PartialManifestClass = getRequiredClass("scala.reflect.ClassManifest")
+ lazy val PartialManifestModule = getRequiredModule("scala.reflect.ClassManifest")
+ lazy val FullManifestClass = getRequiredClass("scala.reflect.Manifest")
+ lazy val FullManifestModule = getRequiredModule("scala.reflect.Manifest")
+ lazy val OptManifestClass = getRequiredClass("scala.reflect.OptManifest")
+ lazy val NoManifest = getRequiredModule("scala.reflect.NoManifest")
lazy val CodeClass = getClass(sn.Code)
lazy val CodeModule = getModule(sn.Code)
- def Code_lift = getMember(CodeModule, nme.lift_)
+ lazy val Code_lift = getMember(CodeModule, nme.lift_)
- lazy val ScalaSignatureAnnotation = getClass("scala.reflect.ScalaSignature")
- lazy val ScalaLongSignatureAnnotation = getClass("scala.reflect.ScalaLongSignature")
+ lazy val ScalaSignatureAnnotation = getRequiredClass("scala.reflect.ScalaSignature")
+ lazy val ScalaLongSignatureAnnotation = getRequiredClass("scala.reflect.ScalaLongSignature")
// Option classes
- lazy val OptionClass: Symbol = getClass("scala.Option")
- lazy val SomeClass: Symbol = getClass("scala.Some")
- lazy val NoneModule: Symbol = getModule("scala.None")
- lazy val SomeModule: Symbol = getModule("scala.Some")
+ lazy val OptionClass: Symbol = getRequiredClass("scala.Option")
+ lazy val SomeClass: Symbol = getRequiredClass("scala.Some")
+ lazy val NoneModule: Symbol = getRequiredModule("scala.None")
+ lazy val SomeModule: Symbol = getRequiredModule("scala.Some")
+
+ /** Note: don't use this manifest/type function for anything important,
+ * as it is incomplete. Would love to have things like existential types
+ * working, but very unfortunately the manifests just stuff the relevant
+ * information into the toString method.
+ */
+ def manifestToType(m: OptManifest[_]): Type = m match {
+ case x: AnyValManifest[_] =>
+ getClassIfDefined("scala." + x).tpe
+ case m: ClassManifest[_] =>
+ val name = m.erasure.getName
+ if (name endsWith nme.MODULE_SUFFIX_STRING)
+ getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING).tpe
+ else {
+ val sym = getClassIfDefined(name)
+ val args = m.typeArguments
+
+ if (sym eq NoSymbol) NoType
+ else if (args.isEmpty) sym.tpe
+ else appliedType(sym.typeConstructor, args map manifestToType)
+ }
+ case _ =>
+ NoType
+ }
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
@@ -406,7 +469,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// Product, Tuple, Function
private def mkArityArray(name: String, arity: Int, countFrom: Int = 1): Array[Symbol] = {
- val list = countFrom to arity map (i => getClass("scala." + name + i))
+ val list = countFrom to arity map (i => getRequiredClass("scala." + name + i))
if (countFrom == 0) list.toArray
else (NoSymbol +: list).toArray
}
@@ -422,10 +485,24 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
lazy val isProductNClass = ProductClass.toSet
+ def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match {
+ case ByteClass => nme.wrapByteArray
+ case ShortClass => nme.wrapShortArray
+ case CharClass => nme.wrapCharArray
+ case IntClass => nme.wrapIntArray
+ case LongClass => nme.wrapLongArray
+ case FloatClass => nme.wrapFloatArray
+ case DoubleClass => nme.wrapDoubleArray
+ case BooleanClass => nme.wrapBooleanArray
+ case UnitClass => nme.wrapUnitArray
+ case _ =>
+ if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+ else nme.genericWrapArray
+ }
- def tupleField(n: Int, j: Int) = getMember(TupleClass(n), "_" + j)
- def isTupleType(tp: Type): Boolean = isTupleType(tp, false)
- def isTupleTypeOrSubtype(tp: Type): Boolean = isTupleType(tp, true)
+ def tupleField(n: Int, j: Int) = getMember(TupleClass(n), nme.productAccessorName(j))
+ def isTupleType(tp: Type): Boolean = isTupleType(tp, false)
+ def isTupleTypeOrSubtype(tp: Type): Boolean = isTupleType(tp, true)
private def isTupleType(tp: Type, subtypeOK: Boolean) = tp.normalize match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val len = args.length
@@ -444,7 +521,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
} else NoType
}
- lazy val ProductRootClass: Symbol = getClass("scala.Product")
+ lazy val ProductRootClass: Symbol = getRequiredClass("scala.Product")
def Product_productArity = getMember(ProductRootClass, nme.productArity)
def Product_productElement = getMember(ProductRootClass, nme.productElement)
// def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
@@ -513,9 +590,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
case _ => NoType
}
- def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg))
- def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg))
- def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
+ def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg))
+ def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg))
+ def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
+ def iteratorOfType(tp: Type) = appliedType(IteratorClass.typeConstructor, List(tp))
def ClassType(arg: Type) =
if (phase.erasedTypes || forMSIL) ClassClass.tpe
@@ -525,7 +603,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
// .NET backend
//
- lazy val ComparatorClass = getClass("scala.runtime.Comparator")
+ lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
// System.ValueType
lazy val ValueTypeClass: Symbol = getClass(sn.ValueType)
// System.MulticastDelegate
@@ -570,10 +648,10 @@ trait Definitions extends reflect.api.StandardDefinitions {
var Object_## : Symbol = _
var Object_synchronized: Symbol = _
lazy val Object_isInstanceOf = newPolyMethod(
- ObjectClass, "$isInstanceOf",
+ ObjectClass, newTermName("$isInstanceOf"),
tparam => MethodType(List(), booltype)) setFlag (FINAL | SYNTHETIC)
lazy val Object_asInstanceOf = newPolyMethod(
- ObjectClass, "$asInstanceOf",
+ ObjectClass, newTermName("$asInstanceOf"),
tparam => MethodType(List(), tparam.typeConstructor)) setFlag (FINAL | SYNTHETIC)
def Object_getClass = getMember(ObjectClass, nme.getClass_)
@@ -588,57 +666,57 @@ trait Definitions extends reflect.api.StandardDefinitions {
var String_+ : Symbol = _
// boxed classes
- lazy val ObjectRefClass = getClass("scala.runtime.ObjectRef")
- lazy val VolatileObjectRefClass = getClass("scala.runtime.VolatileObjectRef")
- lazy val BoxesRunTimeClass = getModule("scala.runtime.BoxesRunTime")
+ lazy val ObjectRefClass = getRequiredClass("scala.runtime.ObjectRef")
+ lazy val VolatileObjectRefClass = getRequiredClass("scala.runtime.VolatileObjectRef")
+ lazy val BoxesRunTimeClass = getRequiredModule("scala.runtime.BoxesRunTime")
lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
- lazy val BoxedByteClass = getClass("java.lang.Byte")
- lazy val BoxedShortClass = getClass("java.lang.Short")
- lazy val BoxedIntClass = getClass("java.lang.Integer")
- lazy val BoxedLongClass = getClass("java.lang.Long")
- lazy val BoxedFloatClass = getClass("java.lang.Float")
- lazy val BoxedDoubleClass = getClass("java.lang.Double")
-
- lazy val BoxedUnitClass = getClass("scala.runtime.BoxedUnit")
- lazy val BoxedUnitModule = getModule("scala.runtime.BoxedUnit")
- def BoxedUnit_UNIT = getMember(BoxedUnitModule, "UNIT")
- def BoxedUnit_TYPE = getMember(BoxedUnitModule, "TYPE")
+ lazy val BoxedByteClass = getRequiredClass("java.lang.Byte")
+ lazy val BoxedShortClass = getRequiredClass("java.lang.Short")
+ lazy val BoxedIntClass = getRequiredClass("java.lang.Integer")
+ lazy val BoxedLongClass = getRequiredClass("java.lang.Long")
+ lazy val BoxedFloatClass = getRequiredClass("java.lang.Float")
+ lazy val BoxedDoubleClass = getRequiredClass("java.lang.Double")
+
+ lazy val BoxedUnitClass = getRequiredClass("scala.runtime.BoxedUnit")
+ lazy val BoxedUnitModule = getRequiredModule("scala.runtime.BoxedUnit")
+ def BoxedUnit_UNIT = getMember(BoxedUnitModule, nme.UNIT)
+ def BoxedUnit_TYPE = getMember(BoxedUnitModule, nme.TYPE_)
// Annotation base classes
- lazy val AnnotationClass = getClass("scala.annotation.Annotation")
- lazy val ClassfileAnnotationClass = getClass("scala.annotation.ClassfileAnnotation")
- lazy val StaticAnnotationClass = getClass("scala.annotation.StaticAnnotation")
+ lazy val AnnotationClass = getRequiredClass("scala.annotation.Annotation")
+ lazy val ClassfileAnnotationClass = getRequiredClass("scala.annotation.ClassfileAnnotation")
+ lazy val StaticAnnotationClass = getRequiredClass("scala.annotation.StaticAnnotation")
// Annotations
- lazy val BridgeClass = getClass("scala.annotation.bridge")
- lazy val ElidableMethodClass = getClass("scala.annotation.elidable")
- lazy val ImplicitNotFoundClass = getClass("scala.annotation.implicitNotFound")
- lazy val MigrationAnnotationClass = getClass("scala.annotation.migration")
- lazy val ScalaStrictFPAttr = getClass("scala.annotation.strictfp")
- lazy val SerializableAttr = getClass("scala.annotation.serializable") // @serializable is deprecated
- lazy val SwitchClass = getClass("scala.annotation.switch")
- lazy val TailrecClass = getClass("scala.annotation.tailrec")
- lazy val VarargsClass = getClass("scala.annotation.varargs")
- lazy val uncheckedStableClass = getClass("scala.annotation.unchecked.uncheckedStable")
- lazy val uncheckedVarianceClass = getClass("scala.annotation.unchecked.uncheckedVariance")
-
- lazy val BeanPropertyAttr = getClass("scala.beans.BeanProperty")
- lazy val BooleanBeanPropertyAttr = getClass("scala.beans.BooleanBeanProperty")
- lazy val CloneableAttr = getClass("scala.cloneable")
- lazy val DeprecatedAttr = getClass("scala.deprecated")
- lazy val DeprecatedNameAttr = getClass("scala.deprecatedName")
- lazy val NativeAttr = getClass("scala.native")
- lazy val RemoteAttr = getClass("scala.remote")
- lazy val ScalaInlineClass = getClass("scala.inline")
- lazy val ScalaNoInlineClass = getClass("scala.noinline")
- lazy val SerialVersionUIDAttr = getClass("scala.SerialVersionUID")
- lazy val SpecializedClass = getClass("scala.specialized")
- lazy val ThrowsClass = getClass("scala.throws")
- lazy val TransientAttr = getClass("scala.transient")
- lazy val UncheckedClass = getClass("scala.unchecked")
- lazy val VolatileAttr = getClass("scala.volatile")
+ lazy val BridgeClass = getRequiredClass("scala.annotation.bridge")
+ lazy val ElidableMethodClass = getRequiredClass("scala.annotation.elidable")
+ lazy val ImplicitNotFoundClass = getRequiredClass("scala.annotation.implicitNotFound")
+ lazy val MigrationAnnotationClass = getRequiredClass("scala.annotation.migration")
+ lazy val ScalaStrictFPAttr = getRequiredClass("scala.annotation.strictfp")
+ lazy val SerializableAttr = getRequiredClass("scala.annotation.serializable") // @serializable is deprecated
+ lazy val SwitchClass = getRequiredClass("scala.annotation.switch")
+ lazy val TailrecClass = getRequiredClass("scala.annotation.tailrec")
+ lazy val VarargsClass = getRequiredClass("scala.annotation.varargs")
+ lazy val uncheckedStableClass = getRequiredClass("scala.annotation.unchecked.uncheckedStable")
+ lazy val uncheckedVarianceClass = getRequiredClass("scala.annotation.unchecked.uncheckedVariance")
+
+ lazy val BeanPropertyAttr = getRequiredClass("scala.beans.BeanProperty")
+ lazy val BooleanBeanPropertyAttr = getRequiredClass("scala.beans.BooleanBeanProperty")
+ lazy val CloneableAttr = getRequiredClass("scala.cloneable")
+ lazy val DeprecatedAttr = getRequiredClass("scala.deprecated")
+ lazy val DeprecatedNameAttr = getRequiredClass("scala.deprecatedName")
+ lazy val NativeAttr = getRequiredClass("scala.native")
+ lazy val RemoteAttr = getRequiredClass("scala.remote")
+ lazy val ScalaInlineClass = getRequiredClass("scala.inline")
+ lazy val ScalaNoInlineClass = getRequiredClass("scala.noinline")
+ lazy val SerialVersionUIDAttr = getRequiredClass("scala.SerialVersionUID")
+ lazy val SpecializedClass = getRequiredClass("scala.specialized")
+ lazy val ThrowsClass = getRequiredClass("scala.throws")
+ lazy val TransientAttr = getRequiredClass("scala.transient")
+ lazy val UncheckedClass = getRequiredClass("scala.unchecked")
+ lazy val VolatileAttr = getRequiredClass("scala.volatile")
// Meta-annotations
lazy val BeanGetterTargetClass = getMetaAnnotation("beanGetter")
@@ -649,7 +727,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
lazy val SetterTargetClass = getMetaAnnotation("setter")
// TODO: module, moduleClass? package, packageObject?
- private def getMetaAnnotation(name: String) = getClass("scala.annotation.meta." + name)
+ private def getMetaAnnotation(name: String) = getRequiredClass("scala.annotation.meta." + name)
def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || (
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
@@ -667,11 +745,11 @@ trait Definitions extends reflect.api.StandardDefinitions {
attr
}
- def getPackageObjectClass(fullname: Name): Symbol =
+ def getPackageObjectClass(fullname: String): Symbol =
getPackageObject(fullname).companionClass
- def getPackageObject(fullname: Name): Symbol =
- getModule(fullname).info member nme.PACKAGE
+ def getPackageObject(fullname: String): Symbol =
+ getModule(newTermName(fullname)).info member nme.PACKAGE
def getModule(fullname: Name): Symbol =
getModuleOrClass(fullname.toTermName)
@@ -681,11 +759,20 @@ trait Definitions extends reflect.api.StandardDefinitions {
while (result.isAliasType) result = result.info.typeSymbol
result
}
-
+
+ def getRequiredModule(fullname: String): Symbol =
+ getModule(newTermNameCached(fullname))
+ def getRequiredClass(fullname: String): Symbol =
+ getClass(newTypeNameCached(fullname))
+
+ def getClassIfDefined(fullname: String): Symbol =
+ getClassIfDefined(newTypeName(fullname))
def getClassIfDefined(fullname: Name): Symbol =
try getClass(fullname.toTypeName)
catch { case _: MissingRequirementError => NoSymbol }
+ def getModuleIfDefined(fullname: String): Symbol =
+ getModuleIfDefined(newTermName(fullname))
def getModuleIfDefined(fullname: Name): Symbol =
try getModule(fullname.toTermName)
catch { case _: MissingRequirementError => NoSymbol }
@@ -914,8 +1001,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
ObjectClass, nme.synchronized_,
tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL
- String_+ = newMethod(
- StringClass, "+", anyparam, stringtype) setFlag FINAL
+ String_+ = newMethod(StringClass, nme.raw.PLUS, anyparam, stringtype) setFlag FINAL
val forced = List( // force initialization of every symbol that is entered as a side effect
AnnotationDefaultAttr, // #2264
@@ -948,7 +1034,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
assert(forMSIL, "scalaCallers can only be created if target is .NET")
// object: reference to object on which to call (scala-)method
val paramTypes: List[Type] = List(ObjectClass.tpe)
- val name: String = "$scalaCaller$$" + nbScalaCallers
+ val name = newTermName("$scalaCaller$$" + nbScalaCallers)
// tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
// type parameter =-> a MethodType in this case
// TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 8366c6d63a..9e4f0431c3 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -7,6 +7,7 @@ package scala.reflect
package internal
import api.Modifier
+import scala.collection.{ mutable, immutable }
// Flags at each index of a flags Long. Those marked with /M are used in
// Parsers/JavaParsers and therefore definitely appear on Modifiers; but the
@@ -225,6 +226,7 @@ class Flags extends ModifierFlags {
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
+ final val BridgeAndPrivateFlags = BridgeFlags | PRIVATE
/** When a symbol for a field is created, only these flags survive
* from Modifiers. Others which may be applied at creation time are:
@@ -426,8 +428,29 @@ class Flags extends ModifierFlags {
List(flagsToString(f), pw) filterNot (_ == "") mkString " "
}
- def flagsToString(flags: Long): String =
- pickledListOrder map (mask => flagToString(flags & mask)) filterNot (_ == "") mkString " "
+ // List of the raw flags, in pickled order
+ protected final val MaxBitPosition = 62
+
+ def flagsToString(flags: Long): String = {
+ // Fast path for common case
+ if (flags == 0L) "" else {
+ var sb: StringBuilder = null
+ var i = 0
+ while (i <= MaxBitPosition) {
+ val mask = rawFlagPickledOrder(i)
+ if ((flags & mask) != 0L) {
+ val s = flagToString(mask)
+ if (s.length > 0) {
+ if (sb eq null) sb = new StringBuilder append s
+ else if (sb.length == 0) sb append s
+ else sb append " " append s
+ }
+ }
+ i += 1
+ }
+ if (sb eq null) "" else sb.toString
+ }
+ }
def rawFlagsToPickled(flags: Long): Long =
(flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
@@ -435,13 +458,13 @@ class Flags extends ModifierFlags {
def pickledToRawFlags(pflags: Long): Long =
(pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
- // List of the raw flags, in pickled order
- protected val pickledListOrder: List[Long] = {
- val all = 0 to 62 map (1L << _)
+ protected final val pickledListOrder: List[Long] = {
+ val all = 0 to MaxBitPosition map (1L << _)
val front = rawFlags map (_.toLong)
front.toList ++ (all filterNot (front contains _))
}
+ protected final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
def flagOfModifier(mod: Modifier.Value): Long = mod match {
case Modifier.`protected` => PROTECTED
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index 60b353a7c4..38f808cef9 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -20,8 +20,8 @@ trait Importers { self: SymbolTable =>
def importSymbol(sym: from.Symbol): Symbol = {
def doImport(sym: from.Symbol): Symbol = {
val myowner = importSymbol(sym.owner)
- val mypos = importPosition(sym.pos)
- val myname = importName(sym.name)
+ val mypos = importPosition(sym.pos)
+ val myname = importName(sym.name).toTermName
def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
symMap(x) = mysym
mysym.referenced = op(x.referenced)
@@ -33,7 +33,7 @@ trait Importers { self: SymbolTable =>
case x: from.ModuleSymbol =>
linkReferenced(new ModuleSymbol(myowner, mypos, myname), x, doImport)
case x: from.FreeVar =>
- new FreeVar(importName(x.name), importType(x.tpe), x.value)
+ new FreeVar(importName(x.name).toTermName, importType(x.tpe), x.value)
case x: from.TermSymbol =>
linkReferenced(new TermSymbol(myowner, mypos, myname), x, importSymbol)
case x: from.TypeSkolem =>
@@ -161,13 +161,13 @@ trait Importers { self: SymbolTable =>
case from.RefinedType(parents, decls) =>
RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
case from.ExistentialType(tparams, restpe) =>
- ExistentialType(tparams map importSymbol, importType(restpe))
+ newExistentialType(tparams map importSymbol, importType(restpe))
case from.OverloadedType(pre, alts) =>
OverloadedType(importType(pre), alts map importSymbol)
case from.AntiPolyType(pre, targs) =>
AntiPolyType(importType(pre), targs map importType)
case x: from.TypeVar =>
- new TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
+ TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol)
case from.NotNullType(tpe) =>
NotNullType(importType(tpe))
case from.AnnotatedType(annots, tpe, selfsym) =>
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
new file mode 100644
index 0000000000..e675be43dc
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -0,0 +1,221 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
+
+trait Kinds {
+ self: SymbolTable =>
+
+ import definitions._
+
+ private type SymPair = ((Symbol, Symbol)) // ((Argument, Parameter))
+
+ case class KindErrors(
+ arity: List[SymPair] = Nil,
+ variance: List[SymPair] = Nil,
+ strictness: List[SymPair] = Nil
+ ) {
+ def isEmpty = arity.isEmpty && variance.isEmpty && strictness.isEmpty
+
+ def arityError(syms: SymPair) = copy(arity = arity :+ syms)
+ def varianceError(syms: SymPair) = copy(variance = variance :+ syms)
+ def strictnessError(syms: SymPair) = copy(strictness = strictness :+ syms)
+
+ def ++(errs: KindErrors) = KindErrors(
+ arity ++ errs.arity,
+ variance ++ errs.variance,
+ strictness ++ errs.strictness
+ )
+ // @M TODO this method is duplicated all over the place (varianceString)
+ private def varStr(s: Symbol): String =
+ if (s.isCovariant) "covariant"
+ else if (s.isContravariant) "contravariant"
+ else "invariant";
+
+ private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
+ if((a0 eq b0) || (a0.owner eq b0.owner)) ""
+ else {
+ var a = a0; var b = b0
+ while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
+ if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
+ }
+ }
+ private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
+ f(a+qualify(a,p), p+qualify(p,a))
+
+ private def strictnessMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format(
+ _, a.info, _, p.info))
+
+ private def varianceMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))
+
+ private def arityMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s has %s, but %s has %s".format(
+ _, countElementsAsString(a.typeParams.length, "type parameter"),
+ _, countAsString(p.typeParams.length))
+ )
+
+ def errorMessage(targ: Type, tparam: Symbol): String = (
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ")
+ + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ")
+ + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ")
+ + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ")
+ )
+ }
+ val NoKindErrors = KindErrors(Nil, Nil, Nil)
+
+ // TODO: this desperately needs to be cleaned up
+ // plan: split into kind inference and subkinding
+ // every Type has a (cached) Kind
+ def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
+ checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+
+ /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
+ *
+ * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
+ */
+ private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
+ sym2.variance==0
+ || sym1.variance==sym2.variance
+ )
+
+ /** Check well-kindedness of type application (assumes arities are already checked) -- @M
+ *
+ * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
+ * (checked one type member at a time -- in that case, prefix is the name of the type alias)
+ *
+ * Type application is just like value application: it's "contravariant" in the sense that
+ * the type parameters of the supplied type arguments must conform to the type parameters of
+ * the required type parameters:
+ * - their bounds must be less strict
+ * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
+ * - @M TODO: are these conditions correct,sufficient&necessary?
+ *
+ * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
+ * List's type parameter is also covariant and its bounds are weaker than <: Int
+ */
+ def checkKindBounds0(
+ tparams: List[Symbol],
+ targs: List[Type],
+ pre: Type,
+ owner: Symbol,
+ explainErrors: Boolean
+ ): List[(Type, Symbol, KindErrors)] = {
+
+ // instantiate type params that come from outside the abstract type we're currently checking
+ def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz)
+
+ // check that the type parameters hkargs to a higher-kinded type conform to the
+ // expected params hkparams
+ def checkKindBoundsHK(
+ hkargs: List[Symbol],
+ arg: Symbol,
+ param: Symbol,
+ paramowner: Symbol,
+ underHKParams: List[Symbol],
+ withHKArgs: List[Symbol]
+ ): KindErrors = {
+
+ var kindErrors: KindErrors = NoKindErrors
+ def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
+ // @M sometimes hkargs != arg.typeParams, the symbol and the type may
+ // have very different type parameters
+ val hkparams = param.typeParams
+
+ def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
+ if (!cond)
+ kindErrors = f(kindErrors)
+ }
+
+ if (settings.debug.value) {
+ log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
+ log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
+ log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
+ }
+
+ if (!sameLength(hkargs, hkparams)) {
+ // Any and Nothing are kind-overloaded
+ if (arg == AnyClass || arg == NothingClass) NoKindErrors
+ // shortcut: always set error, whether explainTypesOrNot
+ else return kindErrors.arityError(arg -> param)
+ }
+ else foreach2(hkargs, hkparams) { (hkarg, hkparam) =>
+ if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
+ kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam))
+ // instantiateTypeParams(tparams, targs)
+ // higher-order bounds, may contain references to type arguments
+ // substSym(hkparams, hkargs)
+ // these types are going to be compared as types of kind *
+ //
+ // Their arguments use different symbols, but are
+ // conceptually the same. Could also replace the types by
+ // polytypes, but can't just strip the symbols, as ordering
+ // is lost then.
+ val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner)
+ val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner)
+ val argumentBounds = transform(hkarg.info.bounds, owner)
+
+ kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam))
+
+ debuglog(
+ "checkKindBoundsHK base case: " + hkparam +
+ " declared bounds: " + declaredBounds +
+ " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
+ "checkKindBoundsHK base case: "+ hkarg +
+ " has bounds: " + argumentBounds
+ )
+ }
+ else {
+ debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ kindErrors ++= checkKindBoundsHK(
+ hkarg.typeParams,
+ hkarg,
+ hkparam,
+ paramowner,
+ underHKParams ++ hkparam.typeParams,
+ withHKArgs ++ hkarg.typeParams
+ )
+ }
+ if (!explainErrors && !kindErrors.isEmpty)
+ return kindErrors
+ }
+ if (explainErrors) kindErrors
+ else NoKindErrors
+ }
+
+ if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log(
+ "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
+ + owner + ", " + explainErrors + ")"
+ )
+
+ flatMap2(tparams, targs) { (tparam, targ) =>
+ // Prevent WildcardType from causing kind errors, as typevars may be higher-order
+ if (targ == WildcardType) Nil else {
+ // force symbol load for #4205
+ targ.typeSymbolDirect.info
+ // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
+ val tparamsHO = targ.typeParams
+ if (targ.isHigherKinded || tparam.typeParams.nonEmpty) {
+ // NOTE: *not* targ.typeSymbol, which normalizes
+ val kindErrors = checkKindBoundsHK(
+ tparamsHO, targ.typeSymbolDirect, tparam,
+ tparam.owner, tparam.typeParams, tparamsHO
+ )
+ if (kindErrors.isEmpty) Nil else {
+ if (explainErrors) List((targ, tparam, kindErrors))
+ // Return as soon as an error is seen if there's nothing to explain.
+ else return List((NoType, NoSymbol, NoKindErrors))
+ }
+ }
+ else Nil
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/internal/NameManglers.scala b/src/compiler/scala/reflect/internal/NameManglers.scala
index 01a93c0ada..ef092f16bb 100644
--- a/src/compiler/scala/reflect/internal/NameManglers.scala
+++ b/src/compiler/scala/reflect/internal/NameManglers.scala
@@ -22,7 +22,10 @@ trait NameManglers {
val MODULE_SUFFIX_STRING = NameTransformer.MODULE_SUFFIX_STRING
val NAME_JOIN_STRING = NameTransformer.NAME_JOIN_STRING
-
+
+ val MODULE_SUFFIX_NAME: TermName = newTermName(MODULE_SUFFIX_STRING)
+ val NAME_JOIN_NAME: TermName = newTermName(NAME_JOIN_STRING)
+
def flattenedName(segments: Name*): NameType = compactedString(segments mkString NAME_JOIN_STRING)
/**
@@ -72,10 +75,13 @@ trait NameManglers {
val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SETTER_SUFFIX = encode("_=")
val SINGLETON_SUFFIX = ".type"
+ val SPECIALIZED_SUFFIX_STRING = "$sp"
val SUPER_PREFIX_STRING = "super$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+
+ val SETTER_SUFFIX: TermName = encode("_=")
+ val SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX_STRING
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
@@ -88,7 +94,7 @@ trait NameManglers {
def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX
- def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_STRING
+ def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_NAME
def isOpAssignmentName(name: Name) = name match {
case raw.NE | raw.LE | raw.GE | EMPTY => false
@@ -112,6 +118,17 @@ trait NameManglers {
name.subName(i, name.length)
} else name
}
+
+ def unspecializedName(name: Name): Name = (
+ if (name endsWith SPECIALIZED_SUFFIX_NAME)
+ name.subName(0, name.lastIndexOf('m') - 1)
+ else name
+ )
+
+ def macroMethodName(name: Name) = {
+ val base = if (name.isTypeName) nme.TYPEkw else nme.DEFkw
+ base append nme.MACRO append name
+ }
/** Return the original name and the types on which this name
* is specialized. For example,
@@ -123,8 +140,8 @@ trait NameManglers {
* and another one belonging to the enclosing class, on Double.
*/
def splitSpecializedName(name: Name): (Name, String, String) =
- if (name.endsWith("$sp")) {
- val name1 = name stripEnd "$sp"
+ if (name endsWith SPECIALIZED_SUFFIX_NAME) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX_NAME.length
val idxC = name1 lastIndexOf 'c'
val idxM = name1 lastIndexOf 'm'
@@ -135,16 +152,18 @@ trait NameManglers {
(name, "", "")
def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
- def getterToLocal(name: TermName): TermName = name.toTermName append LOCAL_SUFFIX_STRING
- def getterToSetter(name: TermName): TermName = name.toTermName append SETTER_SUFFIX
- def localToGetter(name: TermName): TermName = name stripEnd LOCAL_SUFFIX_STRING toTermName
+ def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
+ def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
+ def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
+
+ def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
def setterToGetter(name: TermName): TermName = {
val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
if (p < name.length)
- setterToGetter(name.subName(p + TRAIT_SETTER_SEPARATOR_STRING.length, name.length))
+ setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
else
- name stripEnd SETTER_SUFFIX toTermName
+ name.subName(0, name.length - SETTER_SUFFIX.length)
}
def defaultGetterName(name: Name, pos: Int): TermName = {
@@ -153,8 +172,8 @@ trait NameManglers {
}
def defaultGetterToMethod(name: Name): TermName = {
val p = name.pos(DEFAULT_GETTER_STRING)
- if (p < name.length) name.subName(0, p)
- else name
+ if (p < name.length) name.toTermName.subName(0, p)
+ else name.toTermName
}
/** !!! I'm putting this logic in place because I can witness
@@ -178,18 +197,14 @@ trait NameManglers {
}
def stripModuleSuffix(name: Name): Name = (
- if (isModuleName(name)) name stripEnd MODULE_SUFFIX_STRING else name
+ if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
)
- /** Note that for performance reasons, stripEnd does not verify that the
- * suffix is actually the suffix specified.
- */
- def dropSingletonName(name: Name): TypeName = name stripEnd SINGLETON_SUFFIX toTypeName
+ def dropSingletonName(name: Name): TypeName = name dropRight SINGLETON_SUFFIX.length toTypeName
def singletonName(name: Name): TypeName = name append SINGLETON_SUFFIX toTypeName
def implClassName(name: Name): TypeName = name append IMPL_CLASS_SUFFIX toTypeName
- def interfaceName(implname: Name): TypeName = implname stripEnd IMPL_CLASS_SUFFIX toTypeName
+ def interfaceName(implname: Name): TypeName = implname dropRight IMPL_CLASS_SUFFIX.length toTypeName
def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
- def productAccessorName(i: Int): TermName = newTermName("_" + i)
def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
/** The name of an accessor for protected symbols. */
diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala
index a6fd2adb34..b960695f51 100644
--- a/src/compiler/scala/reflect/internal/Names.scala
+++ b/src/compiler/scala/reflect/internal/Names.scala
@@ -71,39 +71,54 @@ trait Names extends api.Names {
}
/** Create a term name from the characters in cs[offset..offset+len-1]. */
- def newTermName(cs: Array[Char], offset: Int, len: Int): TermName = {
+ def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
+ newTermName(cs, offset, len, cachedString = null)
+
+ def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
+ def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
+
+ /** Create a term name from the characters in cs[offset..offset+len-1]. */
+ protected def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
n = n.next
- if (n eq null) {
+
+ if (n ne null) n
+ else {
// The logic order here is future-proofing against the possibility
// that name.toString will become an eager val, in which case the call
// to enterChars cannot follow the construction of the TermName.
val ncStart = nc
enterChars(cs, offset, len)
- n = new TermName(ncStart, len, h)
+ if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
+ else new TermName_R(ncStart, len, h)
}
- n
}
+ protected def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
+ newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
- def newTermName(s: String): TermName =
- newTermName(s.toCharArray(), 0, s.length())
+ def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
+
+ /** Create a type name from string. */
+ def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
- val chars = Codec fromUTF8 bs.slice(offset, offset + len)
+ val chars = Codec.fromUTF8(bs, offset, len)
newTermName(chars, 0, chars.length)
}
+ def newTermNameCached(s: String): TermName =
+ newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
+
+ def newTypeNameCached(s: String): TypeName =
+ newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
+
/** Create a type name from the characters in cs[offset..offset+len-1]. */
def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
- newTermName(cs, offset, len).toTypeName
-
- /** Create a type name from string. */
- def newTypeName(s: String): TypeName =
- newTermName(s).toTypeName
+ newTermName(cs, offset, len, cachedString = null).toTypeName
/** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
@@ -114,19 +129,27 @@ trait Names extends api.Names {
// Classes ----------------------------------------------------------------------
- /** The name class. */
+ /** The name class.
+ * TODO - resolve schizophrenia regarding whether to treat Names as Strings
+ * or Strings as Names. Give names the key functions the absence of which
+ * make people want Strings all the time.
+ */
sealed abstract class Name(protected val index: Int, protected val len: Int) extends AbsName with Function1[Int, Char] {
+ type ThisNameType <: Name
+ protected[this] def thisName: ThisNameType
+
/** Index into name table */
def start: Int = index
/** The next name in the same hash bucket. */
- def next: Name
+ def next: ThisNameType
/** The length of this name. */
final def length: Int = len
final def isEmpty = length == 0
final def nonEmpty = !isEmpty
+ def nameKind: String
def isTermName: Boolean
def isTypeName: Boolean
def toTermName: TermName
@@ -134,6 +157,15 @@ trait Names extends api.Names {
def companionName: Name
def bothNames: List[Name] = List(toTermName, toTypeName)
+ /** Return the subname with characters from from to to-1. */
+ def subName(from: Int, to: Int): ThisNameType
+
+ /** Return a new name of the same variety. */
+ def newName(str: String): ThisNameType
+
+ /** Return a new name based on string transformation. */
+ def mapName(f: String => String): ThisNameType = newName(f(toString))
+
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
compat.Platform.arraycopy(chrs, index, cs, offset, len)
@@ -145,21 +177,13 @@ trait Names extends api.Names {
cs
}
- /** @return the string representation of this name */
- final override def toString(): String = new String(chrs, index, len)
- // Should we opt to make toString into a val to avoid the creation
- // of 750,000 copies of x$1, here's the line.
- // final override val toString = new String(chrs, index, len)
-
- def debugString() = NameTransformer.decode(toString) + (if (isTypeName) "!" else "")
-
/** Write to UTF8 representation of this name to given character array.
* Start copying to index `to`. Return index of next free byte in array.
* Array must have enough remaining space for all bytes
* (i.e. maximally 3*length bytes).
*/
final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
- val bytes = Codec toUTF8 chrs.slice(index, index + len)
+ val bytes = Codec.toUTF8(chrs, index, len)
compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
offset + bytes.length
}
@@ -306,6 +330,16 @@ trait Names extends api.Names {
while (start <= last && !startsWith(subname, start)) start += 1
start <= last
}
+ final def containsChar(ch: Char): Boolean = {
+ var i = index
+ val max = index + len
+ while (i < max) {
+ if (chrs(i) == ch)
+ return true
+ i += 1
+ }
+ false
+ }
/** Some thoroughly self-explanatory convenience functions. They
* assume that what they're being asked to do is known to be valid.
@@ -316,15 +350,20 @@ trait Names extends api.Names {
final def startsWith(name: String): Boolean = startsWith(newTermName(name))
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
final def endsWith(name: String): Boolean = endsWith(newTermName(name))
- final def stripStart(prefix: Name): Name = subName(prefix.length, len)
- final def stripStart(prefix: String): Name = subName(prefix.length, len)
- final def stripEnd(suffix: Name): Name = subName(0, len - suffix.length)
- final def stripEnd(suffix: String): Name = subName(0, len - suffix.length)
-
- def lastIndexOf(ch: Char) = toChars lastIndexOf ch
- /** Return the subname with characters from from to to-1. */
- def subName(from: Int, to: Int): Name
+ def dropRight(n: Int) = subName(0, len - n)
+ def drop(n: Int) = subName(n, len)
+
+ def indexOf(ch: Char) = {
+ val idx = pos(ch)
+ if (idx == length) -1 else idx
+ }
+ def indexOf(ch: Char, fromIndex: Int) = {
+ val idx = pos(ch, fromIndex)
+ if (idx == length) -1 else idx
+ }
+ def lastIndexOf(ch: Char) = lastPos(ch)
+ def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
/** Replace all occurrences of `from` by `to` in
* name; result is always a term name.
@@ -339,30 +378,79 @@ trait Names extends api.Names {
}
newTermName(cs, 0, len)
}
+
+ /** TODO - reconcile/fix that encode returns a Name but
+ * decode returns a String.
+ */
/** Replace operator symbols by corresponding $op_name. */
- def encode: Name = {
+ def encode: ThisNameType = {
val str = toString
val res = NameTransformer.encode(str)
- if (res == str) this
- else if (isTypeName) newTypeName(res)
- else newTermName(res)
+ if (res == str) thisName else newName(res)
}
- def append(suffix: String): Name
- def append(suffix: Name): Name
-
/** Replace $op_name by corresponding operator symbol. */
- def decode: String = (
- NameTransformer.decode(toString) +
- (if (nameDebug && isTypeName) "!" else ""))//debug
+ def decode: String = {
+ if (this containsChar '$') {
+ val str = toString
+ val res = NameTransformer.decode(str)
+ if (res == str) str
+ else res
+ }
+ else toString
+ }
+
+ /** TODO - find some efficiency. */
+ def append(ch: Char) = newName("" + this + ch)
+ def append(suffix: String) = newName("" + this + suffix)
+ def append(suffix: Name) = newName("" + this + suffix)
+ def prepend(ch: Char) = newName("" + ch + this)
+ def prepend(prefix: String) = newName("" + prefix + this)
+ def prepend(prefix: Name) = newName("" + prefix + this)
+ def decodedName: ThisNameType = newName(decode)
def isOperatorName: Boolean = decode != toString
- def nameKind: String = if (isTypeName) "type" else "term"
- def longString: String = nameKind + " " + NameTransformer.decode(toString)
+ def longString: String = nameKind + " " + decode
+ def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
+ }
+
+ /** A name that contains no operator chars nor dollar signs.
+ * TODO - see if it's any faster to do something along these lines.
+ */
+ trait AlphaNumName extends Name {
+ final override def encode = thisName
+ final override def decodedName = thisName
+ final override def decode = toString
+ final override def isOperatorName = false
}
- final class TermName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
+ /** TermName_S and TypeName_S have fields containing the string version of the name.
+ * TermName_R and TypeName_R recreate it each time toString is called.
+ */
+ private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
+ override def newName(str: String): TermName = newTermNameCached(str)
+ }
+ private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
+ override def newName(str: String): TypeName = newTypeNameCached(str)
+ }
+
+ private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
+ }
+
+ private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
+ }
+
+ sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TermName
+ protected[this] def thisName: TermName = this
+
var next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
@@ -372,19 +460,24 @@ trait Names extends api.Names {
val h = hashValue(chrs, index, len) & HASH_MASK
var n = typeHashtable(h)
while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TypeName(index, len, h);
- n
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
}
- def append(suffix: String): TermName = newTermName(this + suffix)
- def append(suffix: Name): TermName = append(suffix.toString)
+ def newName(str: String): TermName = newTermName(str)
def companionName: TypeName = toTypeName
def subName(from: Int, to: Int): TermName =
newTermName(chrs, start + from, to - from)
+
+ def nameKind = "term"
+ protected def createCompanionName(h: Int): TypeName
}
- final class TypeName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
+ sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TypeName
+ protected[this] def thisName: TypeName = this
+
var next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
@@ -393,17 +486,19 @@ trait Names extends api.Names {
val h = hashValue(chrs, index, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TermName(index, len, h);
- n
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
}
def toTypeName: TypeName = this
-
- def append(suffix: String): TypeName = newTypeName(this + suffix)
- def append(suffix: Name): TypeName = append(suffix.toString)
+ def newName(str: String): TypeName = newTypeName(str)
def companionName: TermName = toTermName
def subName(from: Int, to: Int): TypeName =
newTypeName(chrs, start + from, to - from)
+
+ def nameKind = "type"
+ override def decode = if (nameDebug) super.decode + "!" else super.decode
+ protected def createCompanionName(h: Int): TermName
}
}
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 8afe276514..ea5565c581 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -8,18 +8,21 @@ package internal
import scala.collection.immutable
import NameTransformer.MODULE_SUFFIX_STRING
+import annotation.switch
-trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: SymbolTable =>
+trait StdNames extends NameManglers { self: SymbolTable =>
- def encode(str: String): TermName = newTermName(NameTransformer.encode(str))
+ def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
+
+ implicit def lowerTermNames(n: TermName): String = "" + n
- implicit def stringToTermName(s: String): TermName = newTermName(s)
+ // implicit def stringToTermName(s: String): TermName = newTermName(s)
/** This should be the first trait in the linearization. */
trait Keywords {
private var kws: Set[TermName] = Set()
private def kw(s: String): TermName = {
- val result = newTermName(s)
+ val result = newTermNameCached(s)
kws = kws + result
result
}
@@ -87,7 +90,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
trait CommonNames /*extends LibraryCommonNames*/ {
type NameType <: Name
- implicit def createNameType(name: String): NameType
+ protected implicit def createNameType(name: String): NameType
val EMPTY: NameType = ""
val ANON_FUN_NAME: NameType = "$anonfun"
@@ -146,6 +149,10 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val String: NameType = "String"
final val Throwable: NameType = "Throwable"
+ final val Annotation: NameType = "Annotation"
+ final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
+ final val Enum: NameType = "Enum"
+
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
@@ -172,123 +179,191 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val SyntheticATTR: NameType = "Synthetic"
}
-
trait TermNames extends Keywords with CommonNames {
// Compiler internal names
+ val EXPAND_SEPARATOR_STRING = "$$"
+
val ANYNAME: NameType = "<anyname>"
val CONSTRUCTOR: NameType = "<init>"
val FAKE_LOCAL_THIS: NameType = "this$"
val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
+ val LAZY_LOCAL: NameType = "$lzy"
+ val LOCAL_SUFFIX_STRING = " "
+ val MACRO: NameType = "macro$"
+ val MIRROR_PREFIX: NameType = "$mr."
+ val MIRROR_SHORT: NameType = "$mr"
val MIXIN_CONSTRUCTOR: NameType = "$init$"
val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: NameType = "$outer"
- val OUTER_LOCAL: NameType = "$outer " // note the space
+ val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
+ val SELECTOR_DUMMY: NameType = "<unapply-selector>"
val SELF: NameType = "$this"
val SPECIALIZED_INSTANCE: NameType = "specInstance$"
val STAR: NameType = "*"
val THIS: NameType = "_$this"
- val SELECTOR_DUMMY: NameType = "<unapply-selector>"
final val Nil: NameType = "Nil"
final val Predef: NameType = "Predef"
final val ScalaRunTime: NameType = "ScalaRunTime"
final val Some: NameType = "Some"
+
+ val _1 : NameType = "_1"
+ val _2 : NameType = "_2"
+ val _3 : NameType = "_3"
+ val _4 : NameType = "_4"
+ val _5 : NameType = "_5"
+ val _6 : NameType = "_6"
+ val _7 : NameType = "_7"
+ val _8 : NameType = "_8"
+ val _9 : NameType = "_9"
+ val _10 : NameType = "_10"
+ val _11 : NameType = "_11"
+ val _12 : NameType = "_12"
+ val _13 : NameType = "_13"
+ val _14 : NameType = "_14"
+ val _15 : NameType = "_15"
+ val _16 : NameType = "_16"
+ val _17 : NameType = "_17"
+ val _18 : NameType = "_18"
+ val _19 : NameType = "_19"
+ val _20 : NameType = "_20"
+ val _21 : NameType = "_21"
+ val _22 : NameType = "_22"
+
+ val wrapRefArray: NameType = "wrapRefArray"
+ val wrapByteArray: NameType = "wrapByteArray"
+ val wrapShortArray: NameType = "wrapShortArray"
+ val wrapCharArray: NameType = "wrapCharArray"
+ val wrapIntArray: NameType = "wrapIntArray"
+ val wrapLongArray: NameType = "wrapLongArray"
+ val wrapFloatArray: NameType = "wrapFloatArray"
+ val wrapDoubleArray: NameType = "wrapDoubleArray"
+ val wrapBooleanArray: NameType = "wrapBooleanArray"
+ val wrapUnitArray: NameType = "wrapUnitArray"
+ val genericWrapArray: NameType = "genericWrapArray"
// Compiler utilized names
// val productElementName: NameType = "productElementName"
- val TYPE_ : NameType = "TYPE"
- val add_ : NameType = "add"
- val anyValClass: NameType = "anyValClass"
- val append: NameType = "append"
- val apply: NameType = "apply"
- val arrayValue: NameType = "arrayValue"
- val arraycopy: NameType = "arraycopy"
- val asInstanceOf_ : NameType = "asInstanceOf"
- val assert_ : NameType = "assert"
- val assume_ : NameType = "assume"
- val box: NameType = "box"
- val bytes: NameType = "bytes"
- val canEqual_ : NameType = "canEqual"
- val checkInitialized: NameType = "checkInitialized"
- val classOf: NameType = "classOf"
- val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
- val conforms: NameType = "conforms"
- val copy: NameType = "copy"
- val delayedInit: NameType = "delayedInit"
- val delayedInitArg: NameType = "delayedInit$body"
- val drop: NameType = "drop"
- val elem: NameType = "elem"
- val eq: NameType = "eq"
- val equals_ : NameType = if (forMSIL) "Equals" else "equals"
- val error: NameType = "error"
- val ex: NameType = "ex"
- val false_ : NameType = "false"
- val filter: NameType = "filter"
- val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
- val find_ : NameType = "find"
- val flatMap: NameType = "flatMap"
- val foreach: NameType = "foreach"
- val formatted: NameType = "formatted"
- val genericArrayOps: NameType = "genericArrayOps"
- val get: NameType = "get"
- val hasNext: NameType = "hasNext"
- val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
- val hash_ : NameType = "hash"
- val head: NameType = "head"
- val identity: NameType = "identity"
- val inlinedEquals: NameType = "inlinedEquals"
- val applyDynamic: NameType = "applyDynamic"
- val isArray: NameType = "isArray"
- val isDefinedAt: NameType = "isDefinedAt"
- val _isDefinedAt: NameType = "_isDefinedAt"
- val isEmpty: NameType = "isEmpty"
- val isInstanceOf_ : NameType = "isInstanceOf"
- val java: NameType = "java"
- val lang: NameType = "lang"
- val length: NameType = "length"
- val lengthCompare: NameType = "lengthCompare"
- val lift_ : NameType = "lift"
- val macro_ : NameType = "macro"
- val main: NameType = "main"
- val map: NameType = "map"
- val missingCase: NameType = "missingCase"
- val ne: NameType = "ne"
- val newArray: NameType = "newArray"
- val next: NameType = "next"
- val notifyAll_ : NameType = "notifyAll"
- val notify_ : NameType = "notify"
- val null_ : NameType = "null"
- val ofDim: NameType = "ofDim"
- val productArity: NameType = "productArity"
- val productElement: NameType = "productElement"
- val productIterator: NameType = "productIterator"
- val productPrefix: NameType = "productPrefix"
- val readResolve: NameType = "readResolve"
- val runOrElse: NameType = "runOrElse"
- val sameElements: NameType = "sameElements"
- val scala_ : NameType = "scala"
- val self: NameType = "self"
- val setAccessible: NameType = "setAccessible"
- val synchronized_ : NameType = "synchronized"
- val tail: NameType = "tail"
- val this_ : NameType = "this"
- val throw_ : NameType = "throw"
- val toArray: NameType = "toArray"
- val toList: NameType = "toList"
- val toSeq: NameType = "toSeq"
- val toString_ : NameType = if (forMSIL) "ToString" else "toString"
- val true_ : NameType = "true"
- val unapply: NameType = "unapply"
- val unapplySeq: NameType = "unapplySeq"
- val unbox: NameType = "unbox"
- val update: NameType = "update"
- val value: NameType = "value"
- val view_ : NameType = "view"
- val wait_ : NameType = "wait"
- val withFilter: NameType = "withFilter"
- val wrapRefArray: NameType = "wrapRefArray"
- val zip: NameType = "zip"
+ val Ident: NameType = "Ident"
+ val TYPE_ : NameType = "TYPE"
+ val TypeTree: NameType = "TypeTree"
+ val UNIT : NameType = "UNIT"
+ val _isDefinedAt: NameType = "_isDefinedAt"
+ val add_ : NameType = "add"
+ val annotation: NameType = "annotation"
+ val anyValClass: NameType = "anyValClass"
+ val append: NameType = "append"
+ val apply: NameType = "apply"
+ val applyDynamic: NameType = "applyDynamic"
+ val args : NameType = "args"
+ val argv : NameType = "argv"
+ val arrayValue: NameType = "arrayValue"
+ val array_apply : NameType = "array_apply"
+ val array_clone : NameType = "array_clone"
+ val array_length : NameType = "array_length"
+ val array_update : NameType = "array_update"
+ val arraycopy: NameType = "arraycopy"
+ val asInstanceOf_ : NameType = "asInstanceOf"
+ val asTypeConstructor: NameType = "asTypeConstructor"
+ val assert_ : NameType = "assert"
+ val assume_ : NameType = "assume"
+ val box: NameType = "box"
+ val bytes: NameType = "bytes"
+ val canEqual_ : NameType = "canEqual"
+ val checkInitialized: NameType = "checkInitialized"
+ val classOf: NameType = "classOf"
+ val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val conforms: NameType = "conforms"
+ val copy: NameType = "copy"
+ val delayedInit: NameType = "delayedInit"
+ val delayedInitArg: NameType = "delayedInit$body"
+ val drop: NameType = "drop"
+ val elem: NameType = "elem"
+ val emptyValDef: NameType = "emptyValDef"
+ val ensureAccessible : NameType = "ensureAccessible"
+ val eq: NameType = "eq"
+ val equalsNumChar : NameType = "equalsNumChar"
+ val equalsNumNum : NameType = "equalsNumNum"
+ val equalsNumObject : NameType = "equalsNumObject"
+ val equals_ : NameType = if (forMSIL) "Equals" else "equals"
+ val error: NameType = "error"
+ val ex: NameType = "ex"
+ val false_ : NameType = "false"
+ val filter: NameType = "filter"
+ val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
+ val find_ : NameType = "find"
+ val flatMap: NameType = "flatMap"
+ val foreach: NameType = "foreach"
+ val formatted: NameType = "formatted"
+ val freeValue : NameType = "freeValue"
+ val genericArrayOps: NameType = "genericArrayOps"
+ val get: NameType = "get"
+ val glob : NameType = "glob"
+ val hasNext: NameType = "hasNext"
+ val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
+ val hash_ : NameType = "hash"
+ val head: NameType = "head"
+ val identity: NameType = "identity"
+ val inlinedEquals: NameType = "inlinedEquals"
+ val isArray: NameType = "isArray"
+ val isDefinedAt: NameType = "isDefinedAt"
+ val isEmpty: NameType = "isEmpty"
+ val isInstanceOf_ : NameType = "isInstanceOf"
+ val java: NameType = "java"
+ val lang: NameType = "lang"
+ val length: NameType = "length"
+ val lengthCompare: NameType = "lengthCompare"
+ val lift_ : NameType = "lift"
+ val macro_ : NameType = "macro"
+ val main: NameType = "main"
+ val map: NameType = "map"
+ val mirror : NameType = "mirror"
+ val missingCase: NameType = "missingCase"
+ val ne: NameType = "ne"
+ val newArray: NameType = "newArray"
+ val newScopeWith: NameType = "newScopeWith"
+ val next: NameType = "next"
+ val notifyAll_ : NameType = "notifyAll"
+ val notify_ : NameType = "notify"
+ val null_ : NameType = "null"
+ val ofDim: NameType = "ofDim"
+ val productArity: NameType = "productArity"
+ val productElement: NameType = "productElement"
+ val productIterator: NameType = "productIterator"
+ val productPrefix: NameType = "productPrefix"
+ val readResolve: NameType = "readResolve"
+ val runOrElse: NameType = "runOrElse"
+ val runtime: NameType = "runtime"
+ val sameElements: NameType = "sameElements"
+ val scala_ : NameType = "scala"
+ val self: NameType = "self"
+ val setAccessible: NameType = "setAccessible"
+ val setAnnotations: NameType = "setAnnotations"
+ val setTypeSig: NameType = "setTypeSig"
+ val synchronized_ : NameType = "synchronized"
+ val tail: NameType = "tail"
+ val thisModuleType: NameType = "thisModuleType"
+ val this_ : NameType = "this"
+ val throw_ : NameType = "throw"
+ val toArray: NameType = "toArray"
+ val toList: NameType = "toList"
+ val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
+ val toString_ : NameType = if (forMSIL) "ToString" else "toString"
+ val true_ : NameType = "true"
+ val typedProductIterator: NameType = "typedProductIterator"
+ val unapply: NameType = "unapply"
+ val unapplySeq: NameType = "unapplySeq"
+ val unbox: NameType = "unbox"
+ val update: NameType = "update"
+ val value: NameType = "value"
+ val valueOf : NameType = "valueOf"
+ val values : NameType = "values"
+ val view_ : NameType = "view"
+ val wait_ : NameType = "wait"
+ val withFilter: NameType = "withFilter"
+ val zip: NameType = "zip"
// unencoded operators
object raw {
@@ -316,21 +391,53 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val toLong: NameType = "toLong"
val toFloat: NameType = "toFloat"
val toDouble: NameType = "toDouble"
+
+ // primitive operation methods for structual types mostly
+ // overlap with the above, but not for these two.
+ val toCharacter: NameType = "toCharacter"
+ val toInteger: NameType = "toInteger"
}
object tpnme extends TypeNames /*with LibraryTypeNames*/ with TypeNameMangling {
type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
val REFINE_CLASS_NAME: NameType = "<refinement>"
val ANON_CLASS_NAME: NameType = "$anon"
}
+
+ /** For fully qualified type names.
+ */
+ object fulltpnme extends TypeNames {
+ type NameType = TypeName
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
+
+ val RuntimeNothing: NameType = "scala.runtime.Nothing$"
+ val RuntimeNull: NameType = "scala.runtime.Null$"
+ val JavaLangEnum: NameType = "java.lang.Enum"
+ }
+
+ /** Java binary names, like scala/runtime/Nothing$.
+ */
+ object binarynme {
+ def toBinary(name: Name) = name mapName (_.replace('.', '/'))
+
+ val RuntimeNothing = toBinary(fulltpnme.RuntimeNothing).toTypeName
+ val RuntimeNull = toBinary(fulltpnme.RuntimeNull).toTypeName
+ }
+
+ object fullnme extends TermNames {
+ type NameType = TermName
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
+
+ val MirrorPackage: NameType = "scala.reflect.mirror"
+ }
val javanme = nme.javaKeywords
object nme extends TermNames /*with LibraryTermNames*/ with TermNameMangling {
type NameType = TermName
- def createNameType(name: String): TermName = newTermName(name)
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Translate a String into a list of simple TypeNames and TermNames.
* In all segments before the last, type/term is determined by whether
@@ -363,7 +470,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
case -1 => if (name == "") scala.Nil else scala.List(mkName(name, assumeTerm))
// otherwise, we can tell based on whether '#' or '.' is the following char.
case idx =>
- val (simple, div, rest) = (name take idx, name charAt idx, name drop (idx + 1))
+ val (simple, div, rest) = (name take idx, name charAt idx, newTermName(name) drop (idx + 1))
mkName(simple, div == '.') :: segments(rest, assumeTerm)
}
}
@@ -380,26 +487,27 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
/** The expanded name of `name` relative to this class `base` with given `separator`
*/
def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
- newTermName(base.fullName('$') + separator + name)
+ newTermNameCached(base.fullName('$') + separator + name)
- def moduleVarName(name: TermName): TermName = newTermName("" + name + MODULE_VAR_SUFFIX)
-
- val EXPAND_SEPARATOR_STRING = "$$"
- val LOCAL_SUFFIX_STRING = " "
+ def moduleVarName(name: TermName): TermName =
+ newTermNameCached("" + name + MODULE_VAR_SUFFIX)
+
val ROOTPKG: TermName = "_root_"
/** Base strings from which synthetic names are derived. */
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DO_WHILE_PREFIX = "doWhile$"
- val EQEQ_LOCAL_VAR = "eqEqTemp$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
- val WHILE_PREFIX = "while$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EQEQ_LOCAL_VAR_STRING = "eqEqTemp$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val INTERPRETER_LINE_PREFIX = "line"
+ val INTERPRETER_VAR_PREFIX = "res"
+ val INTERPRETER_WRAPPER_SUFFIX = "$object"
+ val WHILE_PREFIX = "while$"
+
+ val EQEQ_LOCAL_VAR: TermName = newTermName(EQEQ_LOCAL_VAR_STRING)
def getCause = sn.GetCause
def getClass_ = sn.GetClass
@@ -424,8 +532,8 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val MUL = encode("*")
val NE = encode("!=")
val OR = encode("|")
- val PLUS = encode("+")
- val SUB = encode("-")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val SUB = MINUS // ... as does SUB with PLUS
val XOR = encode("^")
val ZAND = encode("&&")
val ZOR = encode("||")
@@ -435,10 +543,138 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
val UNARY_+ = encode("unary_+")
val UNARY_- = encode("unary_-")
val UNARY_! = encode("unary_!")
+
+ // Grouped here so Cleanup knows what tests to perform.
+ val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
+ val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
+ val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
+ val NumberOpNames = (
+ Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
+ ++ Set(UNARY_+, UNARY_-, UNARY_!)
+ ++ ConversionNames
+ ++ CommonOpNames
+ )
+
+ val add: NameType = "add"
+ val complement: NameType = "complement"
+ val divide: NameType = "divide"
+ val multiply: NameType = "multiply"
+ val negate: NameType = "negate"
+ val positive: NameType = "positive"
+ val shiftLogicalRight: NameType = "shiftLogicalRight"
+ val shiftSignedLeft: NameType = "shiftSignedLeft"
+ val shiftSignedRight: NameType = "shiftSignedRight"
+ val subtract: NameType = "subtract"
+ val takeAnd: NameType = "takeAnd"
+ val takeConditionalAnd: NameType = "takeConditionalAnd"
+ val takeConditionalOr: NameType = "takeConditionalOr"
+ val takeModulo: NameType = "takeModulo"
+ val takeNot: NameType = "takeNot"
+ val takeOr: NameType = "takeOr"
+ val takeXor: NameType = "takeXor"
+ val testEqual: NameType = "testEqual"
+ val testGreaterOrEqualThan: NameType = "testGreaterOrEqualThan"
+ val testGreaterThan: NameType = "testGreaterThan"
+ val testLessOrEqualThan: NameType = "testLessOrEqualThan"
+ val testLessThan: NameType = "testLessThan"
+ val testNotEqual: NameType = "testNotEqual"
+
+ val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean"
+ val isBoxedNumber: NameType = "isBoxedNumber"
+
+ def toUnaryName(name: TermName): TermName = name match {
+ case raw.MINUS => UNARY_-
+ case raw.PLUS => UNARY_+
+ case raw.TILDE => UNARY_~
+ case raw.BANG => UNARY_!
+ case _ => name
+ }
+ /** The name of a method which stands in for a primitive operation
+ * during structural type dispatch.
+ */
+ def primitiveInfixMethodName(name: Name): TermName = name match {
+ case OR => takeOr
+ case XOR => takeXor
+ case AND => takeAnd
+ case EQ => testEqual
+ case NE => testNotEqual
+ case ADD => add
+ case SUB => subtract
+ case MUL => multiply
+ case DIV => divide
+ case MOD => takeModulo
+ case LSL => shiftSignedLeft
+ case LSR => shiftLogicalRight
+ case ASR => shiftSignedRight
+ case LT => testLessThan
+ case LE => testLessOrEqualThan
+ case GE => testGreaterOrEqualThan
+ case GT => testGreaterThan
+ case ZOR => takeConditionalOr
+ case ZAND => takeConditionalAnd
+ case _ => NO_NAME
+ }
+ /** Postfix/prefix, really.
+ */
+ def primitivePostfixMethodName(name: Name): TermName = name match {
+ case UNARY_! => takeNot
+ case UNARY_+ => positive
+ case UNARY_- => negate
+ case UNARY_~ => complement
+ case `toByte` => toByte
+ case `toShort` => toShort
+ case `toChar` => toCharacter
+ case `toInt` => toInteger
+ case `toLong` => toLong
+ case `toFloat` => toFloat
+ case `toDouble` => toDouble
+ case _ => NO_NAME
+ }
+
+ val reflPolyCacheName: NameType = "reflPoly$Cache"
+ val reflClassCacheName: NameType = "reflClass$Cache"
+ val reflParamsCacheName: NameType = "reflParams$Cache"
+ val reflMethodCacheName: NameType = "reflMethod$Cache"
+ val reflMethodName: NameType = "reflMethod$Method"
+
+ private val reflectionCacheNames = Set[NameType](
+ reflPolyCacheName,
+ reflClassCacheName,
+ reflParamsCacheName,
+ reflMethodCacheName,
+ reflMethodName
+ )
+ def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
+
+ @switch def productAccessorName(j: Int): TermName = j match {
+ case 1 => nme._1
+ case 2 => nme._2
+ case 3 => nme._3
+ case 4 => nme._4
+ case 5 => nme._5
+ case 6 => nme._6
+ case 7 => nme._7
+ case 8 => nme._8
+ case 9 => nme._9
+ case 10 => nme._10
+ case 11 => nme._11
+ case 12 => nme._12
+ case 13 => nme._13
+ case 14 => nme._14
+ case 15 => nme._15
+ case 16 => nme._16
+ case 17 => nme._17
+ case 18 => nme._18
+ case 19 => nme._19
+ case 20 => nme._20
+ case 21 => nme._21
+ case 22 => nme._22
+ case _ => newTermName("_" + j)
+ }
}
abstract class SymbolNames {
- protected implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+ protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
val BeanProperty : TypeName
val BooleanBeanProperty : TypeName
@@ -471,7 +707,7 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
class JavaKeywords {
private var kws: Set[TermName] = Set()
private def kw(s: String): TermName = {
- val result = newTermName(s)
+ val result = newTermNameCached(s)
kws = kws + result
result
}
@@ -555,12 +791,12 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val Throwable: TypeName = "java.lang.Throwable"
final val ValueType: TypeName = tpnme.NO_NAME
- final val ForName: TermName = "forName"
- final val GetCause: TermName = "getCause"
- final val GetClass: TermName = "getClass"
- final val GetMethod: TermName = "getMethod"
- final val Invoke: TermName = "invoke"
- final val JavaLang: TermName = "java.lang"
+ final val ForName: TermName = newTermName("forName")
+ final val GetCause: TermName = newTermName("getCause")
+ final val GetClass: TermName = newTermName("getClass")
+ final val GetMethod: TermName = newTermName("getMethod")
+ final val Invoke: TermName = newTermName("invoke")
+ final val JavaLang: TermName = newTermName("java.lang")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
@@ -593,12 +829,12 @@ trait StdNames extends /*reflect.generic.StdNames with*/ NameManglers { self: Sy
final val Throwable: TypeName = "System.Exception"
final val ValueType: TypeName = "System.ValueType"
- final val ForName: TermName = "GetType"
- final val GetCause: TermName = "InnerException" /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = "GetType"
- final val GetMethod: TermName = "GetMethod"
- final val Invoke: TermName = "Invoke"
- final val JavaLang: TermName = "System"
+ final val ForName: TermName = newTermName("GetType")
+ final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
+ final val GetClass: TermName = newTermName("GetType")
+ final val GetMethod: TermName = newTermName("GetMethod")
+ final val Invoke: TermName = newTermName("Invoke")
+ final val JavaLang: TermName = newTermName("System")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> "System.Boolean",
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 0e9210f1f7..5be69e06ad 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -10,9 +10,11 @@ import scala.collection.{ mutable, immutable }
import util._
abstract class SymbolTable extends api.Universe
+ with Collections
with Names
with Symbols
with Types
+ with Kinds
with Scopes
with Definitions
with Constants
@@ -31,8 +33,8 @@ abstract class SymbolTable extends api.Universe
{
def rootLoader: LazyType
def log(msg: => AnyRef): Unit
- def abort(msg: String): Nothing = throw new Error(msg)
- def abort(): Nothing = throw new Error()
+ def abort(msg: String): Nothing = throw new FatalError(msg)
+ def abort(): Nothing = abort("unknown error")
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index a59ea2c1bf..6ee061392c 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -264,7 +264,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new getter for current symbol (which must be a field)
*/
final def newGetter: Symbol = (
- owner.newMethod(focusPos(pos), nme.getterName(name))
+ owner.newMethod(focusPos(pos), nme.getterName(name.toTermName))
setFlag getterFlags(flags)
setPrivateWithin privateWithin
setInfo MethodType(Nil, tpe)
@@ -409,7 +409,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isError = hasFlag(IS_ERROR)
final def isErroneous = isError || isInitialized && tpe.isErroneous
final def isTypeParameterOrSkolem = isType && hasFlag(PARAM)
- final def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
+ final def isHigherOrderTypeParameter = (this ne NoSymbol) && owner.isTypeParameterOrSkolem
final def isTypeSkolem = isSkolem && hasFlag(PARAM)
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
@@ -430,7 +430,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def isJavaInterface = isJavaDefined && isTrait
- final def needsFlatClasses: Boolean = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
// In java.lang, Predef, or scala package/package object
def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner)
@@ -494,8 +494,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// string. So this needs attention. For now the fact that migration is
// private[scala] ought to provide enough protection.
def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass)
- def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(2) }
- def migrationVersion = getAnnotation(MigrationAnnotationClass) map { version => version.intArg(0).get + "." + version.intArg(1).get }
+ def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
+ def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
@@ -699,16 +699,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
rawowner = owner
}
- private[Symbols] def flattenName(): Name = {
- // This assertion caused me no end of trouble in the interpeter in situations
- // where everything proceeds smoothly if there's no assert. I don't think calling "name"
- // on a symbol is the right place to throw fatal exceptions if things don't look right.
- // It really hampers exploration. Finally I gave up and disabled it, and tickets like
- // SI-4874 instantly start working.
- // assert(rawowner.isClass, "fatal: %s has non-class owner %s after flatten.".format(rawname + idString, rawowner))
-
- nme.flattenedName(rawowner.name, rawname)
- }
def ownerChain: List[Symbol] = this :: owner.ownerChain
def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain
@@ -756,7 +746,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** If this symbol has an expanded name, its original name, otherwise its name itself.
* @see expandName
*/
- def originalName = nme.originalName(name)
+ def originalName: Name = nme.originalName(name)
/** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
*/
@@ -764,20 +754,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
*/
- def decodedName: String = stripNameString(NameTransformer.decode(encodedName))
+ def decodedName: String = nme.dropLocalSuffix(name).decode
- /** Either "$" or "" depending on whether this is a module class.
- */
+ private def addModuleSuffix(n: Name): Name =
+ if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
+
def moduleSuffix: String = (
- if (hasModuleFlag && !isMethod && !isImplClass && !isJavaDefined) nme.MODULE_SUFFIX_STRING
+ if (needsModuleSuffix) nme.MODULE_SUFFIX_STRING
else ""
)
-
+ /** Whether this symbol needs nme.MODULE_SUFFIX_STRING (aka $) appended on the java platform.
+ */
+ def needsModuleSuffix = (
+ hasModuleFlag
+ && !isMethod
+ && !isImplClass
+ && !isJavaDefined
+ )
/** These should be moved somewhere like JavaPlatform.
*/
- def javaSimpleName = ("" + simpleName).trim + moduleSuffix
- def javaBinaryName = fullNameInternal('/') + moduleSuffix
- def javaClassName = fullNameInternal('.') + moduleSuffix
+ def javaSimpleName: String = addModuleSuffix(nme.dropLocalSuffix(simpleName)).toString
+ def javaBinaryName: String = addModuleSuffix(fullNameInternal('/')).toString
+ def javaClassName: String = addModuleSuffix(fullNameInternal('.')).toString
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by `separator` characters.
@@ -785,22 +783,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* Never adds id.
* Drops package objects.
*/
- final def fullName(separator: Char): String = stripNameString(fullNameInternal(separator))
+ final def fullName(separator: Char): String = fullNameAsName(separator).toString
/** Doesn't drop package objects, for those situations (e.g. classloading)
* where the true path is needed.
*/
- private def fullNameInternal(separator: Char): String = (
- if (isRoot || isRootPackage || this == NoSymbol) this.toString
- else if (owner.isEffectiveRoot) encodedName
- else effectiveOwner.enclClass.fullName(separator) + separator + encodedName
+ private def fullNameInternal(separator: Char): Name = (
+ if (isRoot || isRootPackage || this == NoSymbol) name
+ else if (owner.isEffectiveRoot) name
+ else effectiveOwner.enclClass.fullNameAsName(separator) append separator append name
)
-
- /** Strip package objects and any local suffix.
- */
- private def stripNameString(s: String) =
- if (settings.debug.value) s
- else s stripSuffix nme.LOCAL_SUFFIX_STRING
+
+ def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by periods.
@@ -1097,6 +1091,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def typeParams: List[Symbol] =
if (isMonomorphicType) Nil
else {
+ // analogously to the "info" getter, here we allow for two completions:
+ // one: sourceCompleter to LazyType, two: LazyType to completed type
+ if (validTo == NoPeriod)
+ atPhase(phaseOf(infos.validFrom))(rawInfo load this)
if (validTo == NoPeriod)
atPhase(phaseOf(infos.validFrom))(rawInfo load this)
@@ -1238,12 +1236,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def isNestedIn(that: Symbol): Boolean =
owner == that || owner != NoSymbol && (owner isNestedIn that)
-
- /** Is this class symbol a subclass of that symbol? */
- final def isNonBottomSubClass(that: Symbol): Boolean = (
- (this eq that) || this.isError || that.isError ||
- info.baseTypeIndex(that) >= 0
- )
+
+ /** Is this class symbol a subclass of that symbol,
+ * and is this class symbol also different from Null or Nothing? */
+ def isNonBottomSubClass(that: Symbol): Boolean = false
/** Overridden in NullClass and NothingClass for custom behavior.
*/
@@ -1365,7 +1361,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The symbol accessed by this accessor function, but with given owner type. */
final def accessed(ownerTp: Type): Symbol = {
assert(hasAccessorFlag, this)
- ownerTp decl nme.getterToLocal(getterName)
+ ownerTp decl nme.getterToLocal(getterName.toTermName)
}
/** The module corresponding to this module class (note that this
@@ -1698,17 +1694,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
final def getter(base: Symbol): Symbol = base.info.decl(getterName) filter (_.hasAccessorFlag)
- def getterName = (
- if (isSetter) nme.setterToGetter(name)
- else if (nme.isLocalName(name)) nme.localToGetter(name)
- else name
+ def getterName: TermName = (
+ if (isSetter) nme.setterToGetter(name.toTermName)
+ else if (nme.isLocalName(name)) nme.localToGetter(name.toTermName)
+ else name.toTermName
)
/** The setter of this value or getter definition, or NoSymbol if none exists */
final def setter(base: Symbol): Symbol = setter(base, false)
final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
- var sname = nme.getterToSetter(nme.getterName(name))
+ var sname = nme.getterToSetter(nme.getterName(name.toTermName))
if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
base.info.decl(sname) filter (_.hasAccessorFlag)
}
@@ -1763,7 +1759,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
getter(owner).expandName(base)
setter(owner).expandName(base)
}
- name = nme.expandedName(name, base)
+ name = nme.expandedName(name.toTermName, base)
if (isType) name = name
}
}
@@ -1796,7 +1792,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ toString -------------------------------------------------------------------
/** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag = fullName.##
+ final def tag: Int = fullName.##
/** The simple name of this Symbol */
final def simpleName: Name = name
@@ -1806,7 +1802,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* (the initial Name) before falling back on id, which varies depending
* on exactly when a symbol is loaded.
*/
- final def sealedSortName = initName + "#" + id
+ final def sealedSortName: String = initName + "#" + id
/** String representation of symbol's definition key word */
final def keyString: String =
@@ -1874,7 +1870,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* E.g. $eq => =.
* If settings.uniqid, adds id.
*/
- def nameString = decodedName + idString
+ def nameString: String = (
+ if (settings.uniqid.value) decodedName + "#" + id
+ else "" + decodedName
+ )
/** If settings.uniqid is set, the symbol's id, else "" */
final def idString = if (settings.uniqid.value) "#"+id else ""
@@ -1882,14 +1881,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** String representation, including symbol's kind e.g., "class Foo", "method Bar".
* If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
*/
- override def toString = compose(
+ override def toString: String = compose(
kindString,
if (hasMeaninglessName) owner.decodedName + idString else nameString
)
/** String representation of location.
*/
- def ownsString = {
+ def ownsString: String = {
val owns = effectiveOwner
if (owns.isClass && !owns.isEmptyPrefix) "" + owns else ""
}
@@ -1897,12 +1896,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** String representation of location, plus a preposition. Doesn't do much,
* for backward compatibility reasons.
*/
- def locationString = ownsString match {
+ def locationString: String = ownsString match {
case "" => ""
case s => " in " + s
}
- def fullLocationString = toString + locationString
- def signatureString = if (hasRawInfo) infoString(rawInfo) else "<_>"
+ def fullLocationString: String = toString + locationString
+ def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
/** String representation of symbol's definition following its name */
final def infoString(tp: Type): String = {
@@ -1994,7 +1993,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
extends Symbol(initOwner, initPos, initName) {
final override def isTerm = true
- override def name: TermName = super.name
+ override def name: TermName = rawname.toTermName
privateWithin = NoSymbol
var referenced: Symbol = NoSymbol
@@ -2081,20 +2080,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
extends TermSymbol(initOwner, initPos, initName) {
private var flatname: TermName = null
- // This method could use a better name from someone clearer on what the condition expresses.
- private def isFlatAdjusted = !isMethod && needsFlatClasses
- override def owner: Symbol =
- if (isFlatAdjusted) rawowner.owner
+ override def owner = (
+ if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
-
- override def name: TermName =
- if (isFlatAdjusted) {
- if (flatname == null)
- flatname = flattenName().toTermName
-
+ )
+ override def name: TermName = (
+ if (!isMethod && needsFlatClasses) {
+ if (flatname eq null)
+ flatname = nme.flattenedName(rawowner.name, rawname)
+
flatname
- } else rawname
+ }
+ else rawname.toTermName
+ )
override def cloneSymbolImpl(owner: Symbol): Symbol =
new ModuleSymbol(owner, pos, name).copyAttrsFrom(this)
@@ -2217,6 +2216,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
super.info_=(tp)
}
+ final override def isNonBottomSubClass(that: Symbol): Boolean = (
+ (this eq that) || this.isError || that.isError ||
+ info.baseTypeIndex(that) >= 0
+ )
+
override def reset(completer: Type) {
super.reset(completer)
tpePeriod = NoPeriod
@@ -2288,7 +2292,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** A class for class symbols */
class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
-
+ private var flatname: TypeName = null
private var source: AbstractFileType = null
private var thissym: Symbol = this
@@ -2307,20 +2311,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
thissym = this
}
- private var flatname: TypeName = null
-
- override def owner: Symbol =
- if (needsFlatClasses) rawowner.owner
- else rawowner
-
- override def name: TypeName =
- if (needsFlatClasses) {
- if (flatname == null)
- flatname = flattenName().toTypeName
- flatname
- }
- else rawname.asInstanceOf[TypeName]
-
private var thisTypeCache: Type = _
private var thisTypePeriod = NoPeriod
@@ -2336,7 +2326,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
thisTypeCache
}
-
+
+ override def owner: Symbol =
+ if (needsFlatClasses) rawowner.owner else rawowner
+ override def name: TypeName = (
+ if (needsFlatClasses) {
+ if (flatname eq null)
+ flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
+
+ flatname
+ }
+ else rawname.toTypeName
+ )
+
/** A symbol carrying the self type of the class as its type */
override def thisSym: Symbol = thissym
@@ -2407,7 +2409,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def sourceModule_=(module: Symbol) { this.module = module }
}
- class FreeVar(name: TermName, tpe: Type, val value: Any) extends TermSymbol(definitions.RootClass, NoPosition, name) {
+ class FreeVar(name0: TermName, tpe: Type, val value: Any) extends TermSymbol(definitions.RootClass, NoPosition, name0) {
setInfo(tpe)
override def hashCode = value.hashCode
@@ -2511,6 +2513,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
val syms1 = cloneSymbolsAtOwner(syms, owner)
creator(syms1, tpe.substSym(syms, syms1))
}
+
+ /** A deep map on a symbol's paramss.
+ */
+ def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
/** Create a new existential type skolem with the given owner and origin.
*/
diff --git a/src/compiler/scala/reflect/internal/TreeGen.scala b/src/compiler/scala/reflect/internal/TreeGen.scala
index 1c93a904c0..e537c6b83f 100644
--- a/src/compiler/scala/reflect/internal/TreeGen.scala
+++ b/src/compiler/scala/reflect/internal/TreeGen.scala
@@ -154,9 +154,13 @@ abstract class TreeGen {
debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase)
assert(!tree.tpe.isInstanceOf[MethodType], tree)
assert(!pt.typeSymbol.isPackageClass && !pt.typeSymbol.isPackageObjectClass, pt)
- // @MAT only called during erasure, which already takes care of that
- // @PP: "only called during erasure" is not very true these days.
- // In addition, at least, are: typer, uncurry, explicitouter, cleanup.
+ // called during (at least): typer, uncurry, explicitouter, cleanup.
+ // TODO: figure out the truth table for any/wrapInApply
+ // - the `any` flag seems to relate to erasure's adaptMember: "x.asInstanceOf[T] becomes x.$asInstanceOf[T]",
+ // where asInstanceOf is Any_asInstanceOf and $asInstanceOf is Object_asInstanceOf
+ // erasure will only unbox the value in a tree made by mkCast if `any && wrapInApply`
+ // - the `wrapInApply` flag need not be true if the tree will be adapted to have the empty argument list added before it gets to erasure
+ // in fact, I think it should be false for trees that will be type checked during typer
assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize))
atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = false, wrapInApply = true))
}
@@ -262,6 +266,25 @@ abstract class TreeGen {
tree setType tp
}
+ def mkZeroContravariantAfterTyper(tp: Type): Tree = {
+ // contravariant -- for replacing an argument in a method call
+ // must use subtyping, as otherwise we miss types like `Any with Int`
+ val tree =
+ if (NullClass.tpe <:< tp) Literal(Constant(null))
+ else if (UnitClass.tpe <:< tp) Literal(Constant())
+ else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
+ else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
+ else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
+ else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
+ else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
+ else if (IntClass.tpe <:< tp) Literal(Constant(0))
+ else if (LongClass.tpe <:< tp) Literal(Constant(0L))
+ else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
+ else mkCast(Literal(Constant(null)), tp)
+
+ tree
+ }
+
/** Builds a tuple */
def mkTuple(elems: List[Tree]): Tree =
if (elems.isEmpty) Literal(Constant())
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 1dc93a7add..e3ee39d2a0 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -107,7 +107,15 @@ abstract class TreeInfo {
@deprecated("Use isExprSafeToInline instead", "2.10.0")
def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
- def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = {
+ def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
+ mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+
+ def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
+ val b = List.newBuilder[R]
+ foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
+ b.result
+ }
+ def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
val plen = params.length
val alen = args.length
def fail() = {
@@ -116,27 +124,29 @@ abstract class TreeInfo {
" params = " + params + "\n" +
" args = " + args + "\n"
)
- params zip args
+ false
}
- if (plen == alen) params zip args
- else if (params.isEmpty) fail
+ if (plen == alen) foreach2(params, args)(f)
+ else if (params.isEmpty) return fail
else if (isVarArgsList(params)) {
val plenInit = plen - 1
if (alen == plenInit) {
if (alen == 0) Nil // avoid calling mismatched zip
- else params.init zip args
+ else foreach2(params.init, args)(f)
}
- else if (alen < plenInit) fail
+ else if (alen < plenInit) return fail
else {
- val front = params.init zip (args take plenInit)
- val back = args drop plenInit map (a => (params.last, a))
- front ++ back
+ foreach2(params.init, args take plenInit)(f)
+ val remainingArgs = args drop plenInit
+ foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
}
}
- else fail
- }
+ else return fail
+ true
+ }
+
/**
* Selects the correct parameter list when there are nested applications.
* Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
@@ -144,22 +154,28 @@ abstract class TreeInfo {
* applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
* correspond to the third parameter list.
*
+ * The argument fn is the function part of the apply node being considered.
+ *
* Also accounts for varargs.
*/
+ private def applyMethodParameters(fn: Tree): List[Symbol] = {
+ val depth = applyDepth(fn)
+ // There could be applies which go beyond the parameter list(s),
+ // being applied to the result of the method call.
+ // !!! Note that this still doesn't seem correct, although it should
+ // be closer than what it replaced.
+ if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
+ else if (fn.symbol.paramss.isEmpty) Nil
+ else fn.symbol.paramss.last
+ }
+
def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
- case Apply(fn, args) =>
- val depth = applyDepth(fn)
- // There could be applies which go beyond the parameter list(s),
- // being applied to the result of the method call.
- // !!! Note that this still doesn't seem correct, although it should
- // be closer than what it replaced.
- val params = (
- if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
- else if (fn.symbol.paramss.isEmpty) Nil
- else fn.symbol.paramss.last
- )
- zipMethodParamsAndArgs(params, args)
- case _ => Nil
+ case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
+ case _ => Nil
+ }
+ def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
+ case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
+ case _ =>
}
/** Is symbol potentially a getter of a variable?
@@ -176,7 +192,7 @@ abstract class TreeInfo {
def isVariableOrGetter(tree: Tree) = {
def sym = tree.symbol
def isVar = sym.isVariable
- def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name)) != NoSymbol
+ def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name.toTermName)) != NoSymbol
tree match {
case Ident(_) => isVar
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 566ee0e9cf..96f2c5cc45 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -160,12 +160,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
*/
def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
atPos(sym.pos) {
- ModuleDef(Modifiers(sym.flags), sym.name, impl) setSymbol sym
+ ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
}
def ValDef(sym: Symbol, rhs: Tree): ValDef =
atPos(sym.pos) {
- ValDef(Modifiers(sym.flags), sym.name,
+ ValDef(Modifiers(sym.flags), sym.name.toTermName,
TypeTree(sym.tpe) setPos focusPos(sym.pos),
rhs) setSymbol sym
}
@@ -182,7 +182,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
atPos(sym.pos) {
assert(sym != NoSymbol)
DefDef(Modifiers(sym.flags),
- sym.name,
+ sym.name.toTermName,
sym.typeParams map TypeDef,
vparamss,
TypeTree(sym.tpe.finalResultType) setPos focusPos(sym.pos),
@@ -193,7 +193,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
- DefDef(sym, mods, sym.paramss map (_.map(ValDef)), rhs)
+ DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
def DefDef(sym: Symbol, rhs: Tree): DefDef =
DefDef(sym, Modifiers(sym.flags), rhs)
@@ -214,7 +214,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
atPos(sym.pos) {
- LabelDef(sym.name, params map Ident, rhs) setSymbol sym
+ LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
}
@@ -248,10 +248,13 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Block factory that flattens directly nested blocks.
*/
- def Block(stats: Tree*): Block = stats match {
- case Seq(b @ Block(_, _)) => b
- case Seq(stat) => Block(stats.toList, Literal(Constant(())))
- case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
+ def Block(stats: Tree*): Block = {
+ if (stats.isEmpty) Block(Nil, Literal(Constant(())))
+ else stats match {
+ case Seq(b @ Block(_, _)) => b
+ case Seq(stat) => Block(stats.toList, Literal(Constant(())))
+ case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
+ }
}
// --- specific traversers and transformers
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 265261f594..4630733db4 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -87,11 +87,20 @@ trait Types extends api.Types { self: SymbolTable =>
private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
private final val printLubs = sys.props contains "scalac.debug.lub"
+ private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
/** In case anyone wants to turn off lub verification without reverting anything. */
private final val verifyLubs = true
+ /** In case anyone wants to turn off type parameter bounds being used
+ * to seed type constraints.
+ */
+ private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
protected val enableTypeVarExperimentals = settings.Xexperimental.value
+ /** Empty immutable maps to avoid allocations. */
+ private val emptySymMap = immutable.Map[Symbol, Symbol]()
+ private val emptySymCount = immutable.Map[Symbol, Int]()
+
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
@@ -484,7 +493,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
-
/** For a classtype or refined type, its defined or declared members;
* inherited by subtypes and typerefs.
* The empty scope for all other types.
@@ -496,6 +504,9 @@ trait Types extends api.Types { self: SymbolTable =>
* Alternatives of overloaded symbol appear in the order they are declared.
*/
def decl(name: Name): Symbol = findDecl(name, 0)
+
+ /** A list of all non-private members defined or declared in this type. */
+ def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList
/** The non-private defined or declared members with name `name` in this type;
* an OverloadedSymbol if several exist, NoSymbol if none exist.
@@ -507,47 +518,67 @@ trait Types extends api.Types { self: SymbolTable =>
* Members appear in linearization order of their owners.
* Members with the same owner appear in reverse order of their declarations.
*/
- def members: List[Symbol] = findMember(nme.ANYNAME, 0, 0, false).alternatives
+ def members: List[Symbol] = membersBasedOnFlags(0, 0)
/** A list of all non-private members of this type (defined or inherited) */
- def nonPrivateMembers: List[Symbol] =
- findMember(nme.ANYNAME, PRIVATE | BridgeFlags, 0, false).alternatives
+ def nonPrivateMembers: List[Symbol] = membersBasedOnFlags(BridgeAndPrivateFlags, 0)
/** A list of all non-private members of this type (defined or inherited),
* admitting members with given flags `admit`
*/
- def nonPrivateMembersAdmitting(admit: Long): List[Symbol] =
- findMember(nme.ANYNAME, (PRIVATE | BridgeFlags) & ~admit, 0, false).alternatives
+ def nonPrivateMembersAdmitting(admit: Long): List[Symbol] = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
/** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: List[Symbol] =
- findMember(nme.ANYNAME, BridgeFlags, IMPLICIT, false).alternatives
+ def implicitMembers: List[Symbol] = membersBasedOnFlags(BridgeFlags, IMPLICIT)
/** A list of all deferred symbols of this type (defined or inherited) */
- def deferredMembers: List[Symbol] =
- findMember(nme.ANYNAME, BridgeFlags, DEFERRED, false).alternatives
+ def deferredMembers: List[Symbol] = membersBasedOnFlags(BridgeFlags, DEFERRED)
/** The member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
- def member(name: Name): Symbol = findMember(name, BridgeFlags, 0, false)
+ def member(name: Name): Symbol =
+ memberBasedOnName(name, BridgeFlags)
/** The non-private member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist.
* Bridges are excluded from the result
*/
def nonPrivateMember(name: Name): Symbol =
- findMember(name, PRIVATE | BridgeFlags, 0, false)
+ memberBasedOnName(name, BridgeAndPrivateFlags)
+
+ /** All members with the given flags, excluding bridges.
+ */
+ def membersWithFlags(requiredFlags: Long): List[Symbol] =
+ membersBasedOnFlags(BridgeFlags, requiredFlags)
- /** The non-private member with given name, admitting members with given flags `admit`
- * an OverloadedSymbol if several exist, NoSymbol if none exist
+ /** All non-private members with the given flags, excluding bridges.
+ */
+ def nonPrivateMembersWithFlags(requiredFlags: Long): List[Symbol] =
+ membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
+
+ /** The non-private member with given name, admitting members with given flags `admit`.
+ * "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
+ * flag are usually excluded from findMember results, but supplying any of those flags
+ * to this method disables that exclusion.
+ *
+ * An OverloadedSymbol if several exist, NoSymbol if none exists.
*/
def nonPrivateMemberAdmitting(name: Name, admit: Long): Symbol =
- findMember(name, (PRIVATE | BridgeFlags) & ~admit, 0, false)
+ memberBasedOnName(name, BridgeAndPrivateFlags & ~admit)
/** The non-local member with given name,
* an OverloadedSymbol if several exist, NoSymbol if none exist */
def nonLocalMember(name: Name): Symbol =
- findMember(name, LOCAL | BridgeFlags, 0, false)
+ memberBasedOnName(name, BridgeFlags | LOCAL)
+
+ /** Members excluding and requiring the given flags.
+ * Note: unfortunately it doesn't work to exclude DEFERRED this way.
+ */
+ def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): List[Symbol] =
+ findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
+
+ def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
+ findMember(name, excludedFlags, 0, false)
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -568,7 +599,7 @@ trait Types extends api.Types { self: SymbolTable =>
* T.asSeenFrom(ThisType(C), D) (where D is owner of m)
* = Int
*/
- def asSeenFrom(pre: Type, clazz: Symbol): Type =
+ def asSeenFrom(pre: Type, clazz: Symbol): Type = {
if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this
else {
// scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") {
@@ -584,6 +615,7 @@ trait Types extends api.Types { self: SymbolTable =>
stopTimer(asSeenFromNanos, start)
result
}
+ }
/** The info of `sym`, seen as a member of this type.
*
@@ -649,7 +681,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** Returns all parts of this type which satisfy predicate `p` */
- def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p).collect(this).toList
+ def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
+ def withFilter(p: Type => Boolean) = new FilterTypeCollector(p) {
+ def foreach[U](f: Type => U): Unit = collect(Type.this) foreach f
+ def map[T](f: Type => T): List[T] = collect(Type.this) map f
+ }
/** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
@@ -1002,7 +1038,6 @@ trait Types extends api.Types { self: SymbolTable =>
baseClasses.head.newOverloaded(this, members.toList)
}
}
-
/** The existential skolems and existentially quantified variables which are free in this type */
def existentialSkolems: List[Symbol] = {
var boundSyms: List[Symbol] = List()
@@ -1283,6 +1318,7 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
+ def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass)
// override def isNullable: Boolean = NullClass.tpe <:< lo;
override def safeToString = ">: " + lo + " <: " + hi
override def kind = "TypeBoundsType"
@@ -1609,29 +1645,40 @@ trait Types extends api.Types { self: SymbolTable =>
// (this can happen only for erroneous programs).
}
+ private object enterRefs extends TypeMap {
+ private var tparam: Symbol = _
+
+ def apply(tp: Type): Type = {
+ tp match {
+ case TypeRef(_, sym, args) if args.nonEmpty =>
+ if (settings.debug.value && !sameLength(sym.info.typeParams, args))
+ debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+
+ foreach2(sym.info.typeParams, args) { (tparam1, arg) =>
+ if (arg contains tparam) {
+ addRef(NonExpansive, tparam, tparam1)
+ if (arg.typeSymbol != tparam)
+ addRef(Expansive, tparam, tparam1)
+ }
+ }
+ case _ =>
+ }
+ mapOver(tp)
+ }
+ def enter(tparam0: Symbol, parent: Type) {
+ this.tparam = tparam0
+ this(parent)
+ }
+ }
+
/** Compute initial (one-step) references and set state to `Initializing`.
*/
private def computeRefs() {
refs = Array(Map(), Map())
- for (tparam <- typeSymbol.typeParams) {
- val enterRefs = new TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- if (settings.debug.value && !sameLength(sym.info.typeParams, args))
- debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
-
- for ((tparam1, arg) <- sym.info.typeParams zip args; if arg contains tparam) {
- addRef(NonExpansive, tparam, tparam1)
- if (arg.typeSymbol != tparam)
- addRef(Expansive, tparam, tparam1)
- }
- case _ =>
- }
- mapOver(tp)
- }
+ typeSymbol.typeParams foreach { tparam =>
+ parents foreach { p =>
+ enterRefs.enter(tparam, p)
}
- for (p <- parents) enterRefs(p)
}
state = Initializing
}
@@ -1896,7 +1943,7 @@ A type's typeSymbol should never be inspected directly.
// note: does not go through typeRef. There's no need to because
// neither `pre` nor `sym` changes. And there's a performance
// advantage to call TypeRef directly.
- override def typeConstructor = TypeRef(pre, sym, Nil)
+ override def typeConstructor = if (args.isEmpty) this else TypeRef(pre, sym, Nil)
// A reference (in a Scala program) to a type that has type
// parameters, but where the reference does not include type
@@ -2149,15 +2196,23 @@ A type's typeSymbol should never be inspected directly.
override def isJava = true
}
- case class NullaryMethodType(override val resultType: Type) extends SimpleTypeProxy {
- override def underlying = resultType
- override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
- override def paramSectionCount = 0
- override def paramss = Nil
- override def params = Nil
- override def paramTypes = Nil
- override def safeToString = "=> " + resultType
- override def kind = "NullaryMethodType"
+ case class NullaryMethodType(override val resultType: Type) extends Type {
+ override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
+ override def prefix: Type = resultType.prefix
+ override def narrow: Type = resultType.narrow
+ override def finalResultType: Type = resultType.finalResultType
+ override def termSymbol: Symbol = resultType.termSymbol
+ override def typeSymbol: Symbol = resultType.typeSymbol
+ override def parents: List[Type] = resultType.parents
+ override def decls: Scope = resultType.decls
+ override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
+ override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+ override def baseClasses: List[Symbol] = resultType.baseClasses
+ override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
+ override def boundSyms = resultType.boundSyms
+ override def isVolatile = resultType.isVolatile
+ override def safeToString: String = "=> "+ resultType
+ override def kind = "NullaryMethodType"
}
object NullaryMethodType extends NullaryMethodTypeExtractor
@@ -2224,6 +2279,15 @@ A type's typeSymbol should never be inspected directly.
}
object PolyType extends PolyTypeExtractor
+
+ /** A creator for existential types which flattens nested existentials.
+ */
+ def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
+ if (quantified.isEmpty) underlying
+ else underlying match {
+ case ExistentialType(qs, restpe) => newExistentialType(quantified ::: qs, restpe)
+ case _ => ExistentialType(quantified, underlying)
+ }
case class ExistentialType(quantified: List[Symbol],
override val underlying: Type) extends RewrappingTypeProxy
@@ -2288,7 +2352,7 @@ A type's typeSymbol should never be inspected directly.
}
override def cloneInfo(owner: Symbol) =
- createFromClonedSymbolsAtOwner(quantified, owner, underlying)(ExistentialType(_, _))
+ createFromClonedSymbolsAtOwner(quantified, owner, underlying)(newExistentialType)
override def atOwner(owner: Symbol) =
if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
@@ -2373,31 +2437,54 @@ A type's typeSymbol should never be inspected directly.
// but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
- // encapsulate suspension so we can automatically link the suspension of cloned
- // typevars to their original if this turns out to be necessary
-/*
- def Suspension = new Suspension
- class Suspension {
- private val suspended = mutable.HashSet[TypeVar]()
- def suspend(tv: TypeVar): Unit = {
- tv.suspended = true
- suspended += tv
- }
- def resumeAll(): Unit = {
- for (tv <- suspended) {
- tv.suspended = false
+ @inline final def trace[T](action: String, msg: => String)(value: T): T = {
+ if (traceTypeVars) {
+ val s = msg match {
+ case "" => ""
+ case str => "( " + str + " )"
}
- suspended.clear()
+ Console.err.println("[%10s] %-25s%s".format(action, value, s))
}
+ value
+ }
+
+ /** Create a new TypeConstraint based on the given symbol.
+ */
+ private def deriveConstraint(tparam: Symbol): TypeConstraint = {
+ /** Must force the type parameter's info at this point
+ * or things don't end well for higher-order type params.
+ * See SI-5359.
+ */
+ val bounds = tparam.info.bounds
+ /** We can seed the type constraint with the type parameter
+ * bounds as long as the types are concrete. This should lower
+ * the complexity of the search even if it doesn't improve
+ * any results.
+ */
+ if (propagateParameterBoundsToTypeVars) {
+ val exclude = bounds.isEmptyBounds || bounds.exists(_.typeSymbolDirect.isNonClassType)
+
+ if (exclude) new TypeConstraint
+ else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
+ }
+ else new TypeConstraint
+ }
+ def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
+ def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
+ def apply(tparam: Symbol): TypeVar = apply(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams)
+
+ /** This is the only place TypeVars should be instantiated.
+ */
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar = {
+ val tv = (
+ if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr)
+ else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args)
+ else if (args.isEmpty) new HKTypeVar(origin, constr, params)
+ else throw new TypeError("Invalid TypeVar construction: " + ((origin, constr, args, params)))
+ )
+
+ trace("create", "In " + tv.originLocation)(tv)
}
-*/
- def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
- def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- // TODO why not initialise TypeConstraint with bounds of tparam?
- // @PP: I tried that, didn't work out so well for me.
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) =
- new TypeVar(origin, constr, args, params)
}
// TODO: I don't really know why this happens -- maybe because
@@ -2424,23 +2511,53 @@ A type's typeSymbol should never be inspected directly.
tp.typeSymbol
)
+ /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
+ */
+ class HKTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ override val params: List[Symbol]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(params.nonEmpty, this)
+ override def isHigherKinded = true
+ override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
+ }
+
+ /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
+ */
+ class AppliedTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ zippedArgs: List[(Symbol, Type)]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(zippedArgs.nonEmpty, this)
+
+ override def params: List[Symbol] = zippedArgs map (_._1)
+ override def typeArgs: List[Type] = zippedArgs map (_._2)
+
+ override protected def typeVarString = (
+ zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
+ )
+ }
+
/** A class representing a type variable: not used after phase `typer`.
*
* A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
* A TypeVar with nonEmpty typeArgs can only be instantiated by a higher-kinded
* type that can be applied to those args. A TypeVar is much like a TypeRef,
* except it has special logic for equality and subtyping.
+ *
+ * Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
class TypeVar(
val origin: Type,
- val constr0: TypeConstraint,
- override val typeArgs: List[Type],
- override val params: List[Symbol]
+ val constr0: TypeConstraint
) extends Type {
- private val numArgs = typeArgs.length
- // params are needed to keep track of variance (see mapOverArgs in SubstMap)
- assert(typeArgs.isEmpty || sameLength(typeArgs, params))
- // var tid = { tidCount += 1; tidCount } //DEBUG
+ override def params: List[Symbol] = Nil
+ override def typeArgs: List[Type] = Nil
+ override def isHigherKinded = false
/** The constraint associated with the variable */
var constr = constr0
@@ -2448,7 +2565,38 @@ A type's typeSymbol should never be inspected directly.
/** The variable's skolemization level */
val level = skolemizationLevel
-
+
+ /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
+ * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
+ *
+ * `constr` for `?CC` only tracks type constructors anyway,
+ * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
+ * `?CC's` hibounds contains List and Iterable.
+ */
+ def applyArgs(newArgs: List[Type]): TypeVar = (
+ if (newArgs.isEmpty && typeArgs.isEmpty)
+ this
+ else if (newArgs.size == params.size) {
+ val tv = TypeVar(origin, constr, newArgs, params)
+ TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
+ }
+ else
+ throw new TypeError("Invalid type application in TypeVar: " + params + ", " + newArgs)
+ )
+ // newArgs.length may differ from args.length (could've been empty before)
+ //
+ // !!! @PP - I need an example of this, since this exception never triggers
+ // even though I am requiring the size match.
+ //
+ // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
+ // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
+ // TypeVars get applied to different arguments over time (in asSeenFrom)
+ // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
+ // thus: make new TypeVar's for every application of a TV to args,
+ // inference may generate several TypeVar's for a single type parameter that must be inferred,
+ // only one of them is in the set of tvars that need to be solved, but
+ // they share the same TypeConstraint instance
+
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
@@ -2459,26 +2607,6 @@ A type's typeSymbol should never be inspected directly.
private var encounteredHigherLevel = false
private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
- /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
- * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
- *
- * `constr` for `?CC` only tracks type constructors anyway,
- * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
- * `?CC's` hibounds contains List and Iterable.
- */
- def applyArgs(newArgs: List[Type]): TypeVar =
- if (newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...)
- else TypeVar(origin, constr, newArgs, params) // @M TODO: interaction with undoLog??
- // newArgs.length may differ from args.length (could've been empty before)
- // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
- // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
- // TypeVars get applied to different arguments over time (in asSeenFrom)
- // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
- // thus: make new TypeVar's for every application of a TV to args,
- // inference may generate several TypeVar's for a single type parameter that must be inferred,
- // only one of them is in the set of tvars that need to be solved, but
- // they share the same TypeConstraint instance
-
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
@@ -2487,7 +2615,8 @@ A type's typeSymbol should never be inspected directly.
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
- constr.inst = if (shouldRepackType) repackExistential(tp) else tp
+ val res = if (shouldRepackType) repackExistential(tp) else tp
+ constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
@@ -2564,11 +2693,10 @@ A type's typeSymbol should never be inspected directly.
* type parameter we're trying to infer (the result will be sanity-checked later).
*/
def unifyFull(tpe: Type) = {
- // Since the alias/widen variations are often no-ops, this
- // keenly collects them in a Set to avoid redundant tests.
+ // The alias/widen variations are often no-ops.
val tpes = (
- if (isLowerBound) Set(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias)
- else Set(tpe)
+ if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct
+ else List(tpe)
)
tpes exists { tp =>
val lhs = if (isLowerBound) tp.typeArgs else typeArgs
@@ -2668,32 +2796,56 @@ A type's typeSymbol should never be inspected directly.
|| !containsSkolemAboveLevel(tp) // side-effects tracking boolean
|| enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences
)
- override val isHigherKinded = typeArgs.isEmpty && params.nonEmpty
- override def normalize: Type =
+ override def normalize: Type = (
if (constr.instValid) constr.inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor)))
else super.normalize
-
+ )
override def typeSymbol = origin.typeSymbol
override def isStable = origin.isStable
override def isVolatile = origin.isVolatile
- private def levelString = if (settings.explaintypes.value) level else ""
- override def safeToString = constr.inst match {
- case null => "<null " + origin + ">"
- case NoType => "?" + levelString + origin + typeArgsString(this)
- case x => "" + x
+ private def tparamsOfSym(sym: Symbol) = sym.info match {
+ case PolyType(tparams, _) if tparams.nonEmpty =>
+ tparams map (_.defString) mkString("[", ",", "]")
+ case _ => ""
+ }
+ def originName = {
+ val name = origin.typeSymbolDirect.decodedName
+ if (name contains "_$") origin.typeSymbolDirect.decodedName else name
+ }
+ def originLocation = {
+ val sym = origin.typeSymbolDirect
+ val encl = sym.owner.logicallyEnclosingMember
+
+ // This should display somewhere between one and three
+ // things which enclose the origin: at most, a class, a
+ // a method, and a term. At least, a class.
+ List(
+ Some(encl.enclClass),
+ if (encl.isMethod) Some(encl) else None,
+ if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
+ ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
}
+ private def levelString = if (settings.explaintypes.value) level else ""
+ protected def typeVarString = originName
+ override def safeToString = (
+ if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
+ else if (constr.inst ne NoType) "" + constr.inst
+ else "?" + levelString + originName
+ )
override def kind = "TypeVar"
def cloneInternal = {
// cloning a suspended type variable when it's suspended will cause the clone
// to never be resumed with the current implementation
- assert(!suspended)
- TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ assert(!suspended, this)
+ TypeVar.trace("clone", originLocation)(
+ TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ )
}
}
@@ -2950,6 +3102,9 @@ A type's typeSymbol should never be inspected directly.
/** A creator for intersection type where intersections of a single type are
* replaced by the type itself, and repeated parent classes are merged.
+ *
+ * !!! Repeated parent classes are not merged - is this a bug in the
+ * comment or in the code?
*/
def intersectionType(tps: List[Type], owner: Symbol): Type = tps match {
case List(tp) =>
@@ -2987,7 +3142,7 @@ A type's typeSymbol should never be inspected directly.
case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
- case ExistentialType(tparams, restpe) => ExistentialType(tparams, appliedType(restpe, args))
+ case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
@@ -3026,71 +3181,23 @@ A type's typeSymbol should never be inspected directly.
*
* tpe1 where { tparams }
*
- * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`. Extrapolating means
- * that type variables in `tparams` occurring in covariant positions are replaced by upper bounds,
- * (minus any SingletonClass markers),
- * type variables in `tparams` occurring in contravariant positions are replaced by upper bounds,
- * provided the resulting type is legal wrt to stability, and does not contain any
- * type variable in `tparams`.
- * The abstraction drops all type parameters that are not directly or indirectly
- * referenced by type `tpe1`.
- * If there are no remaining type parameters, simply returns result type `tpe`.
+ * where `tpe1` is the result of extrapolating `tpe` wrt to `tparams`.
+ * Extrapolating means that type variables in `tparams` occurring
+ * in covariant positions are replaced by upper bounds, (minus any
+ * SingletonClass markers), type variables in `tparams` occurring in
+ * contravariant positions are replaced by upper bounds, provided the
+ * resulting type is legal wrt to stability, and does not contain any type
+ * variable in `tparams`.
+ *
+ * The abstraction drops all type parameters that are not directly or
+ * indirectly referenced by type `tpe1`. If there are no remaining type
+ * parameters, simply returns result type `tpe`.
*/
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
else {
- var occurCount = emptySymCount ++ (tparams map (_ -> 0))
- val tpe = deAlias(tpe0)
- def countOccs(tp: Type) =
- for (t <- tp) {
- t match {
- case TypeRef(_, sym, _) =>
- occurCount get sym match {
- case Some(count) => occurCount += (sym -> (count + 1))
- case none =>
- }
- case _ =>
- }
- }
- countOccs(tpe)
- for (tparam <- tparams) countOccs(tparam.info)
-
- val extrapolate = new TypeMap {
- variance = 1
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- tp1 match {
- case TypeRef(pre, sym, args) if (variance != 0) && (occurCount isDefinedAt sym) =>
- val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
- if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
- repl
- else tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- // Do not discard the types of existential ident's. The
- // symbol of the Ident itself cannot be listed in the
- // existential's parameters, so the resulting existential
- // type would be ill-formed.
- override def mapOver(tree: Tree) = tree match {
- case Ident(_) if tree.tpe.isStable => tree
- case _ => super.mapOver(tree)
- }
- }
- val tpe1 = extrapolate(tpe)
+ val tpe = deAlias(tpe0)
+ val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
var tparams0 = tparams
var tparams1 = tparams0 filter tpe1.contains
@@ -3100,11 +3207,7 @@ A type's typeSymbol should never be inspected directly.
tparams1 exists { p1 => p1 == p || (p1.info contains p) }
}
}
- if (tparams1.isEmpty) tpe1
- else tpe1 match {
- case ExistentialType(tparams2, tpe2) => ExistentialType(tparams1 ::: tparams2, tpe2)
- case _ => ExistentialType(tparams1, tpe1)
- }
+ newExistentialType(tparams1, tpe1)
}
/** Remove any occurrences of type aliases from this type */
@@ -3196,7 +3299,7 @@ A type's typeSymbol should never be inspected directly.
val isType = pnames.head.isTypeName
val newParams = for (name <- pnames) yield
if (isType) owner.newTypeParameter(NoPosition, name.toTypeName)
- else owner.newValueParameter(NoPosition, name)
+ else owner.newValueParameter(NoPosition, name.toTermName)
paramStack = newParams :: paramStack
try {
(newParams, ptypes).zipped foreach ((p, t) => p setInfo this(t))
@@ -3239,10 +3342,15 @@ A type's typeSymbol should never be inspected directly.
*/
class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
def this() = this(List(), List())
- private var lobounds = lo0
- private var hibounds = hi0
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound.
+ */
+ private var lobounds = lo0 filterNot (_.typeSymbolDirect eq NothingClass)
+ private var hibounds = hi0 filterNot (_.typeSymbolDirect eq AnyClass)
private var numlo = numlo0
private var numhi = numhi0
private var avoidWidening = avoidWidening0
@@ -3258,7 +3366,8 @@ A type's typeSymbol should never be inspected directly.
else if (!isNumericSubType(tp, numlo))
numlo = numericLoBound
}
- else lobounds ::= tp
+ else if (tp.typeSymbolDirect ne NothingClass)
+ lobounds ::= tp
}
def checkWidening(tp: Type) {
@@ -3277,7 +3386,8 @@ A type's typeSymbol should never be inspected directly.
else if (!isNumericSubType(numhi, tp))
numhi = numericHiBound
}
- else hibounds ::= tp
+ else if (tp.typeSymbolDirect ne AnyClass)
+ hibounds ::= tp
}
def isWithinBounds(tp: Type): Boolean =
@@ -3296,10 +3406,18 @@ A type's typeSymbol should never be inspected directly.
tc
}
- override def toString =
- (loBounds map (_.safeToString)).mkString("[ _>:(", ",", ") ") +
- (hiBounds map (_.safeToString)).mkString("| _<:(", ",", ") ] _= ") +
- inst.safeToString
+ override def toString = {
+ val boundsStr = (
+ if (loBounds.isEmpty && hiBounds.isEmpty) "[]"
+ else {
+ val lostr = if (loBounds.isEmpty) "" else loBounds map (_.safeToString) mkString("_>:(", ", ", ")")
+ val histr = if (hiBounds.isEmpty) "" else hiBounds map (_.safeToString) mkString("_<:(", ", ", ")")
+ List(lostr, histr) filterNot (_ == "") mkString ("[", " | ", "]")
+ }
+ )
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
}
trait AnnotationFilter extends TypeMap {
@@ -3315,16 +3433,82 @@ A type's typeSymbol should never be inspected directly.
def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
}
+ trait VariantTypeMap extends TypeMap {
+ private[this] var _variance = 1
+
+ override def variance = _variance
+ def variance_=(x: Int) = _variance = x
+
+ override protected def noChangeToSymbols(origSyms: List[Symbol]) = {
+ origSyms forall { sym =>
+ val v = variance
+ if (sym.isAliasType) variance = 0
+ val result = this(sym.info)
+ variance = v
+ result eq sym.info
+ }
+ }
+
+ override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ map2Conserve(args, tparams) { (arg, tparam) =>
+ val v = variance
+ if (tparam.isContravariant) variance = -variance
+ else if (!tparam.isCovariant) variance = 0
+ val arg1 = this(arg)
+ variance = v
+ arg1
+ }
+
+ /** Map this function over given type */
+ override def mapOver(tp: Type): Type = tp match {
+ case MethodType(params, result) =>
+ variance = -variance
+ val params1 = mapOver(params)
+ variance = -variance
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ variance = -variance
+ val tparams1 = mapOver(tparams)
+ variance = -variance
+ var result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case TypeBounds(lo, hi) =>
+ variance = -variance
+ val lo1 = this(lo)
+ variance = -variance
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 =
+ if (args.isEmpty)
+ args
+ else if (variance == 0) // fast & safe path: don't need to look at typeparams
+ args mapConserve this
+ else {
+ val tparams = sym.typeParams
+ if (tparams.isEmpty) args
+ else mapOverArgs(args, tparams)
+ }
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, coevolveSym(pre, pre1, sym), args1)
+ case _ =>
+ super.mapOver(tp)
+ }
+ }
+
/** A prototype for mapping a function over all possible types
*/
- abstract class TypeMap extends Function1[Type, Type] {
+ abstract class TypeMap extends (Type => Type) {
def apply(tp: Type): Type
- /** The variance relative to start. If you want variances to be significant, set
- * variance = 1
- * at the top of the typemap.
+ /** Mix in VariantTypeMap if you want variances to be significant.
*/
- var variance = 0
+ def variance = 0
// #3731: return sym1 for which holds: pre bound sym.name to sym and
// pre1 now binds sym.name to sym1, conceptually exactly the same
@@ -3354,17 +3538,7 @@ A type's typeSymbol should never be inspected directly.
def mapOver(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
val pre1 = this(pre)
- //val args1 = args mapConserve this(_)
- val args1 =
- if (args.isEmpty)
- args
- else if (variance == 0) // fast & safe path: don't need to look at typeparams
- args mapConserve this
- else {
- val tparams = sym.typeParams
- if (tparams.isEmpty) args
- else mapOverArgs(args, tparams)
- }
+ val args1 = args mapConserve this
if ((pre1 eq pre) && (args1 eq args)) tp
else copyTypeRef(tp, pre1, coevolveSym(pre, pre1, sym), args1)
case ThisType(_) => tp
@@ -3376,16 +3550,12 @@ A type's typeSymbol should never be inspected directly.
else singleType(pre1, sym)
}
case MethodType(params, result) =>
- variance = -variance
val params1 = mapOver(params)
- variance = -variance
val result1 = this(result)
if ((params1 eq params) && (result1 eq result)) tp
else copyMethodType(tp, params1, result1.substSym(params, params1))
case PolyType(tparams, result) =>
- variance = -variance
val tparams1 = mapOver(tparams)
- variance = -variance
var result1 = this(result)
if ((tparams1 eq tparams) && (result1 eq result)) tp
else PolyType(tparams1, result1.substSym(tparams, tparams1))
@@ -3400,9 +3570,7 @@ A type's typeSymbol should never be inspected directly.
if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
else SuperType(thistp1, supertp1)
case TypeBounds(lo, hi) =>
- variance = -variance
val lo1 = this(lo)
- variance = -variance
val hi1 = this(hi)
if ((lo1 eq lo) && (hi1 eq hi)) tp
else TypeBounds(lo1, hi1)
@@ -3420,7 +3588,7 @@ A type's typeSymbol should never be inspected directly.
val tparams1 = mapOver(tparams)
var result1 = this(result)
if ((tparams1 eq tparams) && (result1 eq result)) tp
- else ExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
case OverloadedType(pre, alts) =>
val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
if (pre1 eq pre) tp
@@ -3458,15 +3626,14 @@ A type's typeSymbol should never be inspected directly.
// throw new Error("mapOver inapplicable for " + tp);
}
- protected final def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- map2Conserve(args, tparams) { (arg, tparam) =>
- val v = variance
- if (tparam.isContravariant) variance = -variance
- else if (!tparam.isCovariant) variance = 0
- val arg1 = this(arg)
- variance = v
- arg1
- }
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ args mapConserve this
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned. Overridden in VariantTypeMap.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]) =
+ origSyms forall (sym => sym.info eq this(sym.info))
/** Map this function over given scope */
def mapOver(scope: Scope): Scope = {
@@ -3475,20 +3642,13 @@ A type's typeSymbol should never be inspected directly.
if (elems1 eq elems) scope
else new Scope(elems1)
}
-
+
/** Map this function over given list of symbols */
def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- val change = origSyms exists { sym =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- result ne sym.info
- }
- // map is not the identity --> do cloning properly
- if (change) cloneSymbolsAndModify(origSyms, TypeMap.this)
// fast path in case nothing changes due to map
- else origSyms
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
}
def mapOver(annot: AnnotationInfo): AnnotationInfo = {
@@ -3555,13 +3715,25 @@ A type's typeSymbol should never be inspected directly.
}
}
- private val emptySymMap = immutable.Map[Symbol, Symbol]()
- private val emptySymCount = immutable.Map[Symbol, Int]()
+ /** A collector that tests for existential types appearing at given variance in a type
+ * @PP: Commenting out due to not being used anywhere.
+ */
+ // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
+ // variance = v
+ //
+ // def traverse(tp: Type) = tp match {
+ // case ExistentialType(_, _) if (variance == v) => result = true
+ // case _ => mapOver(tp)
+ // }
+ // }
+ //
+ // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
+ // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
- val eparams = for ((tparam, i) <- tparams.zipWithIndex) yield {
- clazz.newExistential(clazz.pos, newTypeName("?"+i)).setInfo(tparam.info.bounds)
- }
+ val eparams = mapWithIndex(tparams)((tparam, i) =>
+ clazz.newExistential(clazz.pos, newTypeName("?"+i)) setInfo tparam.info.bounds)
+
eparams map (_ substInfo (tparams, eparams))
}
@@ -3608,6 +3780,62 @@ A type's typeSymbol should never be inspected directly.
mapOver(tp)
}
}
+
+ /** Used by existentialAbstraction.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance == 0) tp1
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
+ if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
+ repl
+ else tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
@@ -3615,6 +3843,7 @@ A type's typeSymbol should never be inspected directly.
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
+ var capturedPre = emptySymMap
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
@@ -3645,8 +3874,6 @@ A type's typeSymbol should never be inspected directly.
annotationArgRewriter.transform(tree)
}
- var capturedPre = emptySymMap
-
def stabilize(pre: Type, clazz: Symbol): Type =
capturedPre.getOrElse(clazz, {
val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
@@ -3789,7 +4016,7 @@ A type's typeSymbol should never be inspected directly.
case PolyType(bs, restp) =>
createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
case ExistentialType(bs, restp) =>
- createFromClonedSymbols(bs, restp)(ExistentialType(_, _))
+ createFromClonedSymbols(bs, restp)(newExistentialType)
case _ =>
tp
}
@@ -4041,7 +4268,7 @@ A type's typeSymbol should never be inspected directly.
case WildcardType =>
TypeVar(tp, new TypeConstraint)
case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(List(bounds.lo), List(bounds.hi)))
+ TypeVar(tp, new TypeConstraint(bounds))
case _ =>
mapOver(tp)
}
@@ -4094,9 +4321,13 @@ A type's typeSymbol should never be inspected directly.
}
/** A map to implement the `filter` method. */
- class FilterTypeCollector(p: Type => Boolean) extends TypeCollector(new ListBuffer[Type]) {
+ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+ def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp))
+
+ override def collect(tp: Type) = super.collect(tp).reverse
+
def traverse(tp: Type) {
- if (p(tp)) result += tp
+ if (p(tp)) result ::= tp
mapOver(tp)
}
}
@@ -4180,10 +4411,14 @@ A type's typeSymbol should never be inspected directly.
definitions.RootPackage
} else if (sym.isModuleClass) {
val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
- val result = sourceModule1.moduleClass
- val msg = "sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
- assert(result != NoSymbol, msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
- result
+ var result = sourceModule1.moduleClass
+ if (result == NoSymbol) result = sourceModule1.initialize.moduleClass
+ if (result != NoSymbol) result
+ else {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
} else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
sym
} else {
@@ -4333,8 +4568,7 @@ A type's typeSymbol should never be inspected directly.
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- ((args1, args2, sym1.typeParams).zipped forall {
- (arg1, arg2, tparam) =>
+ forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
else if (arg1.isInstanceOf[TypeVar])
@@ -4344,7 +4578,7 @@ A type's typeSymbol should never be inspected directly.
// also: think what happens if there are embedded typevars?
if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
else true
- })
+ }
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
@@ -4867,19 +5101,11 @@ A type's typeSymbol should never be inspected directly.
// --> thus, cannot be subtypes (Any/Nothing has already been checked)
}))
- /** True if all three arguments have the same number of elements and
- * the function is true for all the triples.
- */
- @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C], f: (A, B, C) => Boolean): Boolean = {
- if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
- else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail, f)
- }
-
def isSubArg(t1: Type, t2: Type, variance: Int) =
(variance > 0 || t2 <:< t1) && (variance < 0 || t1 <:< t2)
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean =
- corresponds3(tps1, tps2, tparams map (_.variance), isSubArg)
+ corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
@@ -5251,7 +5477,6 @@ A type's typeSymbol should never be inspected directly.
def solve(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean, depth: Int): Boolean = {
- val config = tvars zip (tparams zip variances)
def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
if (tvar.constr.inst == NoType) {
@@ -5260,15 +5485,17 @@ A type's typeSymbol should never be inspected directly.
val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
//Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
var cyclic = bound contains tparam
- for ((tvar2, (tparam2, variance2)) <- config) {
- if (tparam2 != tparam &&
- ((bound contains tparam2) ||
- up && (tparam2.info.bounds.lo =:= tparam.tpe) ||
- !up && (tparam2.info.bounds.hi =:= tparam.tpe))) {
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpe)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpe)
+ )
+ if (ok) {
if (tvar2.constr.inst eq null) cyclic = true
solveOne(tvar2, tparam2, variance2)
}
- }
+ })
if (!cyclic) {
if (up) {
if (bound.typeSymbol != AnyClass)
@@ -5307,9 +5534,7 @@ A type's typeSymbol should never be inspected directly.
}
// println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- for ((tvar, (tparam, variance)) <- config)
- solveOne(tvar, tparam, variance)
-
+ foreach3(tvars, tparams, variances)(solveOne)
tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
}
@@ -5357,6 +5582,23 @@ A type's typeSymbol should never be inspected directly.
val formatted = tableDef.table(transposed)
println("** Depth is " + depth + "\n" + formatted)
}
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to other
+ * any types in the list (including itself.)
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+ if (ts.isEmpty) Nil
+ else {
+ val sym = ts.head.typeSymbol
+ require(ts.tail forall (_.typeSymbol == sym), ts)
+ for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+ p -> in
+ }
+ }
/** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
* compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
@@ -5403,6 +5645,19 @@ A type's typeSymbol should never be inspected directly.
// merging, strip targs that refer to bound tparams (when we're computing the lub of type
// constructors.) Also filter out all types that are a subtype of some other type.
if (isUniformFrontier) {
+ if (settings.debug.value || printLubs) {
+ val fbounds = findRecursiveBounds(ts0)
+ if (fbounds.nonEmpty) {
+ println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
+ for ((p0, p1) <- fbounds) {
+ val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
+
+ println(" " + p0.fullLocationString + " appears in " + desc)
+ println(" " + p1 + " " + p1.info.bounds)
+ }
+ println("")
+ }
+ }
val tails = tsBts map (_.tail)
mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
case Some(tp) => tp :: loop(tails)
@@ -5428,7 +5683,7 @@ A type's typeSymbol should never be inspected directly.
}
}
- val initialBTSes = ts map (_.baseTypeSeq.toList)
+ val initialBTSes = ts map (_.baseTypeSeq.toList filter (_.typeSymbol.isPublic))
if (printLubs)
printLubMatrix(ts zip initialBTSes toMap, depth)
@@ -5471,22 +5726,11 @@ A type's typeSymbol should never be inspected directly.
case _ =>
t
}
-
- /** A collector that tests for existential types appearing at given variance in a type */
- class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) {
- def traverse(tp: Type) = tp match {
- case ExistentialType(_, _) if (variance == v) => result = true
- case _ => mapOver(tp)
- }
- def init() = {
- variance = 1
- this
- }
+ def elimRefinement(t: Type) = t match {
+ case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
+ case _ => t
}
- val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
- val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
-
/** Eliminate from list of types all elements which are a subtype
* of some other element of the list. */
private def elimSub(ts: List[Type], depth: Int): List[Type] = {
@@ -5603,8 +5847,8 @@ A type's typeSymbol should never be inspected directly.
case List() => NothingClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
@@ -5632,9 +5876,16 @@ A type's typeSymbol should never be inspected directly.
val lubType =
if (phase.erasedTypes || depth == 0) lubBase
else {
- val lubRefined = refinedType(lubParents, lubOwner)
+ val lubRefined = refinedType(lubParents, lubOwner)
val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || narrowts.exists(t => !refines(t, sym))
+ )
def lubsym(proto: Symbol): Symbol = {
val prototp = lubThisType.memberInfo(proto)
val syms = narrowts map (t =>
@@ -5643,7 +5894,7 @@ A type's typeSymbol should never be inspected directly.
if (syms contains NoSymbol) NoSymbol
else {
val symtypes =
- (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
else if (symtypes.tail forall (symtypes.head =:=))
@@ -5663,16 +5914,15 @@ A type's typeSymbol should never be inspected directly.
// efficiency.
alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
}
- for (sym <- lubBase.nonPrivateMembers) {
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- if (!sym.isClass && !sym.isConstructor && !isGetClass(sym) && (narrowts forall (t => refines(t, sym))))
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lubsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try {
+ val lsym = lubsym(sym)
+ if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym)
+ } catch {
+ case ex: NoCommonType =>
+ }
}
if (lubRefined.decls.isEmpty) lubBase
else if (!verifyLubs) lubRefined
@@ -5705,7 +5955,7 @@ A type's typeSymbol should never be inspected directly.
}
val res = lub0(ts)
if (printLubs) {
- indent = indent dropRight 2
+ indent = indent stripSuffix " "
println(indent + "lub of " + ts + " is " + res)//debug
}
if (ts forall (_.isNotNull)) res.notNull else res
@@ -5748,8 +5998,8 @@ A type's typeSymbol should never be inspected directly.
case List() => AnyClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, glbNorm(matchingRestypes(ts, params map (_.tpe)), depth))
@@ -5880,38 +6130,39 @@ A type's typeSymbol should never be inspected directly.
else if (args exists (arg => isValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
else Some(typeRef(pre, sym, List(lub(args))))
}
- } else {
- val args = (sym.typeParams, argss.transpose).zipped map { (tparam, as) =>
- if (depth == 0) {
- if (tparam.variance == variance) {
- // Take the intersection of the upper bounds of the type parameters
- // rather than falling all the way back to "Any", otherwise we end up not
- // conforming to bounds.
- val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
- if (bounds0.isEmpty) AnyClass.tpe
- else intersectionType(bounds0)
- }
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
+ }
+ else {
+ val args = map2(sym.typeParams, argss.transpose) { (tparam, as) =>
+ if (depth == 0) {
+ if (tparam.variance == variance) {
+ // Take the intersection of the upper bounds of the type parameters
+ // rather than falling all the way back to "Any", otherwise we end up not
+ // conforming to bounds.
+ val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
+ if (bounds0.isEmpty) AnyClass.tpe
+ else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
}
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ }
+ else {
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
else {
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
-
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
+
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
}
}
}
+ }
if (args contains NoType) None
else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
}
@@ -5996,148 +6247,6 @@ A type's typeSymbol should never be inspected directly.
throw new NoCommonType(tps)
}
-
- // TODO: this desperately needs to be cleaned up
- // plan: split into kind inference and subkinding
- // every Type has a (cached) Kind
- def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
- checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
-
- /** Check well-kindedness of type application (assumes arities are already checked) -- @M
- *
- * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
- * (checked one type member at a time -- in that case, prefix is the name of the type alias)
- *
- * Type application is just like value application: it's "contravariant" in the sense that
- * the type parameters of the supplied type arguments must conform to the type parameters of
- * the required type parameters:
- * - their bounds must be less strict
- * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
- * - @M TODO: are these conditions correct,sufficient&necessary?
- *
- * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
- * List's type parameter is also covariant and its bounds are weaker than <: Int
- */
- def checkKindBounds0(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol, explainErrors: Boolean): List[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] = {
- var error = false
-
- def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
- def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
-
- /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
- *
- * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
- */
- def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
-
- // check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(
- hkargs: List[Symbol],
- arg: Symbol,
- param: Symbol,
- paramowner: Symbol,
- underHKParams: List[Symbol],
- withHKArgs: List[Symbol]
- ): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
-
- def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
- // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
- val hkparams = param.typeParams
-
- if (settings.debug.value) {
- log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
- log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
- log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
- }
-
- if (!sameLength(hkargs, hkparams)) {
- if (arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
- else {error = true; (List((arg, param)), Nil, Nil) } // shortcut: always set error, whether explainTypesOrNot
- }
- else {
- val _arityMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _varianceMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _stricterBounds = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
-
- def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true}
- def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true }
- def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as }
- def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _varianceMismatches ++= as }
- def stricterBounds(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _stricterBounds ++= as }
-
- for ((hkarg, hkparam) <- hkargs zip hkparams) {
- if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
- if (!variancesMatch(hkarg, hkparam))
- varianceMismatch(hkarg, hkparam)
-
- // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
- // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
- // --> their arguments use different symbols, but are conceptually the same
- // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- val declaredBounds = transformedBounds(hkparam, paramowner)
- val declaredBoundsInst = bindHKParams(declaredBounds)
- val argumentBounds = transform(hkarg.info.bounds, owner)
- if (!(declaredBoundsInst <:< argumentBounds))
- stricterBound(hkarg, hkparam)
-
- debuglog(
- "checkKindBoundsHK base case: " + hkparam +
- " declared bounds: " + declaredBounds +
- " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
- "checkKindBoundsHK base case: "+ hkarg +
- " has bounds: " + argumentBounds
- )
- }
- else {
- debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
- val (am, vm, sb) = checkKindBoundsHK(
- hkarg.typeParams,
- hkarg,
- hkparam,
- paramowner,
- underHKParams ++ hkparam.typeParams,
- withHKArgs ++ hkarg.typeParams
- )
- arityMismatches(am)
- varianceMismatches(vm)
- stricterBounds(sb)
- }
- if (!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encountered an error
- }
- if (!explainErrors) (Nil, Nil, Nil)
- else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
- }
- }
-
- val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])]
- if (settings.debug.value &&(tparams.nonEmpty || targs.nonEmpty))
- log("checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")")
-
- for {
- (tparam, targ) <- tparams zip targs
- // Prevent WildcardType from causing kind errors, as typevars may be higher-order
- if (targ != WildcardType) && (targ.isHigherKinded || tparam.typeParams.nonEmpty)
- } {
- // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
- targ.typeSymbolDirect.info // force symbol load for #4205
- val tparamsHO = targ.typeParams
-
- val (arityMismatches, varianceMismatches, stricterBounds) = (
- // NOTE: *not* targ.typeSymbol, which normalizes
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO)
- )
- if (explainErrors) {
- if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
- errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
- }
- }
- else if (error)
- return List((NoType, NoSymbol, Nil, Nil, Nil))
- }
-
- errors.toList
- }
-
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
@@ -6164,7 +6273,7 @@ A type's typeSymbol should never be inspected directly.
Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
indent = indent + " "
val result = p(tp1, arg2)
- indent = indent dropRight 2
+ indent = indent stripSuffix " "
Console.println(indent + result)
result
}
diff --git a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
index 9aa3d8a2c3..0789f9c774 100644
--- a/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/compiler/scala/reflect/internal/pickling/UnPickler.scala
@@ -46,7 +46,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
}
}
- class Scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
+ class Scan(_bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
//println("unpickle " + classRoot + " and " + moduleRoot)//debug
protected def debug = settings.debug.value
@@ -184,6 +184,8 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
case _ => errorBadSignature("bad name tag: " + tag)
}
}
+ protected def readTermName(): TermName = readName().toTermName
+ protected def readTypeName(): TypeName = readName().toTypeName
/** Read a symbol */
protected def readSymbol(): Symbol = {
@@ -211,7 +213,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
return NoSymbol
if (tag == EXTMODCLASSref) {
- val moduleVar = owner.info.decl(nme.moduleVarName(name))
+ val moduleVar = owner.info.decl(nme.moduleVarName(name.toTermName))
if (moduleVar.isLazyAccessor)
return moduleVar.lazyAccessor.lazyAccessor
}
@@ -223,7 +225,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
// (2) Try with expanded name. Can happen if references to private
// symbols are read from outside: for instance when checking the children
// of a class. See #1722.
- fromName(nme.expandedName(name, owner)) orElse {
+ fromName(nme.expandedName(name.toTermName, owner)) orElse {
// (3) Try as a nested object symbol.
nestedObjectSymbol orElse {
// (4) Otherwise, fail.
@@ -296,14 +298,14 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
if (isModuleRoot) moduleRoot
else {
- val m = owner.newModule(name, clazz)
+ val m = owner.newModule(name.toTermName, clazz)
clazz.sourceModule = m
m
}
case VALsym =>
if (isModuleRoot) { assert(false); NoSymbol }
- else if (isMethodFlag) owner.newMethod(name)
- else owner.newValue(name)
+ else if (isMethodFlag) owner.newMethod(name.toTermName)
+ else owner.newValue(name.toTermName)
case _ =>
errorBadSignature("bad symbol tag: " + tag)
@@ -378,7 +380,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
// that it isn't right here. See #4757 for the immediate
// motivation to fix it.
val tparams = until(end, readSymbolRef) map (_ setFlag EXISTENTIAL)
- ExistentialType(tparams, restpe)
+ newExistentialType(tparams, restpe)
case ANNOTATEDtpe =>
var typeRef = readNat()
@@ -549,13 +551,13 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
case MODULEtree =>
setSymModsName()
- ModuleDef(mods, name, readTemplateRef())
+ ModuleDef(mods, name.toTermName, readTemplateRef())
case VALDEFtree =>
setSymModsName()
val tpt = readTreeRef()
val rhs = readTreeRef()
- ValDef(mods, name, tpt, rhs)
+ ValDef(mods, name.toTermName, tpt, rhs)
case DEFDEFtree =>
setSymModsName()
@@ -563,7 +565,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
val tpt = readTreeRef()
val rhs = readTreeRef()
- DefDef(mods, name, tparams, vparamss, tpt, rhs)
+ DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
case TYPEDEFtree =>
setSymModsName()
@@ -575,7 +577,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ {
setSymName()
val rhs = readTreeRef()
val params = until(end, readIdentRef)
- LabelDef(name, params, rhs)
+ LabelDef(name.toTermName, params, rhs)
case IMPORTtree =>
setSym()
diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala
new file mode 100644
index 0000000000..94672097c4
--- /dev/null
+++ b/src/compiler/scala/reflect/internal/util/Collections.scala
@@ -0,0 +1,158 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.internal.util
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+import mutable.ListBuffer
+
+/** Profiler driven changes.
+ */
+trait Collections {
+ /** True if all three arguments have the same number of elements and
+ * the function is true for all the triples.
+ */
+ @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])
+ (f: (A, B, C) => Boolean): Boolean = (
+ if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
+ else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail)(f)
+ )
+
+ /** All these mm methods are "deep map" style methods for
+ * mapping etc. on a list of lists.
+ */
+ final def mexists[A](xss: List[List[A]])(p: A => Boolean) =
+ xss exists (_ exists p)
+ final def mmap[A, B](xss: List[List[A]])(f: A => B) =
+ xss map (_ map f)
+ final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
+ xss foreach (_ foreach f)
+ final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
+ for (xs <- xss; x <- xs)
+ if (p(x)) return Some(x)
+ None
+ }
+ final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
+ for (xs <- xss; x <- xs; if p(x)) yield x
+
+ final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb += f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+ final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
+ if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
+ else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
+ }
+ final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb ++= f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+
+ final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
+ val lb = new ListBuffer[B]
+ var index = 0
+ var ys = xs
+ while (!ys.isEmpty) {
+ lb += f(ys.head, index)
+ ys = ys.tail
+ index += 1
+ }
+ lb.toList
+ }
+ final def collectMap2[A, B, C](xs1: List[A], xs2: List[B])(p: (A, B) => Boolean): Map[A, B] = {
+ if (xs1.isEmpty || xs2.isEmpty)
+ return Map()
+
+ val buf = immutable.Map.newBuilder[A, B]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ val x1 = ys1.head
+ val x2 = ys2.head
+ if (p(x1, x2))
+ buf += ((x1, x2))
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ buf.result
+ }
+ final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ }
+ final def foreach3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ f(ys1.head, ys2.head, ys3.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ }
+ final def exists2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (f(ys1.head, ys2.head))
+ return true
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ false
+ }
+ final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (!f(ys1.head, ys2.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ true
+ }
+ final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ if (!f(ys1.head, ys2.head, ys3.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ true
+ }
+}
+
+object Collections extends Collections { }
+
diff --git a/src/compiler/scala/tools/nsc/util/Origins.scala b/src/compiler/scala/reflect/internal/util/Origins.scala
index f8ba34ae3c..b9985c8f50 100644
--- a/src/compiler/scala/tools/nsc/util/Origins.scala
+++ b/src/compiler/scala/reflect/internal/util/Origins.scala
@@ -3,10 +3,12 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
-package util
+package scala.reflect
+package internal.util
-import scala.reflect.NameTransformer._
+import NameTransformer._
+import scala.collection.{ mutable, immutable }
+import Origins._
/** A debugging class for logging from whence a method is being called.
* Say you wanted to discover who was calling phase_= in SymbolTable.
@@ -33,10 +35,6 @@ import scala.reflect.NameTransformer._
}}}
*
*/
-
-import scala.collection.{ mutable, immutable }
-import Origins._
-
abstract class Origins {
type Rep
def newRep(xs: StackSlice): Rep
@@ -94,7 +92,9 @@ object Origins {
def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz))
def apply(tag: String, orElse: => Origins): Origins = {
counters find (_.tag == tag) getOrElse {
- returning(orElse setTag tag)(counters += _)
+ val res = orElse setTag tag
+ counters += res
+ res
}
}
diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala
index 5297ea6db4..9da75bf2b0 100644
--- a/src/compiler/scala/reflect/runtime/JavaToScala.scala
+++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala
@@ -34,8 +34,10 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
val global: JavaToScala.this.type = self
}
- protected def defaultReflectiveClassLoader(): JClassLoader =
- Thread.currentThread.getContextClassLoader
+ protected def defaultReflectiveClassLoader(): JClassLoader = {
+ val cl = Thread.currentThread.getContextClassLoader
+ if (cl == null) getClass.getClassLoader else cl
+ }
/** Paul: It seems the default class loader does not pick up root classes, whereas the system classloader does.
* Can you check with your newly acquired classloader fu whether this implementation makes sense?
@@ -51,7 +53,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
javaClass(path)
true
} catch {
- case (_: ClassNotFoundException) | (_: NoClassDefFoundError) =>
+ case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
false
}
@@ -270,7 +272,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
*/
private def approximateMatch(sym: Symbol, jstr: String): Boolean =
(sym.name.toString == jstr) ||
- sym.isPrivate && nme.expandedName(sym.name, sym.owner).toString == jstr
+ sym.isPrivate && nme.expandedName(sym.name.toTermName, sym.owner).toString == jstr
/**
* Find declarations or definition in class `clazz` that maps to a Java
@@ -351,32 +353,28 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable =>
* not available, wrapped from the Java reflection info.
*/
def classToScala(jclazz: jClass[_]): Symbol = classCache.toScala(jclazz) {
- if (jclazz.isMemberClass && !nme.isImplClassName(jclazz.getName)) {
- val sym = sOwner(jclazz).info.decl(newTypeName(jclazz.getSimpleName))
+ val jname = javaTypeName(jclazz)
+ def lookup = sOwner(jclazz).info.decl(newTypeName(jclazz.getSimpleName))
+
+ if (jclazz.isMemberClass && !nme.isImplClassName(jname)) {
+ val sym = lookup
assert(sym.isType, sym+"/"+jclazz+"/"+sOwner(jclazz)+"/"+jclazz.getSimpleName)
sym.asInstanceOf[ClassSymbol]
- } else if (jclazz.isLocalClass || invalidClassName(jclazz.getName)) {
+ }
+ else if (jclazz.isLocalClass || invalidClassName(jname)) {
// local classes and implementation classes not preserved by unpickling - treat as Java
jclassAsScala(jclazz)
- } else if (jclazz.isArray) {
+ }
+ else if (jclazz.isArray) {
ArrayClass
- } else jclazz match {
- case java.lang.Void.TYPE => UnitClass
- case java.lang.Byte.TYPE => ByteClass
- case java.lang.Character.TYPE => CharClass
- case java.lang.Short.TYPE => ShortClass
- case java.lang.Integer.TYPE => IntClass
- case java.lang.Long.TYPE => LongClass
- case java.lang.Float.TYPE => FloatClass
- case java.lang.Double.TYPE => DoubleClass
- case java.lang.Boolean.TYPE => BooleanClass
- case _ =>
- // jclazz is top-level - get signature
- sOwner(jclazz).info decl newTypeName(jclazz.getSimpleName)
-// val (clazz, module) = createClassModule(
-// sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _))
-// classCache enter (jclazz, clazz)
-// clazz
+ }
+ else javaTypeToValueClass(jclazz) orElse {
+ // jclazz is top-level - get signature
+ lookup
+ // val (clazz, module) = createClassModule(
+ // sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _))
+ // classCache enter (jclazz, clazz)
+ // clazz
}
}
diff --git a/src/compiler/scala/reflect/runtime/ScalaToJava.scala b/src/compiler/scala/reflect/runtime/ScalaToJava.scala
index b1e4d6224c..405a00de8d 100644
--- a/src/compiler/scala/reflect/runtime/ScalaToJava.scala
+++ b/src/compiler/scala/reflect/runtime/ScalaToJava.scala
@@ -29,17 +29,7 @@ trait ScalaToJava extends ConversionUtil { self: SymbolTable =>
def noClass = throw new ClassNotFoundException("no Java class corresponding to "+clazz+" found")
//println("classToJava "+clazz+" "+clazz.owner+" "+clazz.owner.isPackageClass)//debug
if (clazz.isValueClass)
- clazz match {
- case UnitClass => java.lang.Void.TYPE
- case ByteClass => java.lang.Byte.TYPE
- case CharClass => java.lang.Character.TYPE
- case ShortClass => java.lang.Short.TYPE
- case IntClass => java.lang.Integer.TYPE
- case LongClass => java.lang.Long.TYPE
- case FloatClass => java.lang.Float.TYPE
- case DoubleClass => java.lang.Double.TYPE
- case BooleanClass => java.lang.Boolean.TYPE
- }
+ valueClassToJavaType(clazz)
else if (clazz == ArrayClass)
noClass
else if (clazz.owner.isPackageClass)
@@ -54,7 +44,7 @@ trait ScalaToJava extends ConversionUtil { self: SymbolTable =>
}
private def expandedName(sym: Symbol): String =
- if (sym.isPrivate) nme.expandedName(sym.name, sym.owner).toString
+ if (sym.isPrivate) nme.expandedName(sym.name.toTermName, sym.owner).toString
else sym.name.toString
def fieldToJava(fld: Symbol): jField = fieldCache.toJava(fld) {
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index e617239398..231bcdbc0e 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -33,25 +33,39 @@ trait ToolBoxes extends { self: Universe =>
private def nextWrapperModuleName() = {
wrapCount += 1
- "__wrapper$" + wrapCount
+ newTermName("__wrapper$" + wrapCount)
}
private def moduleFileName(className: String) = className + "$"
private def isFree(t: Tree) = t.isInstanceOf[Ident] && t.symbol.isInstanceOf[FreeVar]
+ def typedTopLevelExpr(tree: Tree, pt: Type): Tree = {
+ val ownerClass = EmptyPackageClass.newClass(newTypeName("<expression-owner>"))
+ ownerClass.setInfo(new ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
+ val owner = ownerClass.newLocalDummy(tree.pos)
+ typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt)
+ }
+
+ def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
+ case Some(sym) if sym != null && sym != NoSymbol => sym.owner
+ case _ => NoSymbol
+ }
+
def wrapInObject(expr: Tree, fvs: List[Symbol]): ModuleDef = {
val obj = EmptyPackageClass.newModule(NoPosition, nextWrapperModuleName())
- val minfo = ClassInfoType(List(ObjectClass.tpe), new Scope, obj.moduleClass)
+ val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), new Scope, obj.moduleClass)
obj.moduleClass setInfo minfo
obj setInfo obj.moduleClass.tpe
- val meth = obj.moduleClass.newMethod(NoPosition, wrapperMethodName)
- meth setFlag Flags.STATIC
- def makeParam(fv: Symbol) = meth.newValueParameter(NoPosition, fv.name) setInfo fv.tpe
+ val meth = obj.moduleClass.newMethod(NoPosition, newTermName(wrapperMethodName))
+ def makeParam(fv: Symbol) = meth.newValueParameter(NoPosition, fv.name.toTermName) setInfo fv.tpe
meth setInfo MethodType(fvs map makeParam, expr.tpe)
minfo.decls enter meth
- val methdef = DefDef(meth, expr)
- val objdef = ModuleDef(
+ trace("wrapping ")(defOwner(expr) -> meth)
+ val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth))
+ trace("wrapped: ")(showAttributed(methdef))
+ resetAllAttrs(
+ ModuleDef(
obj,
Template(
List(TypeTree(ObjectClass.tpe)),
@@ -60,8 +74,7 @@ trait ToolBoxes extends { self: Universe =>
List(),
List(List()),
List(methdef),
- NoPosition))
- resetAllAttrs(objdef)
+ NoPosition)))
}
def wrapInPackage(clazz: Tree): PackageDef =
@@ -88,17 +101,35 @@ trait ToolBoxes extends { self: Universe =>
def runExpr(expr: Tree): Any = {
val etpe = expr.tpe
val fvs = (expr filter isFree map (_.symbol)).distinct
+
+ reporter.reset()
val className = compileExpr(expr, fvs)
+ if (reporter.hasErrors) {
+ throw new Error("reflective compilation has failed")
+ }
+
if (settings.debug.value) println("generated: "+className)
val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
- val result = jmeth.invoke(null, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*)
+ val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
+ val singleton = jfield.get(null)
+ val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*)
if (etpe.typeSymbol != FunctionClass(0)) result
else {
val applyMeth = result.getClass.getMethod("apply")
applyMeth.invoke(result)
}
}
+
+ def showAttributed(tree: Tree): String = {
+ val saved = settings.printtypes.value
+ try {
+ settings.printtypes.value = true
+ //settings.uniqid.value = true
+ tree.toString
+ } finally
+ compiler.settings.printtypes.value = saved
+ }
}
lazy val arguments = options.split(" ")
@@ -127,7 +158,7 @@ trait ToolBoxes extends { self: Universe =>
lazy val exporter = importer.reverse
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader)
-
+
private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = {
// need to establish a run an phase because otherwise we run into an assertion in TypeHistory
// that states that the period must be different from NoPeriod
@@ -135,7 +166,8 @@ trait ToolBoxes extends { self: Universe =>
compiler.phase = run.refchecksPhase
val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree])
val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type])
- val ttree: compiler.Tree = compiler.typer.typed(ctree, compiler.analyzer.EXPRmode, pt)
+// val typer = compiler.typer.atOwner(ctree, if (owner.isModule) cowner.moduleClass else cowner)
+ val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt)
ttree
}
@@ -148,14 +180,8 @@ trait ToolBoxes extends { self: Universe =>
def typeCheck(tree: rm.Tree): rm.Tree =
typeCheck(tree, WildcardType.asInstanceOf[rm.Type])
- def showAttributed(tree: rm.Tree): String = {
- val saved = compiler.settings.printtypes.value
- try {
- compiler.settings.printtypes.value = true
- importer.importTree(tree.asInstanceOf[Tree]).toString
- } finally
- compiler.settings.printtypes.value = saved
- }
+ def showAttributed(tree: rm.Tree): String =
+ compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]))
def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = {
val ttree = importAndTypeCheck(tree, expectedType)
diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
index 8c9e6a2565..9d66ca6c6e 100644
--- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
+++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala
@@ -3,11 +3,9 @@ package runtime
trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
- def staticClass(fullname: String): Symbol = definitions.getClass(newTypeName(fullname))
- def staticModule(fullname: String): Symbol = definitions.getModule(newTermName(fullname))
-
- def thisModuleType(fullname: String) =
- definitions.getModule(fullname).moduleClass.thisType
+ def staticClass(fullname: String): Symbol = definitions.getRequiredClass(fullname)
+ def staticModule(fullname: String): Symbol = definitions.getRequiredModule(fullname)
+ def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.thisType
/** Selects type symbol with given name from the defined members of prefix type
*/
@@ -41,7 +39,7 @@ trait TreeBuildUtil extends Universe with api.TreeBuildUtil {
selectIn(owner.info, idx)
}
- def freeVar(name: String, info: Type, value: Any) = new FreeVar(name, info, value)
+ def freeVar(name: String, info: Type, value: Any) = new FreeVar(newTermName(name), info, value)
def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers =
Modifiers(flags, privateWithin, annotations)
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index c59d46683e..9f1fbc4524 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -86,3 +86,4 @@ goto :eof
:end
@@endlocal
+exit /b %errorlevel%
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 470207fd35..940d115b2f 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -74,6 +74,9 @@ trait CompilationUnits { self: Global =>
* It is empty up to phase 'icode'.
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
+
+ def echo(pos: Position, msg: String) =
+ reporter.echo(pos, msg)
def error(pos: Position, msg: String) =
reporter.error(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index b10ac78ac7..6393ade146 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -136,9 +136,9 @@ class StandardCompileServer extends SocketServer {
}
if (command.shouldStopWithInfo)
- reporter.info(null, command.getInfoMessage(newGlobal(newSettings, reporter)), true)
+ reporter.echo(command.getInfoMessage(newGlobal(newSettings, reporter)))
else if (command.files.isEmpty)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else {
if (isCompilerReusable) {
info("[Reusing existing Global instance.]")
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index db95c1442b..0c52954a0b 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -24,8 +24,8 @@ abstract class Driver {
protected def doCompile(compiler: Global) {
if (command.files.isEmpty) {
- reporter.info(null, command.usageMsg, true)
- reporter.info(null, compiler.pluginOptionsHelp, true)
+ reporter.echo(command.usageMsg)
+ reporter.echo(compiler.pluginOptionsHelp)
} else {
val run = new compiler.Run()
run compile command.files
@@ -40,14 +40,14 @@ abstract class Driver {
settings = command.settings
if (settings.version.value) {
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
} else if (processSettingsHook()) {
val compiler = newCompiler()
try {
if (reporter.hasErrors)
reporter.flush()
else if (command.shouldStopWithInfo)
- reporter.info(null, command.getInfoMessage(compiler), true)
+ reporter.echo(command.getInfoMessage(compiler))
else
doCompile(compiler)
} catch {
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index b4f14dd21b..c388a62644 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -39,6 +39,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
with Trees
with TreePrinters
with DocComments
+ with MacroContext
with symtab.Positions {
override def settings = currentSettings
@@ -151,18 +152,24 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
/** Register top level class (called on entering the class)
*/
def registerTopLevelSym(sym: Symbol) {}
-
+
// ------------------ Reporting -------------------------------------
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
def error(msg: String) = globalError(msg)
def globalError(msg: String) = reporter.error(NoPosition, msg)
- def inform(msg: String) = reporter.info(NoPosition, msg, true)
+ def inform(msg: String) = reporter.echo(msg)
def warning(msg: String) =
if (opt.fatalWarnings) globalError(msg)
else reporter.warning(NoPosition, msg)
+ // Needs to call error to make sure the compile fails.
+ override def abort(msg: String): Nothing = {
+ error(msg)
+ super.abort(msg)
+ }
+
@inline final def ifDebug(body: => Unit) {
if (settings.debug.value)
body
@@ -343,7 +350,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
def run() {
echoPhaseSummary(this)
- currentRun.units foreach applyPhase
+ currentRun.units foreach { unit =>
+ if (opt.timings) {
+ val start = System.nanoTime
+ try applyPhase(unit)
+ finally unitTimings(unit) += (System.nanoTime - start)
+ }
+ else applyPhase(unit)
+ }
}
def apply(unit: CompilationUnit): Unit
@@ -669,6 +683,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
protected lazy val phasesSet = new mutable.HashSet[SubComponent]
protected lazy val phasesDescMap = new mutable.HashMap[SubComponent, String] withDefaultValue ""
private lazy val phaseTimings = new Phases.TimingModel // tracking phase stats
+ private lazy val unitTimings = mutable.HashMap[CompilationUnit, Long]() withDefaultValue 0L // tracking time spent per unit
+ private def unitTimingsFormatted(): String = {
+ def toMillis(nanos: Long) = "%.3f" format nanos / 1000000d
+
+ val formatter = new util.TableDef[(String, String)] {
+ >> ("ms" -> (_._1)) >+ " "
+ << ("path" -> (_._2))
+ }
+ "" + (
+ new formatter.Table(unitTimings.toList sortBy (-_._2) map {
+ case (unit, nanos) => (toMillis(nanos), unit.source.path)
+ })
+ )
+ }
+
protected def addToPhasesSet(sub: SubComponent, descr: String) {
phasesSet += sub
phasesDescMap(sub) = descr
@@ -1149,8 +1178,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
if (opt.profileAll)
profiler.stopProfiling()
- if (opt.timings)
+ if (opt.timings) {
inform(phaseTimings.formatted)
+ inform(unitTimingsFormatted)
+ }
// In case no phase was specified for -Xshow-class/object, show it now for sure.
if (opt.noShow)
@@ -1177,8 +1208,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
perRunCaches.clearAll()
// Reset project
- atPhase(namerPhase) {
- resetProjectClasses(definitions.RootClass)
+ if (!stopPhase("namer")) {
+ atPhase(namerPhase) {
+ resetProjectClasses(definitions.RootClass)
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/MacroContext.scala b/src/compiler/scala/tools/nsc/MacroContext.scala
new file mode 100644
index 0000000000..e739eade3a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/MacroContext.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc
+
+import symtab.Flags._
+
+trait MacroContext extends reflect.api.MacroContext { self: Global =>
+
+ def captureVariable(vble: Symbol): Unit = vble setFlag CAPTURED
+
+ def referenceCapturedVariable(id: Ident): Tree = ReferenceToBoxed(id)
+}
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index f25ea6fe5e..a627b982b6 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -185,7 +185,7 @@ trait PhaseAssembly {
* dependency on something that is dropped.
*/
def removeDanglingNodes() {
- for (node <- nodes.valuesIterator filter (_.phaseobj.isEmpty)) {
+ for (node <- nodes.values filter (_.phaseobj.isEmpty)) {
val msg = "dropping dependency on node with no phase object: "+node.phasename
informProgress(msg)
nodes -= node.phasename
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index d33be5bca0..c83ccfeef1 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -22,9 +22,4 @@ object Properties extends scala.util.PropertiesTrait {
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
def fileEndings = fileEndingString.split("""\|""").toList
-
- // System property java.home is the JRE root.
- // Environment variable JAVA_HOME is (supposed to be) the jdk root.
- // We need the latter to find javac, tools.jar, etc.
- def jdkHome = envOrElse("JAVA_HOME", javaHome)
}
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index a9330b053b..4fa2cc71e5 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -30,17 +30,17 @@ class ScalaDoc {
def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
if (docSettings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else if (docSettings.Xhelp.value)
- reporter.info(null, command.xusageMsg, true)
+ reporter.echo(command.xusageMsg)
else if (docSettings.Yhelp.value)
- reporter.info(null, command.yusageMsg, true)
+ reporter.echo(command.yusageMsg)
else if (docSettings.showPlugins.value)
reporter.warning(null, "Plugins are not available when using Scaladoc")
else if (docSettings.showPhases.value)
reporter.warning(null, "Phases are restricted when using Scaladoc")
else if (docSettings.help.value || !hasFiles)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else try {
if (docSettings.target.value == "msil")
msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 9c598bca41..6a6379cca2 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -99,9 +99,9 @@ trait DocComments { self: Global =>
*/
def useCases(sym: Symbol, site: Symbol): List[(Symbol, String, Position)] = {
def getUseCases(dc: DocComment) = {
- for (uc <- dc.useCases; defn <- uc.expandedDefs(site)) yield
+ for (uc <- dc.useCases; defn <- uc.expandedDefs(sym, site)) yield
(defn,
- expandVariables(merge(cookedDocComment(sym), uc.comment.raw, defn, copyFirstPara = true), sym, site),
+ expandVariables(merge(cookedDocComment(sym), uc.comment.raw, defn), sym, site),
uc.pos)
}
getDocComment(sym) map getUseCases getOrElse List()
@@ -220,8 +220,8 @@ trait DocComments { self: Global =>
else site.info.baseClasses
searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
- case Some(str) if str startsWith '$' => lookupVariable(str.tail, site)
- case res => res orElse lookupVariable(vble, site.owner)
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
}
}
@@ -346,7 +346,7 @@ trait DocComments { self: Global =>
var defined: List[Symbol] = List() // initialized by Typer
var aliases: List[Symbol] = List() // initialized by Typer
- def expandedDefs(site: Symbol): List[Symbol] = {
+ def expandedDefs(sym: Symbol, site: Symbol): List[Symbol] = {
def select(site: Type, name: Name, orElse: => Type): Type = {
val member = site.nonPrivateMember(name)
@@ -397,7 +397,7 @@ trait DocComments { self: Global =>
if (tpe != NoType) tpe
else {
val alias1 = alias.cloneSymbol(definitions.RootClass)
- alias1.name = repl.toTypeName
+ alias1.name = newTypeName(repl)
typeRef(NoPrefix, alias1, Nil)
}
case None =>
@@ -424,8 +424,10 @@ trait DocComments { self: Global =>
}
for (defn <- defined) yield {
- defn.cloneSymbol.setFlag(Flags.SYNTHETIC).setInfo(
- substAliases(defn.info).asSeenFrom(site.thisType, defn.owner))
+ val useCase = defn.cloneSymbol
+ useCase.owner = sym.owner
+ useCase.flags = sym.flags
+ useCase.setFlag(Flags.SYNTHETIC).setInfo(substAliases(defn.info).asSeenFrom(site.thisType, sym.owner))
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 752e3c6699..7b5de1f3dd 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -360,7 +360,7 @@ abstract class TreeBrowsers {
("Program", EMPTY)
case UnitTree(unit) =>
- ("CompilationUnit", unit.toString)
+ ("CompilationUnit", newTermName("" + unit))
case DocDef(comment, definition) =>
("DocDef", EMPTY)
@@ -441,7 +441,7 @@ abstract class TreeBrowsers {
("Apply", EMPTY)
case Super(qualif, mix) =>
- ("Super", "mix: " + mix)
+ ("Super", newTermName("mix: " + mix))
case This(qualifier) =>
("This", qualifier)
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index efc64dbbc5..2cfd21ecc8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -202,7 +202,7 @@ trait TreeDSL {
class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
def symType = sym.tpe.finalResultType
def tparams = sym.typeParams map TypeDef
- def vparamss = sym.paramss map (xs => xs map ValDef)
+ def vparamss = mapParamss(sym)(ValDef)
}
class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
def symType = sym.tpe
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 0dc3b1fffd..e69c463e71 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -128,7 +128,7 @@ abstract class TreeGen extends reflect.internal.TreeGen {
def mkManifestFactoryCall(full: Boolean, constructor: String, tparg: Type, args: List[Tree]): Tree =
mkMethodCall(
if (full) FullManifestModule else PartialManifestModule,
- constructor,
+ newTermName(constructor),
List(tparg),
args
)
@@ -161,16 +161,10 @@ abstract class TreeGen extends reflect.internal.TreeGen {
* apply the element type directly.
*/
def mkWrapArray(tree: Tree, elemtp: Type) = {
- val sym = elemtp.typeSymbol
- val meth: Name =
- if (isValueClass(sym)) "wrap"+sym.name+"Array"
- else if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(sym)) "wrapRefArray"
- else "genericWrapArray"
-
mkMethodCall(
PredefModule,
- meth,
- if (isValueClass(sym)) Nil else List(elemtp),
+ wrapArrayMethodName(elemtp),
+ if (isScalaValueType(elemtp)) Nil else List(elemtp),
List(tree)
)
}
@@ -179,8 +173,8 @@ abstract class TreeGen extends reflect.internal.TreeGen {
* elem type elemtp to expected type pt.
*/
def mkCastArray(tree: Tree, elemtp: Type, pt: Type) =
- if (elemtp.typeSymbol == AnyClass && isValueClass(tree.tpe.typeArgs.head.typeSymbol))
- mkCast(mkRuntimeCall("toObjectArray", List(tree)), pt)
+ if (elemtp.typeSymbol == AnyClass && isScalaValueType(tree.tpe.typeArgs.head))
+ mkCast(mkRuntimeCall(nme.toObjectArray, List(tree)), pt)
else
mkCast(tree, pt)
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 85849cfad4..88a9b5e18b 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -44,6 +44,15 @@ trait Trees extends reflect.internal.Trees { self: Global =>
/** emitted by typer, eliminated by refchecks */
case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree
+
+ /** Marks underlying reference to id as boxed.
+ * @pre: id must refer to a captured variable
+ * A reference such marked will refer to the boxed entity, no dereferencing
+ * with `.elem` is done on it.
+ * This tree node can be emitted by macros such as reify that call markBoxedReference.
+ * It is eliminated in LambdaLift, where the boxing conversion takes place.
+ */
+ case class ReferenceToBoxed(idt: Ident) extends TermTree
// --- factory methods ----------------------------------------------------------
@@ -77,16 +86,17 @@ trait Trees extends reflect.internal.Trees { self: Global =>
}})
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val (lvdefs, gvdefs) = evdefs map {
+ val gvdefs = evdefs map {
case vdef @ ValDef(mods, name, tpt, rhs) =>
- val fld = treeCopy.ValDef(
+ treeCopy.ValDef(
vdef.duplicate, mods, name,
atPos(focusPos(vdef.pos)) { TypeTree() setOriginal tpt setPos focusPos(tpt.pos) }, // atPos in case
EmptyTree)
- val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
- (local, fld)
- } unzip
-
+ }
+ val lvdefs = evdefs map {
+ case vdef @ ValDef(mods, name, tpt, rhs) =>
+ treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
+ }
val constrs = {
if (constrMods hasFlag TRAIT) {
if (body forall treeInfo.isInterfaceMember) List()
@@ -151,6 +161,8 @@ trait Trees extends reflect.internal.Trees { self: Global =>
traverser.traverse(lhs); traverser.traverse(rhs)
case SelectFromArray(qualifier, selector, erasure) =>
traverser.traverse(qualifier)
+ case ReferenceToBoxed(idt) =>
+ traverser.traverse(idt)
case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
// (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
case _ => super.xtraverse(traverser, tree)
@@ -160,6 +172,7 @@ trait Trees extends reflect.internal.Trees { self: Global =>
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
+ def ReferenceToBoxed(tree: Tree, idt: Ident): ReferenceToBoxed
def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
}
@@ -173,6 +186,8 @@ trait Trees extends reflect.internal.Trees { self: Global =>
new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
+ def ReferenceToBoxed(tree: Tree, idt: Ident) =
+ new ReferenceToBoxed(idt).copyAttrs(tree)
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case dc@TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
}
@@ -194,6 +209,11 @@ trait Trees extends reflect.internal.Trees { self: Global =>
if (qualifier0 == qualifier) && (selector0 == selector) => t
case _ => this.treeCopy.SelectFromArray(tree, qualifier, selector, erasure)
}
+ def ReferenceToBoxed(tree: Tree, idt: Ident) = tree match {
+ case t @ ReferenceToBoxed(idt0)
+ if (idt0 == idt) => t
+ case _ => this.treeCopy.ReferenceToBoxed(tree, idt)
+ }
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case t @ TypeTreeWithDeferredRefCheck() => t
case _ => this.treeCopy.TypeTreeWithDeferredRefCheck(tree)
@@ -219,6 +239,9 @@ trait Trees extends reflect.internal.Trees { self: Global =>
case SelectFromArray(qualifier, selector, erasure) =>
transformer.treeCopy.SelectFromArray(
tree, transformer.transform(qualifier), selector, erasure)
+ case ReferenceToBoxed(idt) =>
+ transformer.treeCopy.ReferenceToBoxed(
+ tree, transformer.transform(idt) match { case idt1: Ident => idt1 })
case TypeTreeWithDeferredRefCheck() =>
transformer.treeCopy.TypeTreeWithDeferredRefCheck(tree)
}
@@ -232,63 +255,62 @@ trait Trees extends reflect.internal.Trees { self: Global =>
/** resets symbol and tpe fields in a tree, @see ResetAttrsTraverse
*/
- def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
- def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- /** A traverser which resets symbol and tpe fields of all nodes in a given tree
- * except for (1) TypeTree nodes, whose <code>.tpe</code> field is kept, and
- * (2) This(pkg) nodes, where pkg refers to a package symbol -- their attributes are kept, and
- * (3) if a <code>.symbol</code> field refers to a symbol which is defined
- * outside the tree, it is also kept.
- *
- * (2) is necessary because some This(pkg) are generated where pkg is not
- * an enclosing package.n In that case, resetting the symbol would cause the
- * next type checking run to fail. See #3152.
+// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
+// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
+
+ def resetAllAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(false).transformPoly(x)
+ def resetLocalAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(true).transformPoly(x)
+
+ /** A transformer which resets symbol and tpe fields of all nodes in a given tree,
+ * with special treatment of:
+ * TypeTree nodes: are replaced by their original if it exists, otherwise tpe field is reset
+ * to empty if it started out empty or refers to local symbols (which are erased).
+ * TypeApply nodes: are deleted if type arguments end up reverted to empty
+ * This(pkg) nodes where pkg is a package: these are kept.
*
* (bq:) This traverser has mutable state and should be discarded after use
*/
- private class ResetAttrsTraverser extends Traverser {
- protected def isLocal(sym: Symbol): Boolean = true
- protected def resetDef(tree: Tree) {
+ private class ResetAttrsTransformer(localOnly: Boolean) extends Transformer {
+ private val erasedSyms = util.HashSet[Symbol](8)
+ private def resetDef(tree: Tree) {
+ if (tree.symbol != null && tree.symbol != NoSymbol)
+ erasedSyms addEntry tree.symbol
tree.symbol = NoSymbol
}
- override def traverse(tree: Tree): Unit = {
+ override def transform(tree: Tree): Tree = super.transform {
tree match {
+ case Template(_, _, body) =>
+ body foreach resetDef
+ resetDef(tree)
+ tree.tpe = null
+ tree
case _: DefTree | Function(_, _) | Template(_, _, _) =>
resetDef(tree)
tree.tpe = null
- tree match {
- case tree: DefDef => tree.tpt.tpe = null
- case _ => ()
- }
+ tree
case tpt: TypeTree =>
- if (tpt.wasEmpty) tree.tpe = null
+ if (tpt.original != null)
+ tpt.original
+ else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => erasedSyms contains tp.typeSymbol))))
+ tpt.tpe = null
+ tree
+ case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
+ fn
case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
- ;
+ tree
case EmptyTree =>
- ;
+ tree
case _ =>
- if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
+ if (tree.hasSymbol && (!localOnly || (erasedSyms contains tree.symbol)))
+ tree.symbol = NoSymbol
tree.tpe = null
+ tree
}
- super.traverse(tree)
- }
- }
-
- private class ResetLocalAttrsTraverser extends ResetAttrsTraverser {
- private val erasedSyms = util.HashSet[Symbol](8)
- override protected def isLocal(sym: Symbol) = erasedSyms(sym)
- override protected def resetDef(tree: Tree) {
- erasedSyms addEntry tree.symbol
- super.resetDef(tree)
}
- override def traverse(tree: Tree): Unit = tree match {
- case Template(parents, self, body) =>
- for (stat <- body)
- if (stat.isDef) erasedSyms.addEntry(stat.symbol)
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
+ def transformPoly[T <: Tree](x: T): T = {
+ val x1 = transform(x)
+ assert(x.getClass isInstance x1)
+ x1.asInstanceOf[T]
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index e27d5cacda..d7bfcfc314 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -314,7 +314,7 @@ self =>
val stmts = templateStatSeq(false)._2
accept(EOF)
- def mainModuleName = settings.script.value
+ def mainModuleName = newTermName(settings.script.value)
/** If there is only a single object template in the file and it has a
* suitable main method, we will use it rather than building another object
* around it. Since objects are loaded lazily the whole script would have
@@ -343,7 +343,7 @@ self =>
* whole additional parse. So instead, if the actual object's name differs from
* what the script is expecting, we transform it to match.
*/
- if (name.toString == mainModuleName) md
+ if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
/** If we see anything but the above, fail. */
@@ -352,7 +352,7 @@ self =>
Some(makePackaging(0, emptyPkg, newStmts))
}
- if (mainModuleName == ScriptRunner.defaultScriptMain)
+ if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
/** Here we are building an AST representing the following source fiction,
@@ -384,13 +384,13 @@ self =>
// def main
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
- def mainParameter = List(ValDef(Modifiers(Flags.PARAM), "argv", mainParamType, EmptyTree))
- def mainSetArgv = List(ValDef(NoMods, "args", TypeTree(), Ident("argv")))
+ def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
// object Main
- def moduleName = ScriptRunner scriptMain settings
+ def moduleName = newTermName(ScriptRunner scriptMain settings)
def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
@@ -980,6 +980,7 @@ self =>
nme.ERROR
}
def ident(): Name = ident(true)
+ def rawIdent(): Name = try in.name finally in.nextToken()
/** For when it's known already to be a type name. */
def identForType(): TypeName = ident().toTypeName
@@ -1117,7 +1118,7 @@ self =>
case LONGLIT => in.intVal(isNegated)
case FLOATLIT => in.floatVal(isNegated).toFloat
case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT => in.strVal
+ case STRINGLIT => in.strVal.intern()
case TRUE => true
case FALSE => false
case NULL => null
@@ -1465,8 +1466,9 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name: Name = "unary_" + ident()
- if (in.name == raw.MINUS && isNumericLit) simpleExprRest(atPos(in.offset)(literal(true)), true)
+ val name = nme.toUnaryName(rawIdent())
+ // val name = nme.toUnaryName(ident()) // val name: Name = "unary_" + ident()
+ if (name == nme.UNARY_- && isNumericLit) simpleExprRest(atPos(in.offset)(literal(true)), true)
else Select(stripParens(simpleExpr()), name)
}
}
@@ -1533,11 +1535,12 @@ self =>
case LBRACKET =>
val t1 = stripParens(t)
t1 match {
- case Ident(_) | Select(_, _) =>
- val tapp = atPos(t1.pos.startOrPoint, in.offset) {
- TypeApply(t1, exprTypeArgs())
- }
- simpleExprRest(tapp, true)
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ var app: Tree = t1
+ while (in.token == LBRACKET)
+ app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
+
+ simpleExprRest(app, true)
case _ =>
t1
}
@@ -1742,11 +1745,16 @@ self =>
* }}}
*/
def pattern1(): Tree = pattern2() match {
- case p @ Ident(name) if treeInfo.isVarPattern(p) && in.token == COLON =>
- atPos(p.pos.startOrPoint, in.skipToken()) { Typed(p, compoundType()) }
- case p =>
- p
+ case p @ Ident(name) if in.token == COLON =>
+ if (treeInfo.isVarPattern(p))
+ atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+ else {
+ syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
+ p
+ }
+ case p => p
}
+
/** {{{
* Pattern2 ::= varid [ @ Pattern3 ]
* | Pattern3
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index b8fa55447a..ffe65aec63 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -58,11 +58,11 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
private object xmltypes extends XMLTypeNames {
type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
+ implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
}
private object xmlterms extends XMLTermNames {
type NameType = TermName
- implicit def createNameType(name: String): TermName = newTermName(name)
+ implicit def createNameType(name: String): TermName = newTermNameCached(name)
}
import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index d80a1c4f34..27df45b563 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -39,9 +39,9 @@ trait JavaPlatform extends Platform {
) ++ depAnalysisPhase
lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
- lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
- lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
- lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+ lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, nme.equalsNumNum)
+ lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, nme.equalsNumChar)
+ lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, nme.equalsNumObject)
/** We could get away with excluding BoxedBooleanClass for the
* purpose of equality testing since it need not compare equal
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 56e05cdc04..4ab0eb0129 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -3,13 +3,12 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ArrayBuffer }
+import mutable.{ ListBuffer, ArrayBuffer }
import util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
@@ -17,23 +16,83 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ settings, log, nme }
+ import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
+
+ object NoBasicBlock extends BasicBlock(-1, null)
/** This class represents a basic block. Each
* basic block contains a list of instructions that are
* either executed all, or none. No jumps
* to/from the "middle" of the basic block are allowed (modulo exceptions).
*/
- class BasicBlock(val label: Int, val method: IMethod)
- extends AnyRef
- with ProgramPoint[BasicBlock]
- with Seq[Instruction] {
+ class BasicBlock(val label: Int, val method: IMethod) extends ProgramPoint[BasicBlock] {
+ outer =>
import BBFlags._
def code = method.code
+ private final class SuccessorList() {
+ private var successors: List[BasicBlock] = Nil
+ private def updateConserve() {
+ var lb: ListBuffer[BasicBlock] = null
+ var matches = 0
+ var remaining = successors
+
+ def addBlock(bb: BasicBlock) {
+ if (matches < 0)
+ lb += bb
+ else if (remaining.isEmpty || bb != remaining.head) {
+ lb = ListBuffer[BasicBlock]() ++= (successors take matches) += bb
+ matches = -1
+ }
+ else {
+ matches += 1
+ remaining = remaining.tail
+ }
+ }
+
+ // exceptionSuccessors
+ method.exh foreach { handler =>
+ if (handler covers outer)
+ addBlock(handler.startBlock)
+ }
+ // directSuccessors
+ val direct = directSuccessors
+ direct foreach addBlock
+
+ /** Return a list of successors for 'b' that come from exception handlers
+ * covering b's (non-exceptional) successors. These exception handlers
+ * might not cover 'b' itself. This situation corresponds to an
+ * exception being thrown as the first thing of one of b's successors.
+ */
+ method.exh foreach { handler =>
+ direct foreach { block =>
+ if (handler covers block)
+ addBlock(handler.startBlock)
+ }
+ }
+ // Blocks did not align: create a new list.
+ if (matches < 0)
+ successors = lb.toList
+ // Blocks aligned, but more blocks remain. Take a prefix of the list.
+ else if (remaining.nonEmpty)
+ successors = successors take matches
+ // Otherwise the list is unchanged, leave it alone.
+ }
+
+ /** This is called millions of times: it is performance sensitive. */
+ def updateSuccs() {
+ if (isEmpty) {
+ if (successors.nonEmpty)
+ successors = Nil
+ }
+ else updateConserve()
+ }
+ def toList = successors
+ }
+
/** Flags of this basic block. */
private var flags: Int = 0
@@ -76,20 +135,23 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS | DIRTYPREDS)
/** Cached predecessors. */
- var preds: List[BasicBlock] = null
+ var preds: List[BasicBlock] = Nil
/** Local variables that are in scope at entry of this basic block. Used
* for debugging information.
*/
- var varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
+ val varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
/** ICode instructions, used as temporary storage while emitting code.
* Once closed is called, only the `instrs` array should be used.
*/
private var instructionList: List[Instruction] = Nil
-
private var instrs: Array[Instruction] = _
- override def toList: List[Instruction] =
+
+ def take(n: Int): Seq[Instruction] =
+ if (closed) instrs take n else instructionList takeRight n reverse
+
+ def toList: List[Instruction] =
if (closed) instrs.toList else instructionList.reverse
/** Return an iterator over the instructions in this basic block. */
@@ -117,17 +179,37 @@ trait BasicBlocks {
}
/** Apply a function to all the instructions of the block. */
- override def foreach[U](f: Instruction => U) = {
- // !!! This appears to change behavior if I try to avoid the implicit
- // conversion and traverse the array directly, which presumably means it
- // is dependent on some mutation which is taking place during traversal.
- // Please eliminate this if humanly possible.
+ final def foreach[U](f: Instruction => U) = {
if (!closed) dumpMethodAndAbort(method, this)
else instrs foreach f
+
+ // !!! If I replace "instrs foreach f" with the following:
+ // var i = 0
+ // val len = instrs.length
+ // while (i < len) {
+ // f(instrs(i))
+ // i += 1
+ // }
+ //
+ // Then when compiling under -optimise, quick.plugins fails as follows:
+ //
+ // quick.plugins:
+ // [mkdir] Created dir: /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] Compiling 5 files to /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] error: java.lang.VerifyError: (class: scala/tools/nsc/typechecker/Implicits$ImplicitSearch, method: typedImplicit0 signature: (Lscala/tools/nsc/typechecker/Implicits$ImplicitInfo;Z)Lscala/tools/nsc/typechecker/Implicits$SearchResult;) Incompatible object argument for function call
+ // [scalacfork] at scala.tools.nsc.typechecker.Implicits$class.inferImplicit(Implicits.scala:67)
+ // [scalacfork] at scala.tools.nsc.Global$$anon$1.inferImplicit(Global.scala:419)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.wrapImplicit$1(Typers.scala:170)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.inferView(Typers.scala:174)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:963)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4378)
+ //
+ // This is bad and should be understood/eliminated.
}
/** The number of instructions in this basic block so far. */
def length = if (closed) instrs.length else instructionList.length
+ def size = length
/** Return the n-th instruction. */
def apply(n: Int): Instruction =
@@ -208,7 +290,7 @@ trait BasicBlocks {
*/
def removeLastInstruction() {
if (closed)
- removeInstructionsAt(size)
+ removeInstructionsAt(length)
else {
instructionList = instructionList.tail
code.touched = true
@@ -321,10 +403,11 @@ trait BasicBlocks {
def clear() {
instructionList = Nil
instrs = null
- preds = null
+ preds = Nil
}
- override def isEmpty = instructionList.isEmpty
+ final def isEmpty = instructionList.isEmpty
+ final def nonEmpty = !isEmpty
/** Enter ignore mode: new 'emit'ted instructions will not be
* added to this basic block. It makes the generation of THROW
@@ -341,33 +424,33 @@ trait BasicBlocks {
/** Return the last instruction of this basic block. */
def lastInstruction =
- if (closed) instrs.last
+ if (closed) instrs(instrs.length - 1)
else instructionList.head
def firstInstruction =
if (closed) instrs(0)
else instructionList.last
+ def exceptionSuccessors: List[BasicBlock] =
+ exceptionSuccessorsForBlock(this)
+
def exceptionSuccessorsForBlock(block: BasicBlock): List[BasicBlock] =
method.exh collect { case x if x covers block => x.startBlock }
/** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
- private var succs: List[BasicBlock] = Nil
- private def updateSuccs() {
- resetFlag(DIRTYSUCCS)
- succs =
- if (isEmpty) Nil
- else exceptionSuccessors ++ directSuccessors ++ indirectExceptionSuccessors
- }
+ private val succs = new SuccessorList
- def successors : List[BasicBlock] = {
- if (touched) updateSuccs()
- succs
+ def successors: List[BasicBlock] = {
+ if (touched) {
+ succs.updateSuccs()
+ resetFlag(DIRTYSUCCS)
+ }
+ succs.toList
}
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => List(whereto)
+ case JUMP(whereto) => whereto :: Nil
case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
case SWITCH(_, labels) => labels
@@ -379,17 +462,6 @@ trait BasicBlocks {
else Nil
}
- def exceptionSuccessors: List[BasicBlock] =
- exceptionSuccessorsForBlock(this)
-
- /** Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
- */
- def indirectExceptionSuccessors: List[BasicBlock] =
- directSuccessors flatMap exceptionSuccessorsForBlock distinct
-
/** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
if (hasFlag(DIRTYPREDS)) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 1880bdc52c..ffc6640743 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -3,13 +3,11 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable, generic }
-import util.{ Position, NoPosition }
+import scala.collection.{ mutable, immutable }
/**
* Exception handlers are pieces of code that `handle` exceptions on
@@ -21,10 +19,10 @@ import util.{ Position, NoPosition }
trait ExceptionHandlers {
self: ICodes =>
- import global.{ definitions, Symbol, NoSymbol }
+ import global._
import definitions.{ ThrowableClass }
- class ExceptionHandler(val method: IMethod, val label: String, val cls: Symbol, val pos: Position) {
+ class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
private var _startBlock: BasicBlock = _;
var finalizer: Finalizer = _;
@@ -69,12 +67,12 @@ trait ExceptionHandlers {
def dup: ExceptionHandler = new ExceptionHandler(this)
}
- class Finalizer(method: IMethod, label: String, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
+ class Finalizer(method: IMethod, label: TermName, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
- object NoFinalizer extends Finalizer(null, "<no finalizer>", NoPosition) {
+ object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
override def dup = this
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index e26a0d59e8..803bd05031 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -102,7 +102,7 @@ abstract class GenICode extends SubComponent {
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
debuglog("Entering method " + name)
val m = new IMethod(tree.symbol)
- m.sourceFile = unit.source.toString()
+ m.sourceFile = unit.source
m.returnType = if (tree.symbol.isConstructor) UNIT
else toTypeKind(tree.symbol.info.resultType)
ctx.clazz.addMethod(m)
@@ -1078,6 +1078,7 @@ abstract class GenICode extends SubComponent {
}
caseCtx = genLoad(body, tmpCtx, generatedType)
+ // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
}
ctx1.bb.emitOnly(
@@ -1292,7 +1293,7 @@ abstract class GenICode extends SubComponent {
/** The Object => String overload.
*/
- private lazy val String_valueOf: Symbol = getMember(StringModule, "valueOf") filter (sym =>
+ private lazy val String_valueOf: Symbol = getMember(StringModule, nme.valueOf) filter (sym =>
sym.info.paramTypes match {
case List(pt) => pt.typeSymbol == ObjectClass
case _ => false
@@ -1304,7 +1305,7 @@ abstract class GenICode extends SubComponent {
// case we want to get more precise.
//
// private def valueOfForType(tp: Type): Symbol = {
- // val xs = getMember(StringModule, "valueOf") filter (sym =>
+ // val xs = getMember(StringModule, nme.valueOf) filter (sym =>
// // We always exclude the Array[Char] overload because java throws an NPE if
// // you pass it a null. It will instead find the Object one, which doesn't.
// sym.info.paramTypes match {
@@ -1351,7 +1352,7 @@ abstract class GenICode extends SubComponent {
def genScalaHash(tree: Tree, ctx: Context): Context = {
val hashMethod = {
ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
- getMember(ScalaRunTimeModule, "hash")
+ getMember(ScalaRunTimeModule, nme.hash_)
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
@@ -1715,7 +1716,7 @@ abstract class GenICode extends SubComponent {
do {
changed = false
n += 1
- method.code.blocks foreach prune0
+ method.blocks foreach prune0
} while (changed)
debuglog("Prune fixpoint reached in " + n + " iterations.");
@@ -1923,7 +1924,7 @@ abstract class GenICode extends SubComponent {
val ctx1 = new Context(this) setMethod(m)
ctx1.labels = mutable.HashMap()
ctx1.method.code = new Code(m)
- ctx1.bb = ctx1.method.code.startBlock
+ ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
ctx1.enterScope
@@ -1931,11 +1932,12 @@ abstract class GenICode extends SubComponent {
}
/** Return a new context for a new basic block. */
- def newBlock: Context = {
+ def newBlock(): Context = {
val block = method.code.newBlock
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
- block.varsInScope = mutable.HashSet() ++= scope.varsInScope
+ block.varsInScope.clear()
+ block.varsInScope ++= scope.varsInScope
new Context(this) setBasicBlock block
}
@@ -1957,7 +1959,7 @@ abstract class GenICode extends SubComponent {
*/
private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
handlerCount += 1
- val exh = new ExceptionHandler(method, "" + handlerCount, cls, pos)
+ val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 7a0017944b..631b71d83a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -84,15 +84,16 @@ abstract class ICodes extends AnyRef
def checkValid(m: IMethod) {
// always slightly dicey to iterate over mutable structures
- val bs = m.code.blocks.toList
- for (b <- bs ; if !b.closed) {
- // Something is leaving open/empty blocks around (see SI-4840) so
- // let's not kill the deal unless it's nonempty.
- if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
- m.code removeBlock b
+ m foreachBlock { b =>
+ if (!b.closed) {
+ // Something is leaving open/empty blocks around (see SI-4840) so
+ // let's not kill the deal unless it's nonempty.
+ if (b.isEmpty) {
+ log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ m.code removeBlock b
+ }
+ else dumpMethodAndAbort(m, b)
}
- else dumpMethodAndAbort(m, b)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 1978a23d90..f71c8de449 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -36,7 +36,7 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.code.startBlock;
+ val b = m.startBlock;
blocks = Nil;
run {
@@ -106,7 +106,7 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = {
blocks = Nil;
- dfs(m.code.startBlock);
+ dfs(m.startBlock);
m.exh foreach (b => dfs(b.startBlock));
blocks.reverse
@@ -150,14 +150,14 @@ trait Linearizers {
added.clear;
m.exh foreach (b => rpo(b.startBlock));
- rpo(m.code.startBlock);
+ rpo(m.startBlock);
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
- if (m.code.startBlock.predecessors eq Nil)
+ if (m.startBlock.predecessors eq Nil)
blocks
else
- m.code.startBlock :: (blocks.filterNot(_ == m.code.startBlock))
+ m.startBlock :: (blocks.filterNot(_ == m.startBlock))
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -195,7 +195,7 @@ trait Linearizers {
* the last instruction being a jump).
*/
class DumpLinearizer extends Linearizer {
- def linearize(m: IMethod): List[BasicBlock] = m.code.blocks.toList
+ def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
@@ -250,7 +250,7 @@ trait Linearizers {
* @param frozen blocks can't be moved (fist block of a method, blocks directly following a try-catch)
*/
def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.code.startBlock, method)
+ assert(blocks.head == method.startBlock, method)
// blocks before the try, and blocks for the try
val beforeAndTry = new ListBuffer[BasicBlock]()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 2f43d43bdd..2668e7f29f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,13 +3,13 @@
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
+import util.{ SourceFile, NoSourceFile }
import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
@@ -17,30 +17,34 @@ trait ReferenceEquality {
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
}
-trait Members { self: ICodes =>
+trait Members {
+ self: ICodes =>
+
import global._
+
+ object NoCode extends Code(null, "NoCode") {
+ override def blocksList: List[BasicBlock] = Nil
+ }
/**
* This class represents the intermediate code of a method or
* other multi-block piece of code, like exception handlers.
*/
- class Code(label: String, method: IMethod) {
- def this(method: IMethod) = this(method.symbol.simpleName.toString, method)
-
+ class Code(method: IMethod, name: String) {
+ def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
/** The set of all blocks */
val blocks = mutable.ListBuffer[BasicBlock]()
/** The start block of the method */
- var startBlock: BasicBlock = null
-
- /** The stack produced by this method */
- var producedStack: TypeStack = null
+ var startBlock: BasicBlock = NoBasicBlock
private var currentLabel: Int = 0
private var _touched = false
- def blockCount = blocks.size
- def instructionCount = blocks map (_.length) sum
+ def blocksList: List[BasicBlock] = blocks.toList
+ def instructions = blocksList flatMap (_.iterator)
+ def blockCount = blocks.size
+ def instructionCount = blocks map (_.length) sum
def touched = _touched
def touched_=(b: Boolean): Unit = {
@@ -73,7 +77,7 @@ trait Members { self: ICodes =>
}
/** This methods returns a string representation of the ICode */
- override def toString() : String = "ICode '" + label + "'";
+ override def toString = "ICode '" + name + "'";
/* Compute a unique new label */
def nextLabel: Int = {
@@ -83,7 +87,7 @@ trait Members { self: ICodes =>
/* Create a new block and append it to the list
*/
- def newBlock: BasicBlock = {
+ def newBlock(): BasicBlock = {
touched = true
val block = new BasicBlock(nextLabel, method);
blocks += block;
@@ -134,6 +138,8 @@ trait Members { self: ICodes =>
/** Represent a field in ICode */
class IField(val symbol: Symbol) extends IMember { }
+
+ object NoIMethod extends IMethod(NoSymbol) { }
/**
* Represents a method in ICode. Local variables contain
@@ -146,14 +152,23 @@ trait Members { self: ICodes =>
* finished (GenICode does that).
*/
class IMethod(val symbol: Symbol) extends IMember {
- var code: Code = null
+ var code: Code = NoCode
+
+ def newBlock() = code.newBlock
+ def startBlock = code.startBlock
+ def lastBlock = blocks.last
+ def blocks = code.blocksList
+ def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
+
+ def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
+ def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
+
var native = false
/** The list of exception handlers, ordered from innermost to outermost. */
var exh: List[ExceptionHandler] = Nil
- var sourceFile: String = _
+ var sourceFile: SourceFile = NoSourceFile
var returnType: TypeKind = _
-
var recursive: Boolean = false
/** local variables and method parameters */
@@ -162,7 +177,8 @@ trait Members { self: ICodes =>
/** method parameters */
var params: List[Local] = Nil
- def hasCode = code != null
+ // TODO - see how null is still arriving here
+ def hasCode = (code ne NoCode) && (code ne null)
def setCode(code: Code): IMethod = {
this.code = code;
this
@@ -200,12 +216,14 @@ trait Members { self: ICodes =>
import opcodes._
def checkLocals(): Unit = {
- def localsSet = code.blocks.flatten collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- } toSet
+ def localsSet = (code.blocks flatMap { bb =>
+ bb.iterator collect {
+ case LOAD_LOCAL(l) => l
+ case STORE_LOCAL(l) => l
+ }
+ }).toSet
- if (code != null) {
+ if (hasCode) {
log("[checking locals of " + this + "]")
locals filterNot localsSet foreach { l =>
log("Local " + l + " is not declared in " + this)
@@ -219,7 +237,7 @@ trait Members { self: ICodes =>
*
* This method should be most effective after heavy inlining.
*/
- def normalize(): Unit = if (this.code ne null) {
+ def normalize(): Unit = if (this.hasCode) {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index ff4abbb757..4ea253d29d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -82,7 +82,7 @@ trait Printers { self: ICodes =>
if (!m.isAbstractMethod) {
println(" {")
println("locals: " + m.locals.mkString("", ", ", ""))
- println("startBlock: " + m.code.startBlock)
+ println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
println
lin.linearize(m) foreach printBlock
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 45ab7ae43c..ba4b250303 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -21,6 +21,8 @@ trait TypeStacks {
* stack of the ICode.
*/
type Rep = List[TypeKind]
+
+ object NoTypeStack extends TypeStack(Nil) { }
class TypeStack(var types: Rep) {
if (types.nonEmpty)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 5af5b05682..229bbceb36 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -194,9 +194,9 @@ abstract class CopyPropagation {
this.method = m
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = lattice.bottom
out(b) = lattice.bottom
assert(out.contains(b))
@@ -207,21 +207,21 @@ abstract class CopyPropagation {
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.code.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
}
}
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.foldLeft(in)(interpret)
+ b.iterator.foldLeft(in)(interpret)
import opcodes._
@@ -520,8 +520,8 @@ abstract class CopyPropagation {
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
- val bindings = mutable.HashMap[Symbol, Value]()
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -562,13 +562,12 @@ abstract class CopyPropagation {
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
- final override def toString(): String = {
- var res = ""
- for (b <- this.method.code.blocks.toList)
- res = (res + "\nIN(" + b.label + "):\t Bindings: " + in(b).bindings +
- "\nIN(" + b.label +"):\t Stack: " + in(b).stack) + "\n";
- res
- }
+ final override def toString() = (
+ method.blocks map { b =>
+ "\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
+ "\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
+ } mkString
+ )
} /* class CopyAnalysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index c656219dc8..49f5b51d51 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -33,10 +33,9 @@ abstract class Liveness {
final class LivenessAnalysis extends DataFlowAnalysis[livenessLattice.type] {
type P = BasicBlock
- val lattice = livenessLattice
- var method: IMethod = _
-
- val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
+ val lattice = livenessLattice
+ var method: IMethod = _
+ val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
val kill: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
def init(m: IMethod) {
@@ -44,14 +43,15 @@ abstract class Liveness {
gen.clear()
kill.clear()
- for (b <- m.code.blocks; (g, k) = genAndKill(b)) {
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
gen += (b -> g)
kill += (b -> k)
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
@@ -75,7 +75,7 @@ abstract class Liveness {
override def run() {
backwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
@@ -89,29 +89,14 @@ abstract class Liveness {
* liveness *before* the given instruction `i`.
*/
def interpret(out: lattice.Elem, i: Instruction): lattice.Elem = {
- var in = out
-
- if (settings.debug.value) {
- log("- " + i)
- log("out: " + out)
- log("\n")
- }
-
+ debuglog("- " + i + "\nout: " + out + "\n")
i match {
- case LOAD_LOCAL(l) => in += l
- case STORE_LOCAL(l) => in -= l
- case _ =>
- ()
- }
- in
- } /* def interpret */
-
- override def toString(): String = {
- val buf = new StringBuilder()
- for (b <- method.code.blocks.toList) {
- buf.append("\nlive-in(" + b + ")=" + in(b) + "\nlive-out(" + b + ")=" + out(b));
+ case LOAD_LOCAL(l) => out + l
+ case STORE_LOCAL(l) => out - l
+ case _ => out
}
- buf.toString()
}
+ override def toString() =
+ method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b))) mkString
} /* Liveness analysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index eac714f999..c06bd2e097 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -78,7 +78,10 @@ abstract class ReachingDefinitions {
drops.clear()
outStack.clear()
- for (b <- m.code.blocks.toList; (g, k) = genAndKill(b); (d, st) = dropsAndGen(b)) {
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
+ val (d, st) = dropsAndGen(b)
+
gen += (b -> g)
kill += (b -> k)
drops += (b -> d)
@@ -86,8 +89,8 @@ abstract class ReachingDefinitions {
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
@@ -141,7 +144,7 @@ abstract class ReachingDefinitions {
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index a34d269cc9..937b0bdc3d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -110,16 +110,16 @@ abstract class TypeFlowAnalysis {
this.method = m
//typeFlowLattice.lubs = 0
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = typeFlowLattice.bottom
out(b) = typeFlowLattice.bottom
}
// start block has var bindings for each of its parameters
val entryBindings = new VarBinding ++= (m.params map (p => ((p, p.kind))))
- in(m.code.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
+ in(m.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
m.exh foreach { e =>
in(e.startBlock) = lattice.IState(in(e.startBlock).vars, typeStackLattice.exceptionHandlerStack)
@@ -132,16 +132,18 @@ abstract class TypeFlowAnalysis {
if (this.method == null || this.method.symbol != m.symbol)
init(m)
else reinit {
- for (b <- m.code.blocks; if !in.isDefinedAt(b)) {
- for (p <- b.predecessors) {
- if (out.isDefinedAt(p)) {
- in(b) = out(p)
- worklist += p
- }
-/* else
- in(b) = typeFlowLattice.bottom
-*/ }
- out(b) = typeFlowLattice.bottom
+ m foreachBlock { b =>
+ if (!in.contains(b)) {
+ for (p <- b.predecessors) {
+ if (out.isDefinedAt(p)) {
+ in(b) = out(p)
+ worklist += p
+ }
+ /* else
+ in(b) = typeFlowLattice.bottom
+ */ }
+ out(b) = typeFlowLattice.bottom
+ }
}
for (handler <- m.exh) {
val start = handler.startBlock
@@ -164,7 +166,7 @@ abstract class TypeFlowAnalysis {
forwardAnalysis(blockTransfer)
val t = timer.stop
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
@@ -174,7 +176,7 @@ abstract class TypeFlowAnalysis {
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- b.foldLeft(in)(interpret)
+ b.iterator.foldLeft(in)(interpret)
}
/** The flow function of a given basic block. */
/* var flowFun: immutable.Map[BasicBlock, TransferFunction] = new immutable.HashMap */
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
index 301dbd18d6..e7cf716add 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
@@ -23,15 +23,10 @@ trait GenAndroid {
* `Parcelable` interface must also have a static field called `CREATOR`,
* which is an object implementing the `Parcelable.Creator` interface.
*/
- private val fieldName = "CREATOR"
+ private val fieldName = newTermName("CREATOR")
- private lazy val AndroidParcelableInterface =
- try definitions.getClass("android.os.Parcelable")
- catch { case _: FatalError => NoSymbol }
-
- private lazy val AndroidCreatorClass =
- if (AndroidParcelableInterface == NoSymbol) NoSymbol
- else definitions.getClass("android.os.Parcelable$Creator")
+ private lazy val AndroidParcelableInterface = definitions.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = definitions.getClassIfDefined("android.os.Parcelable$Creator")
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index e80927f620..03d1bc3ad2 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -3,7 +3,6 @@
* @author Iulian Dragos
*/
-
package scala.tools.nsc
package backend.jvm
@@ -13,6 +12,7 @@ import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.reflect.SigParser
import scala.tools.nsc.symtab._
+import scala.tools.nsc.util.{ SourceFile, NoSourceFile }
import scala.reflect.internal.ClassfileConstants._
import ch.epfl.lamp.fjbg._
import JAccessFlags._
@@ -172,7 +172,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val PublicStatic = ACC_PUBLIC | ACC_STATIC
val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
- val StringBuilderClassName = definitions.StringBuilderClass.fullName
+ val StringBuilderClassName = javaName(definitions.StringBuilderClass)
val BoxesRunTime = "scala.runtime.BoxesRunTime"
val StringBuilderType = new JObjectType(StringBuilderClassName)
@@ -183,10 +183,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
// Scala attributes
- val BeanInfoAttr = definitions.getClass("scala.beans.BeanInfo")
- val BeanInfoSkipAttr = definitions.getClass("scala.beans.BeanInfoSkip")
- val BeanDisplayNameAttr = definitions.getClass("scala.beans.BeanDisplayName")
- val BeanDescriptionAttr = definitions.getClass("scala.beans.BeanDescription")
+ val BeanInfoAttr = definitions.getRequiredClass("scala.beans.BeanInfo")
+ val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
+ val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
+ val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
+
+ final val ExcludedForwarderFlags = {
+ import Flags._
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | BridgeAndPrivateFlags )
+ }
// Additional interface parents based on annotations and other cues
def newParentForAttr(attr: Symbol): Option[Type] = attr match {
@@ -291,7 +296,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(jclass.getName()) =>
+ case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
val scalaAttr =
fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
versionPickle.bytes, versionPickle.writeIndex)
@@ -355,7 +360,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (isTopLevelModule(c.symbol)) {
if (c.symbol.companionClass == NoSymbol)
- generateMirrorClass(c.symbol, c.cunit.source.toString)
+ generateMirrorClass(c.symbol, c.cunit.source)
else
log("No mirror class for module with linked class: " +
c.symbol.fullName)
@@ -754,7 +759,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
null
else {
val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(outerName)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
else outerName
}
}
@@ -925,8 +930,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
mopt match {
case Some(m) =>
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
if (isStaticModule(clasz.symbol)) {
@@ -1044,32 +1049,20 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val className = jclass.getName
val linkedClass = moduleClass.companionClass
val linkedModule = linkedClass.companionSymbol
+ lazy val conflictingNames: Set[Name] = {
+ linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
+ }
+ debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
- /** There was a bit of a gordian logic knot here regarding forwarders.
- * All we really have to do is exclude certain categories of symbols and
- * then all matching names.
- */
- def memberNames(sym: Symbol) = sym.info.members map (_.name.toString) toSet
- lazy val membersInCommon =
- memberNames(linkedModule) intersect memberNames(linkedClass)
-
- /** Should method `m` get a forwarder in the mirror class? */
- def shouldForward(m: Symbol): Boolean = (
- m.owner != ObjectClass
- && m.isMethod
- && m.isPublic
- && !m.hasFlag(Flags.CASE | Flags.DEFERRED | Flags.SPECIALIZED | Flags.LIFTED)
- && !m.isConstructor
- && !m.isStaticMember
- && !membersInCommon(m.name.toString)
- )
-
- for (m <- moduleClass.info.nonPrivateMembers) {
- if (shouldForward(m)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
+ debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
+ else if (conflictingNames(m.name))
+ log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
addForwarder(jclass, moduleClass, m)
}
- else debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
}
}
@@ -1079,7 +1072,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* generated if there is no companion class: if there is, an attempt will
* instead be made to add the forwarder methods to the companion class.
*/
- def generateMirrorClass(clasz: Symbol, sourceFile: String) {
+ def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
import JAccessFlags._
val moduleName = javaName(clasz) // + "$"
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
@@ -1087,7 +1080,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
mirrorName,
JAVA_LANG_OBJECT.getName,
JClass.NO_INTERFACES,
- sourceFile)
+ "" + sourceFile)
log("Dumping mirror class for '%s'".format(mirrorClass.getName))
addForwarders(mirrorClass, clasz)
@@ -1199,7 +1192,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
hostSymbol.info ; methodOwner.info
def isInterfaceCall(sym: Symbol) = (
- sym.isInterface
+ sym.isInterface && methodOwner != ObjectClass
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
@@ -1901,7 +1894,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (sym.isInterface) ACC_INTERFACE else 0,
if (finalFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge || sym.hasFlag(Flags.MIXEDIN) && sym.isMethod) ACC_BRIDGE else 0,
+ if (sym.isBridge) ACC_BRIDGE else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
if (sym.isVarargsMethod) ACC_VARARGS else 0
)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
index acaf1f6cc2..93d3d19ac8 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -33,11 +33,11 @@ trait GenJVMUtil {
)
// Don't put this in per run caches.
- private val javaNameCache = new mutable.WeakHashMap[Symbol, String]() ++= List(
- NothingClass -> RuntimeNothingClass.fullName('/'),
- RuntimeNothingClass -> RuntimeNothingClass.fullName('/'),
- NullClass -> RuntimeNullClass.fullName('/'),
- RuntimeNullClass -> RuntimeNullClass.fullName('/')
+ private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
)
/** This trait may be used by tools who need access to
@@ -70,7 +70,6 @@ trait GenJVMUtil {
def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
/** Return the a name of this symbol that can be used on the Java
* platform. It removes spaces from names.
*
@@ -86,11 +85,13 @@ trait GenJVMUtil {
*/
def javaName(sym: Symbol): String =
javaNameCache.getOrElseUpdate(sym, {
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- })
+ sym.name.newName(
+ if (sym.isClass || (sym.isModule && !sym.isMethod))
+ sym.javaBinaryName
+ else
+ sym.javaSimpleName
+ )
+ }).toString
def javaType(t: TypeKind): JType = (t: @unchecked) match {
case UNIT => JType.VOID
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index e208f5a8da..d2e54ff3f1 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -124,8 +124,8 @@ abstract class GenMSIL extends SubComponent {
// Scala attributes
// symtab.Definitions -> object (singleton..)
val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.getClass("scala.cloneable").tpe
- val TransientAtt = definitions.getClass("scala.transient").tpe
+ val CloneableAttr = definitions.CloneableAttr.tpe
+ val TransientAtt = definitions.TransientAttr.tpe
// remoting: the architectures are too different, no mapping (no portable code
// possible)
@@ -1898,8 +1898,8 @@ abstract class GenMSIL extends SubComponent {
val sc = iclass.lookupStaticCtor
if (sc.isDefined) {
val m = sc.get
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
// call object's private ctor from static ctor
lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 3e921cf472..e8abee7d06 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -94,12 +94,12 @@ abstract class ClosureElimination extends SubComponent {
import copyPropagation._
/* Some embryonic copy propagation. */
- def analyzeMethod(m: IMethod): Unit = try {if (m.code ne null) {
+ def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
log("Analyzing " + m)
cpp.init(m)
cpp.run
- for (bb <- linearizer.linearize(m)) {
+ m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
debuglog("Cpp info at entry to block " + bb + ": " + info)
@@ -201,28 +201,25 @@ abstract class ClosureElimination extends SubComponent {
/** Peephole optimization. */
abstract class PeepholeOpt {
- private var method: IMethod = null
+ private var method: IMethod = NoIMethod
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
- def apply(m: IMethod): Unit = if (m.code ne null) {
+ def apply(m: IMethod): Unit = if (m.hasCode) {
method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
liveness.run
- for (b <- m.code.blocks)
- transformBlock(b)
+ m foreachBlock transformBlock
}
def transformBlock(b: BasicBlock): Unit = if (b.size >= 2) {
- var newInstructions: List[Instruction] = Nil
-
- newInstructions = b.toList
-
+ var newInstructions: List[Instruction] = b.toList
var redo = false
+
do {
var h = newInstructions.head
var t = newInstructions.tail
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 64df3b4636..5fc7329955 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -72,7 +72,7 @@ abstract class DeadCodeElimination extends SubComponent {
val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
- if (m.code ne null) {
+ if (m.hasCode) {
log("dead code elimination on " + m);
dropOf.clear()
m.code.blocks.clear()
@@ -90,12 +90,12 @@ abstract class DeadCodeElimination extends SubComponent {
}
/** collect reaching definitions and initial useful instructions for this method. */
- def collectRDef(m: IMethod): Unit = if (m.code ne null) {
+ def collectRDef(m: IMethod): Unit = if (m.hasCode) {
defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
rdef.init(m);
rdef.run;
- for (bb <- m.code.blocks.toList) {
+ m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
@@ -184,7 +184,7 @@ abstract class DeadCodeElimination extends SubComponent {
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
- for (bb <- m.code.blocks.toList) {
+ m foreachBlock { bb =>
// Console.println("** Sweeping block " + bb + " **")
val oldInstr = bb.toList
bb.open
@@ -223,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent {
private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
- for (bb <- m.code.blocks) {
+ m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
for ((i, idx) <- bb.toList.zipWithIndex) {
if (!useful(bb)(idx)) {
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index 1d971beae4..a37a3406a8 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -79,7 +79,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
/* Type Flow Analysis */
private val tfa: analysis.MethodTFA = new analysis.MethodTFA()
private var tfaCache: Map[Int, tfa.lattice.Elem] = Map.empty
- private var analyzedMethod: IMethod = null
+ private var analyzedMethod: IMethod = NoIMethod
/* Blocks that need to be analyzed */
private var todoBlocks: List[BasicBlock] = Nil
@@ -110,7 +110,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
* inlined blocks, so worst case scenario we double the size of the code
*/
private def applyMethod(method: IMethod): Unit = {
- if (method.code ne null) {
+ if (method.hasCode) {
// create the list of starting blocks
todoBlocks = global.icodes.linearizer.linearize(method)
@@ -127,7 +127,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
todoBlocks = Nil
// Type flow analysis cleanup
- analyzedMethod = null
+ analyzedMethod = NoIMethod
tfaCache = Map.empty
//TODO: Need a way to clear tfa structures
}
@@ -151,7 +151,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
* - we change the THROW exception to the new Clear stack + JUMP code
*/
for {
- (instr @ THROW(clazz), index) <- bblock.zipWithIndex
+ (instr @ THROW(clazz), index) <- bblock.iterator.zipWithIndex
// Decide if any handler fits this exception
// If not, then nothing to do, we cannot determine statically which handler will catch the exception
(handler, caughtException) <- findExceptionHandler(toTypeKind(clazz.tpe), bblock.exceptionSuccessors)
@@ -181,7 +181,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
if (!canReplaceHandler) {
currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
- " block:\n" + bblock.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
+ " block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
"before instruction index " + index)
}
else {
@@ -261,13 +261,13 @@ abstract class InlineExceptionHandlers extends SubComponent {
private def getTypesAtBlockEntry(bblock: BasicBlock): tfa.lattice.Elem = {
// lazily perform tfa, because it's expensive
// cache results by block label, as rewriting the code messes up the block's hashCode
- if (analyzedMethod eq null) {
+ if (analyzedMethod eq NoIMethod) {
analyzedMethod = bblock.method
tfa.init(bblock.method)
tfa.run
log(" performed tfa on method: " + bblock.method)
- for (block <- bblock.method.code.blocks.sortBy(_.label))
+ for (block <- bblock.method.blocks.sortBy(_.label))
tfaCache += block.label -> tfa.in(block)
}
@@ -360,7 +360,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
val copy = handler.code.newBlock
- copy.emitOnly(handler drop dropCount: _*)
+ copy.emitOnly(handler.iterator drop dropCount toSeq: _*)
// extend the handlers of the handler to the copy
for (parentHandler <- handler.method.exh ; if parentHandler covers handler) {
@@ -382,7 +382,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
case _ =>
currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
- handler.mkString("\n"))
+ handler.iterator.mkString("\n"))
None
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 74eb450960..e3d21011d1 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -9,6 +9,7 @@ package backend.opt
import scala.collection.mutable
import scala.tools.nsc.symtab._
+import scala.tools.nsc.util.{ NoSourceFile }
/**
* @author Iulian Dragos
@@ -84,9 +85,9 @@ abstract class Inliners extends SubComponent {
/* fresh name counter */
val fresh = perRunCaches.newMap[String, Int]() withDefaultValue 0
- def freshName(s: String) = {
+ def freshName(s: String): TermName = {
fresh(s) += 1
- s + fresh(s)
+ newTermName(s + fresh(s))
}
private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
@@ -111,8 +112,8 @@ abstract class Inliners extends SubComponent {
private val inlinedMethodCount = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
def analyzeMethod(m: IMethod): Unit = {
- var sizeBeforeInlining = if (m.code ne null) m.code.blockCount else 0
- var instrBeforeInlining = if (m.code ne null) m.code.instructionCount else 0
+ var sizeBeforeInlining = if (m.hasCode) m.code.blockCount else 0
+ var instrBeforeInlining = if (m.hasCode) m.code.instructionCount else 0
var retry = false
var count = 0
fresh.clear()
@@ -210,11 +211,11 @@ abstract class Inliners extends SubComponent {
if (caller.inline) {
log("Not inlining into " + caller.sym.originalName.decode + " because it is marked @inline.")
}
- else if (caller.hasCode) {
+ else if (caller.m.hasCode) {
log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
tfa init m
tfa.run
- caller.linearized foreach { bb =>
+ caller.m.linearizedBlocks() foreach { bb =>
info = tfa in bb
breakable {
@@ -248,10 +249,9 @@ abstract class Inliners extends SubComponent {
}
private def isMonadicMethod(sym: Symbol) = {
- val (origName, _, _) = nme.splitSpecializedName(sym.name)
- origName match {
+ nme.unspecializedName(sym.name) match {
case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
- case _ => false
+ case _ => false
}
}
@@ -312,18 +312,16 @@ abstract class Inliners extends SubComponent {
def isMonadic = isMonadicMethod(sym)
def handlers = m.exh
- def blocks = if (m.code eq null) sys.error("blocks = null + " + m) else m.code.blocks
+ def blocks = m.blocks
def locals = m.locals
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
- def instructions = blocks.flatten
+ def instructions = m.code.instructions
def linearized = linearizer linearize m
def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
- def hasCode = m.code != null
- def hasSourceFile = m.sourceFile != null
def hasHandlers = handlers.nonEmpty
def hasNonFinalizerHandler = handlers exists {
case _: Finalizer => true
@@ -458,7 +456,7 @@ abstract class Inliners extends SubComponent {
if (retVal ne null)
caller addLocal retVal
- inc.blocks foreach { b =>
+ inc.m foreachBlock { b =>
inlinedBlock += (b -> newBlock())
inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
}
@@ -476,11 +474,11 @@ abstract class Inliners extends SubComponent {
blockEmit(STORE_LOCAL(inlinedThis))
// jump to the start block of the callee
- blockEmit(JUMP(inlinedBlock(inc.m.code.startBlock)))
+ blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
block.close
// duplicate the other blocks in the callee
- linearizer linearize inc.m foreach { bb =>
+ inc.m.linearizedBlocks() foreach { bb =>
var info = a in bb
def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
@@ -512,23 +510,23 @@ abstract class Inliners extends SubComponent {
}
def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.hasCode && shouldInline && isSafeToInline(stack)
+ !sameSymbols && inc.m.hasCode && shouldInline && isSafeToInline(stack)
def logFailure(stack: TypeStack) = log(
"""|inline failed for %s:
| pair.sameSymbols: %s
| inc.numInlined < 2: %s
- | inc.hasCode: %s
+ | inc.m.hasCode: %s
| isSafeToInline: %s
| shouldInline: %s
""".stripMargin.format(
inc.m, sameSymbols, inc.numInlined < 2,
- inc.hasCode, isSafeToInline(stack), shouldInline
+ inc.m.hasCode, isSafeToInline(stack), shouldInline
)
)
def failureReason(stack: TypeStack) =
- if (!inc.hasCode) "bytecode was unavailable"
+ if (!inc.m.hasCode) "bytecode was unavailable"
else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
else "of a bug (run with -Ylog:inline -Ydebug for more information)"
@@ -551,14 +549,14 @@ abstract class Inliners extends SubComponent {
*/
def isSafeToInline(stack: TypeStack): Boolean = {
def makePublic(f: Symbol): Boolean =
- inc.hasSourceFile && (f.isSynthetic || f.isParamAccessor) && {
+ (inc.m.sourceFile ne NoSourceFile) && (f.isSynthetic || f.isParamAccessor) && {
debuglog("Making not-private symbol out of synthetic: " + f)
f setNotFlag Flags.PRIVATE
true
}
- if (!inc.hasCode || inc.isRecursive)
+ if (!inc.m.hasCode || inc.isRecursive)
return false
val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
@@ -611,7 +609,7 @@ abstract class Inliners extends SubComponent {
* - it's good to inline closures functions.
* - it's bad (useless) to inline inside bridge methods
*/
- private def neverInline = caller.isBridge || !inc.hasCode || inc.noinline
+ private def neverInline = caller.isBridge || !inc.m.hasCode || inc.noinline
private def alwaysInline = inc.inline
def shouldInline: Boolean = !neverInline && (alwaysInline || {
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 4c7263ef69..089ef9cf35 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,8 +18,8 @@ abstract class Changes {
abstract class Change
- private lazy val annotationsChecked =
- List(definitions.getClass("scala.specialized")) // Any others that should be checked?
+ private lazy val annotationsChecked =
+ List(definitions.SpecializedClass) // Any others that should be checked?
private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD |
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 5a510803ed..9a025b0d14 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -96,7 +96,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
- reporter.info(NoPosition, "No documentation generated with unsucessful compiler run", false)
+ reporter.info(null, "No documentation generated with unsucessful compiler run", false)
case _: ClassNotFoundException =>
()
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index a3b4dc4337..7eb8c393f3 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -100,11 +100,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (inTpl == null) None else thisFactory.comment(sym, inTpl)
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
- def inDefinitionTemplates =
- if (inTpl == null)
- makeRootPackage.toList
- else
- makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ def inDefinitionTemplates = this match {
+ case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
+ mb.useCaseOf.get.inDefinitionTemplates
+ case _ =>
+ if (inTpl == null)
+ makeRootPackage.toList
+ else
+ makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ }
def visibility = {
if (sym.isPrivateLocal) PrivateInInstance()
else if (sym.isProtectedLocal) ProtectedInInstance()
@@ -119,14 +123,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else Public()
}
}
- def flags = {
+ def flags = {
val fgs = mutable.ListBuffer.empty[Paragraph]
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
- fgs.toList
+ fgs.toList
}
def deprecation =
if (sym.isDeprecated)
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 7fea76c7b1..0fea0a2d92 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -22,8 +22,8 @@ import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
-class Global(settings: Settings, reporter: Reporter, projectName: String = "")
- extends scala.tools.nsc.Global(settings, reporter)
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
+ extends scala.tools.nsc.Global(settings, _reporter)
with CompilerControl
with RangePositions
with ContextTrees
@@ -818,7 +818,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decode.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
val symtpe = pre.memberType(sym) onTypeError ErrorType
matching(sym, symtpe, this(sym.name)) match {
case Some(m) =>
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index 2b6e793c5c..b7a9c7329c 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -101,7 +101,7 @@ trait Picklers { self: Global =>
if (sym1.isOverloaded) {
val index = sym1.alternatives.indexOf(sym)
assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
- buf += index.toString
+ buf += newTermName(index.toString)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 81d4faa36e..1d78cc6e1c 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -37,7 +37,7 @@ object REPL {
reporter = new ConsoleReporter(settings)
val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else {
try {
object compiler extends Global(command.settings, reporter) {
@@ -48,7 +48,7 @@ object REPL {
return
}
if (command.shouldStopWithInfo) {
- reporter.info(null, command.getInfoMessage(compiler), true)
+ reporter.echo(command.getInfoMessage(compiler))
} else {
run(compiler)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
index 2c556656ca..d889cadf47 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
@@ -12,6 +12,7 @@ trait Dossiers {
import intp._
import intp.global._
+ import definitions._
trait Dossier {
def symbol: Symbol
@@ -34,7 +35,7 @@ trait Dossiers {
class TermDossier(val symbol: TermSymbol, val staticType: Type, val value: AnyRef) extends Dossier {
def runtimeClass: JClass = value.getClass
- def runtimeSymbol: Symbol = safeClass(runtimeClass.getName) getOrElse NoSymbol
+ def runtimeSymbol: Symbol = getClassIfDefined(runtimeClass.getName)
def runtimeType: Type = runtimeSymbol.tpe
def runtimeTypeString = TypeStrings.fromClazz(runtimeClass)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index 115cef7f00..9f5fde70d8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -15,6 +15,7 @@ trait ExprTyper {
import repl._
import replTokens.{ Tokenizer }
import global.{ reporter => _, Import => _, _ }
+ import definitions._
import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
import naming.freshInternalVarName
@@ -70,30 +71,29 @@ trait ExprTyper {
// 2) A path loadable via getModule.
// 3) Try interpreting it as an expression.
private var typeOfExpressionDepth = 0
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
+ def typeOfExpression(expr: String, silent: Boolean = true): Type = {
repltrace("typeOfExpression(" + expr + ")")
if (typeOfExpressionDepth > 2) {
repldbg("Terminating typeOfExpression recursion for expression: " + expr)
- return None
+ return NoType
}
- def asQualifiedImport = {
+ def asQualifiedImport: Type = {
val name = expr.takeWhile(_ != '.')
- importedTermNamed(name) flatMap { sym =>
- typeOfExpression(sym.fullName + expr.drop(name.length), true)
- }
+ typeOfExpression(importedTermNamed(name).fullName + expr.drop(name.length), true)
}
- def asModule = safeModule(expr) map (_.tpe)
- def asExpr = {
+ def asModule: Type = getModuleIfDefined(expr).tpe
+ def asExpr: Type = {
val lhs = freshInternalVarName()
val line = "lazy val " + lhs + " =\n" + expr
interpret(line, true) match {
case IR.Success => typeOfExpression(lhs, true)
- case _ => None
+ case _ => NoType
}
}
- def evaluate() = {
+
+ def evaluate(): Type = {
typeOfExpressionDepth += 1
try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
finally typeOfExpressionDepth -= 1
@@ -107,26 +107,27 @@ trait ExprTyper {
if (!silent)
evaluate()
- None
+ NoType
}
}
// Since people will be giving us ":t def foo = 5" even though that is not an
// expression, we have a means of typing declarations too.
- private def typeOfDeclaration(code: String): Option[Type] = {
+ private def typeOfDeclaration(code: String): Type = {
repltrace("typeOfDeclaration(" + code + ")")
val obname = freshInternalVarName()
interpret("object " + obname + " {\n" + code + "\n}\n", true) match {
case IR.Success =>
val sym = symbolOfTerm(obname)
- if (sym == NoSymbol) None else {
+ if (sym == NoSymbol) NoType else {
// TODO: bitmap$n is not marked synthetic.
val decls = sym.tpe.decls.toList filterNot (x => x.isConstructor || x.isPrivate || (x.name.toString contains "$"))
repltrace("decls: " + decls)
- decls.lastOption map (decl => typeCleanser(sym, decl.name))
+ if (decls.isEmpty) NoType
+ else typeCleanser(sym, decls.last.name)
}
case _ =>
- None
+ NoType
}
}
// def compileAndTypeExpr(expr: String): Option[Typer] = {
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index 391d5ab8ee..0dc51d5eb0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -11,6 +11,7 @@ import java.io.{ BufferedReader, FileReader }
import java.util.concurrent.locks.ReentrantLock
import scala.sys.process.Process
import session._
+import scala.util.Properties.{ jdkHome, javaVersion }
import scala.tools.util.{ Signallable, Javap }
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
@@ -51,7 +52,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp.reporter.printMessage(msg)
def isAsync = !settings.Yreplsync.value
- lazy val power = Power(this)
+ lazy val power = new Power(intp, new StdReplVals(this))
+ lazy val NoType = intp.global.NoType
// TODO
// object opt extends AestheticSettings
@@ -253,6 +255,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Power user commands */
lazy val powerCommands: List[LoopCommand] = List(
nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
+ nullary("vals", "gives information about the power mode repl vals", valsCommand),
cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
|:wrap
@@ -283,6 +286,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
history.asStrings takeRight 30 foreach echo
in.redrawLine()
}
+ private def valsCommand(): Result = power.valsDescription
private val typeTransforms = List(
"scala.collection.immutable." -> "immutable.",
@@ -375,14 +379,29 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
+ private def findToolsJar() = {
+ val jdkPath = Directory(jdkHome)
+ val jar = jdkPath / "lib" / "tools.jar" toFile;
+
+ if (jar isFile)
+ Some(jar)
+ else if (jdkPath.isDirectory)
+ jdkPath.deepFiles find (_.name == "tools.jar")
+ else None
+ }
private def addToolsJarToLoader() = {
- val javaHome = Directory(sys.env("JAVA_HOME"))
- val tools = javaHome / "lib" / "tools.jar"
- if (tools.isFile) {
- echo("Found tools.jar, adding for use by javap.")
- ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ val cl = findToolsJar match {
+ case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ case _ => intp.classLoader
+ }
+ if (Javap.isAvailable(cl)) {
+ repldbg(":javap available.")
+ cl
+ }
+ else {
+ repldbg(":javap unavailable: no tools.jar at " + jdkHome)
+ intp.classLoader
}
- else intp.classLoader
}
protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
@@ -418,9 +437,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// Still todo: modules.
private def typeCommand(line: String): Result = {
if (line.trim == "") ":type <expression>"
- else intp.typeOfExpression(line, false) match {
- case Some(tp) => intp.afterTyper(tp.toString)
- case _ => "" // the error message was already printed
+ else {
+ val tp = intp.typeOfExpression(line, false)
+ if (tp == NoType) "" // the error message was already printed
+ else intp.afterTyper(tp.toString)
}
}
private def warningsCommand(): Result = {
@@ -429,14 +449,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def javapCommand(line: String): Result = {
if (javap == null)
- return ":javap unavailable on this platform."
- if (line == "")
- return ":javap [-lcsvp] [path1 path2 ...]"
-
- javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
- else res.show()
- }
+ ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ else if (javaVersion startsWith "1.7")
+ ":javap not yet working with java 1.7"
+ else if (line == "")
+ ":javap [-lcsvp] [path1 path2 ...]"
+ else
+ javap(words(line)) foreach { res =>
+ if (res.isError) return "Failed: " + res.value
+ else res.show()
+ }
}
private def keybindingsCommand(): Result = {
if (in.keyBindings.isEmpty) "Key bindings unavailable."
@@ -465,13 +487,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
case wrapper :: Nil =>
intp.typeOfExpression(wrapper) match {
- case Some(PolyType(List(targ), MethodType(List(arg), restpe))) =>
+ case PolyType(List(targ), MethodType(List(arg), restpe)) =>
intp setExecutionWrapper intp.pathToTerm(wrapper)
"Set wrapper to '" + wrapper + "'"
- case Some(x) =>
- failMsg + "\nFound: " + x
- case _ =>
- failMsg + "\nFound: <unknown>"
+ case tp =>
+ failMsg + (
+ if (tp == g.NoType) "\nFound: <unknown>"
+ else "\nFound: <unknown>"
+ )
}
case _ => failMsg
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 861f617ed6..0f0ab69e6d 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -186,13 +186,22 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
lazy val compiler: global.type = global
import global._
- import definitions.{ ScalaPackage, JavaLangPackage, PredefModule, RootClass }
+ import definitions.{
+ ScalaPackage, JavaLangPackage, PredefModule, RootClass,
+ getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass
+ }
private implicit def privateTreeOps(t: Tree): List[Tree] = {
(new Traversable[Tree] {
def foreach[U](f: Tree => U): Unit = t foreach { x => f(x) ; () }
}).toList
}
+
+ implicit def installReplTypeOps(tp: Type): ReplTypeOps = new ReplTypeOps(tp)
+ class ReplTypeOps(tp: Type) {
+ def orElse(other: => Type): Type = if (tp ne NoType) tp else other
+ def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+ }
// TODO: If we try to make naming a lazy val, we run into big time
// scalac unhappiness with what look like cycles. It has not been easy to
@@ -201,12 +210,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val global: imain.global.type = imain.global
} with Naming {
// make sure we don't overwrite their unwisely named res3 etc.
- override def freshUserVarName(): String = {
- val name = super.freshUserVarName()
- if (definedNameMap contains name) freshUserVarName()
+ def freshUserTermName(): TermName = {
+ val name = newTermName(freshUserVarName())
+ if (definedNameMap contains name) freshUserTermName()
else name
}
- def isInternalVarName(name: Name): Boolean = isInternalVarName("" + name)
+ def isUserTermName(name: Name) = isUserVarName("" + name)
+ def isInternalTermName(name: Name) = isInternalVarName("" + name)
}
import naming._
@@ -356,7 +366,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private def mostRecentlyHandledTree: Option[Tree] = {
prevRequests.reverse foreach { req =>
req.handlers.reverse foreach {
- case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name) => return Some(x.member)
+ case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
case _ => ()
}
}
@@ -498,7 +508,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
trees.last match {
case _:Assign => // we don't want to include assignments
case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
- val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
+ val varName = if (synthetic) freshInternalVarName() else ("" + freshUserTermName())
val rewrittenLine = (
// In theory this would come out the same without the 1-specific test, but
// it's a cushion against any more sneaky parse-tree position vs. code mismatches:
@@ -640,7 +650,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def directBind(name: String, boundType: String, value: Any): IR.Result = {
val result = bind(name, boundType, value)
if (result == IR.Success)
- directlyBoundNames += name
+ directlyBoundNames += newTermName(name)
result
}
def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
@@ -648,7 +658,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def rebind(p: NamedParam): IR.Result = {
val name = p.name
- val oldType = typeOfTerm(name) getOrElse { return IR.Error }
+ val oldType = typeOfTerm(name) orElse { return IR.Error }
val newType = p.tpe
val tempName = freshInternalVarName()
@@ -663,7 +673,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
+ def bindValue(x: Any): IR.Result = bindValue("" + freshUserTermName(), x)
def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
@@ -786,7 +796,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* following accessPath into the outer one.
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = definitions.getModule(readPath) // the outermost wrapper
+ val readRoot = getRequiredModule(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot) { (sym, name) =>
if (name == "") sym else
lineAfterTyper(sym.info member newTermName(name))
@@ -1036,16 +1046,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def requestHistoryForName(name: Name): List[Request] =
prevRequests.toList.reverse filter (_.definedNames contains name)
- def safeClass(name: String): Option[Symbol] = {
- try Some(definitions.getClass(newTypeName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
- def safeModule(name: String): Option[Symbol] = {
- try Some(definitions.getModule(newTermName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
def definitionForName(name: Name): Option[MemberHandler] =
requestForName(name) flatMap { req =>
req.handlers find (_.definedNames contains name)
@@ -1057,34 +1057,32 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def classOfTerm(id: String): Option[JClass] =
valueOfTerm(id) map (_.getClass)
- def typeOfTerm(id: String): Option[Type] = newTermName(id) match {
- case nme.ROOTPKG => Some(definitions.RootClass.tpe)
- case name => requestForName(name) flatMap (_.compilerTypeOf get name)
+ def typeOfTerm(id: String): Type = newTermName(id) match {
+ case nme.ROOTPKG => definitions.RootClass.tpe
+ case name => requestForName(name) flatMap (_.compilerTypeOf get name) getOrElse NoType
}
def symbolOfTerm(id: String): Symbol =
requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
- for {
- clazz <- classOfTerm(id)
- tpe <- runtimeTypeOfTerm(id)
- nonAnon <- clazz.supers find (!_.isScalaAnonymous)
- } yield {
- (nonAnon, tpe)
+ classOfTerm(id) flatMap { clazz =>
+ clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
+ (nonAnon, runtimeTypeOfTerm(id))
+ }
}
}
- def runtimeTypeOfTerm(id: String): Option[Type] = {
- for {
- tpe <- typeOfTerm(id)
- clazz <- classOfTerm(id)
- staticSym = tpe.typeSymbol
- runtimeSym <- safeClass(clazz.getName)
- if runtimeSym != staticSym
- if runtimeSym isSubClass staticSym
+ def runtimeTypeOfTerm(id: String): Type = {
+ typeOfTerm(id) andAlso { tpe =>
+ val clazz = classOfTerm(id) getOrElse { return NoType }
+ val staticSym = tpe.typeSymbol
+ val runtimeSym = getClassIfDefined(clazz.getName)
+
+ if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+ runtimeSym.info
+ else NoType
}
- yield runtimeSym.info
}
object replTokens extends {
@@ -1096,16 +1094,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
} with ExprTyper { }
def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
+ def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- }
+
def prettyPrint(code: String) =
replTokens.prettyPrint(exprTyper tokens code)
protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
- def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalVarName
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
def definedTypes = onlyTypes(allDefinedNames)
def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
@@ -1114,35 +1112,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private def findName(name: Name) = definedSymbols find (_.name == name)
- private def missingOpt(op: => Symbol): Option[Symbol] =
- try Some(op)
- catch { case _: MissingRequirementError => None }
- private def missingWrap(op: => Symbol): Symbol =
- try op
- catch { case _: MissingRequirementError => NoSymbol }
-
- def optCompilerClass(name: String) = missingOpt(definitions.getClass(name))
- def optCompilerModule(name: String) = missingOpt(definitions.getModule(name))
- def getCompilerClass(name: String) = missingWrap(definitions.getClass(name))
- def getCompilerModule(name: String) = missingWrap(definitions.getModule(name))
-
/** Translate a repl-defined identifier into a Symbol.
*/
- def apply(name: String): Symbol = {
- val tpname = newTypeName(name)
- (
- findName(tpname)
- orElse findName(tpname.companionName)
- orElse optCompilerClass(name)
- orElse optCompilerModule(name)
- getOrElse NoSymbol
- )
- }
+ def apply(name: String): Symbol =
+ types(name) orElse terms(name)
+
def types(name: String): Symbol = {
- findName(newTypeName(name)) getOrElse getCompilerClass(name)
+ val tpname = newTypeName(name)
+ findName(tpname) getOrElse getClassIfDefined(tpname)
}
def terms(name: String): Symbol = {
- findName(newTermName(name)) getOrElse getCompilerModule(name)
+ val termname = newTypeName(name)
+ findName(termname) getOrElse getModuleIfDefined(termname)
}
/** the previous requests this interpreter has processed */
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index 10e3796404..d34ca8bbca 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -34,8 +34,9 @@ trait Imports {
def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
- def importedTerms = onlyTerms(importHandlers flatMap (_.importedNames))
- def importedTypes = onlyTypes(importHandlers flatMap (_.importedNames))
+ def allImportedNames = importHandlers flatMap (_.importedNames)
+ def importedTerms = onlyTerms(allImportedNames)
+ def importedTypes = onlyTypes(allImportedNames)
/** Types which have been wildcard imported, such as:
* val x = "abc" ; import x._ // type java.lang.String
@@ -49,10 +50,7 @@ trait Imports {
* into the compiler scopes.
*/
def sessionWildcards: List[Type] = {
- importHandlers flatMap {
- case x if x.importsWildcard => x.targetType
- case _ => None
- } distinct
+ importHandlers filter (_.importsWildcard) map (_.targetType) distinct
}
def wildcardTypes = languageWildcards ++ sessionWildcards
@@ -63,14 +61,15 @@ trait Imports {
def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedTermNamed(name: String) = importedTermSymbols find (_.name.toString == name)
+ def importedTermNamed(name: String): Symbol =
+ importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
*/
def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
- val session = importHandlers filter (_.targetType.isDefined) map { mh =>
- (mh.targetType.get.typeSymbol, mh.importedSymbols)
+ val session = importHandlers filter (_.targetType != NoType) map { mh =>
+ (mh.targetType.typeSymbol, mh.importedSymbols)
}
lang ++ session
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index 9c5299b633..d96e8b07fc 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -16,7 +16,7 @@ import collection.mutable.ListBuffer
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
- import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage, getModuleIfDefined }
type ExecResult = Any
import intp.{ debugging, afterTyper }
@@ -24,14 +24,13 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
private var verbosity: Int = 0
def resetVerbosity() = verbosity = 0
- def getType(name: String, isModule: Boolean) = {
- val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
- try Some(f(name).tpe)
- catch { case _: MissingRequirementError => None }
- }
-
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
+ def getSymbol(name: String, isModule: Boolean) = (
+ if (isModule) getModuleIfDefined(name)
+ else getModuleIfDefined(name)
+ )
+ def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
+ def typeOf(name: String) = getType(name, false)
+ def moduleOf(name: String) = getType(name, true)
trait CompilerCompletion {
def tp: Type
@@ -46,9 +45,9 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
private def anyMembers = AnyClass.tpe.nonPrivateMembers
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
- def tos(sym: Symbol) = sym.name.decode.toString
- def memberNamed(s: String) = members find (x => tos(x) == s)
- def hasMethod(s: String) = methods exists (x => tos(x) == s)
+ def tos(sym: Symbol): String = sym.decodedName
+ def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
+ def hasMethod(s: String) = memberNamed(s).isMethod
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
@@ -62,6 +61,13 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def packageNames = packages map tos
def aliasNames = aliases map tos
}
+
+ object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+ override def memberNamed(s: String) = NoSymbol
+ override def members = Nil
+ override def follow(s: String) = None
+ override def alternativesFor(id: String) = Nil
+ }
object TypeMemberCompletion {
def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
@@ -90,7 +96,8 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
}
def apply(tp: Type): TypeMemberCompletion = {
- if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+ if (tp eq NoType) NoTypeCompletion
+ else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
else new TypeMemberCompletion(tp)
}
def imported(tp: Type) = new ImportCompletion(tp)
@@ -118,7 +125,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
debugging(tp + " completions ==> ")(filtered(memberNames))
override def follow(s: String): Option[CompletionAware] =
- debugging(tp + " -> '" + s + "' ==> ")(memberNamed(s) map (x => TypeMemberCompletion(x.tpe)))
+ debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
override def alternativesFor(id: String): List[String] =
debugging(id + " alternatives ==> ") {
@@ -155,28 +162,29 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
object ids extends CompletionAware {
override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
// now we use the compiler for everything.
- override def follow(id: String) = {
- if (completions(0) contains id) {
- intp typeOfExpression id map { tpe =>
- def default = TypeMemberCompletion(tpe)
-
- // only rebinding vals in power mode for now.
- if (!isReplPower) default
- else intp runtimeClassAndTypeOfTerm id match {
- case Some((clazz, runtimeType)) =>
- val sym = intp.symbolOfTerm(id)
- if (sym.isStable) {
- val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
- TypeMemberCompletion(tpe, runtimeType, param)
- }
- else default
- case _ =>
- default
+ override def follow(id: String): Option[CompletionAware] = {
+ if (!completions(0).contains(id))
+ return None
+
+ val tpe = intp typeOfExpression id
+ if (tpe == NoType)
+ return None
+
+ def default = Some(TypeMemberCompletion(tpe))
+
+ // only rebinding vals in power mode for now.
+ if (!isReplPower) default
+ else intp runtimeClassAndTypeOfTerm id match {
+ case Some((clazz, runtimeType)) =>
+ val sym = intp.symbolOfTerm(id)
+ if (sym.isStable) {
+ val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+ Some(TypeMemberCompletion(tpe, runtimeType, param))
}
- }
+ else default
+ case _ =>
+ default
}
- else
- None
}
override def toString = "<repl ids> (%s)".format(completions(0).size)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index b64f14e929..c742ab89c0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -169,7 +169,7 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
- def targetType = intp.typeOfExpression("" + expr)
+ def targetType: Type = intp.typeOfExpression("" + expr)
override def isLegalTopLevel = true
def createImportForName(name: Name): String = {
@@ -199,10 +199,10 @@ trait MemberHandlers {
def importedSymbols = individualSymbols ++ wildcardSymbols
lazy val individualSymbols: List[Symbol] =
- atPickler(targetType.toList flatMap (tp => individualNames map (tp nonPrivateMember _)))
+ atPickler(individualNames map (targetType nonPrivateMember _))
lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) atPickler(targetType.toList flatMap (_.nonPrivateMembers))
+ if (importsWildcard) atPickler(targetType.nonPrivateMembers)
else Nil
/** Complete list of names imported by a wildcard */
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
index 7377953263..8e215cf63b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
@@ -84,7 +84,7 @@ trait Naming {
var x = 0
() => { x += 1 ; x }
}
- def freshUserVarName() = userVar()
+ def freshUserVarName() = userVar()
def freshInternalVarName() = internalVar()
def resetAllCreators() {
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index db2f9573ec..b4a9b9b0e3 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -15,54 +15,12 @@ import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
-trait SharesGlobal {
- type GlobalType <: Global
- val global: GlobalType
-
- // This business gets really old:
- //
- // found : power.intp.global.Symbol
- // required: global.Symbol
- //
- // Have tried many ways to cast it aside, this is the current winner.
- // Todo: figure out a way to abstract over all the type members.
- type AnySymbol = Global#Symbol
- type AnyType = Global#Type
- type AnyName = Global#Name
- type AnyTree = Global#Tree
-
- type Symbol = global.Symbol
- type Type = global.Type
- type Name = global.Name
- type Tree = global.Tree
-
- implicit def upDependentSymbol(x: AnySymbol): Symbol = x.asInstanceOf[Symbol]
- implicit def upDependentType(x: AnyType): Type = x.asInstanceOf[Type]
- implicit def upDependentName(x: AnyName): Name = x.asInstanceOf[Name]
- implicit def upDependentTree(x: AnyTree): Tree = x.asInstanceOf[Tree]
-}
-
-object Power {
- def apply(intp: IMain): Power = apply(null, intp)
- def apply(repl: ILoop): Power = apply(repl, repl.intp)
- def apply(repl: ILoop, intp: IMain): Power =
- new Power(repl, intp) {
- type GlobalType = intp.global.type
- final val global: intp.global.type = intp.global
- }
-}
-
/** A class for methods to be injected into the intp in power mode.
*/
-abstract class Power(
- val repl: ILoop,
- val intp: IMain
-) extends SharesGlobal {
- import intp.{
- beQuietDuring, typeOfExpression, getCompilerClass, getCompilerModule,
- interpret, parse
- }
- import global._
+class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) {
+ import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
+ import intp.global._
+ import definitions.{ manifestToType, getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -108,7 +66,7 @@ abstract class Power(
}
class PackageSlurper(pkgName: String) extends SymSlurper {
- val pkgSymbol = getCompilerModule(pkgName)
+ val pkgSymbol = getModuleIfDefined(pkgName)
val modClass = pkgSymbol.moduleClass
/** Looking for dwindling returns */
@@ -130,11 +88,11 @@ abstract class Power(
private def customInit = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
def banner = customBanner getOrElse """
- |** Power User mode enabled - BEEP BOOP SPIZ **
+ |** Power User mode enabled - BEEP WHIR GYVE **
|** :phase has been set to 'typer'. **
|** scala.tools.nsc._ has been imported **
- |** global._ and definitions._ also imported **
- |** Try :help, vals.<tab>, power.<tab> **
+ |** global._, definitions._ also imported **
+ |** Try :help, :vals, power.<tab> **
""".stripMargin.trim
private def initImports = List(
@@ -142,8 +100,9 @@ abstract class Power(
"scala.collection.JavaConverters._",
"intp.global.{ error => _, _ }",
"definitions.{ getClass => _, _ }",
- "power.Implicits._",
- "power.rutil._"
+ "power.rutil._",
+ "replImplicits._",
+ "treedsl.CODE._"
)
def init = customInit match {
@@ -155,12 +114,23 @@ abstract class Power(
*/
def unleash(): Unit = beQuietDuring {
// First we create the ReplVals instance and bind it to $r
- intp.bind("$r", new ReplVals(repl))
+ intp.bind("$r", replVals)
// Then we import everything from $r.
intp interpret ("import " + intp.pathToTerm("$r") + "._")
// And whatever else there is to do.
init.lines foreach (intp interpret _)
}
+ def valsDescription: String = {
+ def to_str(m: Symbol) = "%12s %s".format(
+ m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
+
+ ( rutil.info[ReplValsImpl].declares
+ filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
+ sortBy (_.decodedName)
+ map to_str
+ mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
+ )
+ }
trait LowPriorityInternalInfo {
implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
@@ -180,25 +150,7 @@ abstract class Power(
private def symbol = symbol_
private def name = name_
- // Would love to have stuff like existential types working,
- // but very unfortunately those manifests just stuff the relevant
- // information into the toString method. Boo.
- private def manifestToType(m: Manifest[_]): Type = m match {
- case x: AnyValManifest[_] =>
- getCompilerClass("scala." + x).tpe
- case _ =>
- val name = m.erasure.getName
- if (name endsWith nme.MODULE_SUFFIX_STRING) getCompilerModule(name dropRight 1).tpe
- else {
- val sym = getCompilerClass(name)
- val args = m.typeArguments
-
- if (args.isEmpty) sym.tpe
- else typeRef(NoPrefix, sym, args map manifestToType)
- }
- }
-
- def symbol_ : Symbol = getCompilerClass(erasure.getName)
+ def symbol_ : Symbol = getClassIfDefined(erasure.getName)
def tpe_ : Type = manifestToType(man)
def name_ : Name = symbol.name
def companion = symbol.companionSymbol
@@ -207,9 +159,10 @@ abstract class Power(
def owner = symbol.owner
def owners = symbol.ownerChain drop 1
def defn = symbol.defString
+ def decls = symbol.info.decls
- def declares = members filter (_.owner == symbol)
- def inherits = members filterNot (_.owner == symbol)
+ def declares = decls.toList
+ def inherits = members filterNot (declares contains _)
def types = members filter (_.name.isTypeName)
def methods = members filter (_.isMethod)
def overrides = declares filter (_.isOverride)
@@ -222,7 +175,7 @@ abstract class Power(
def bts = info.baseTypeSeq.toList
def btsmap = bts map (x => (x, x.decls.toList)) toMap
def pkgName = Option(erasure.getPackage) map (_.getName)
- def pkg = pkgName map getCompilerModule getOrElse NoSymbol
+ def pkg = pkgName map getModuleIfDefined getOrElse NoSymbol
def pkgmates = pkg.tpe.members
def pkgslurp = pkgName match {
case Some(name) => new PackageSlurper(name) slurp()
@@ -232,8 +185,8 @@ abstract class Power(
def whoHas(name: String) = bts filter (_.decls exists (_.name.toString == name))
def <:<[U: Manifest](other: U) = tpe <:< InternalInfo[U].tpe
- def lub[U: Manifest](other: U) = global.lub(List(tpe, InternalInfo[U].tpe))
- def glb[U: Manifest](other: U) = global.glb(List(tpe, InternalInfo[U].tpe))
+ def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, InternalInfo[U].tpe))
+ def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, InternalInfo[U].tpe))
def shortClass = erasure.getName split "[$.]" last
override def toString = value match {
@@ -336,7 +289,7 @@ abstract class Power(
def pp() { intp prettyPrint slurp() }
}
- protected trait Implicits1 {
+ trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
@@ -366,11 +319,10 @@ abstract class Power(
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
}
- object Implicits extends Implicits2 { }
trait ReplUtilities {
- def module[T: Manifest] = getCompilerModule(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
- def clazz[T: Manifest] = getCompilerClass(manifest[T].erasure.getName)
+ def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
+ def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
def info[T: Manifest] = InternalInfo[T]
def ?[T: Manifest] = InternalInfo[T]
def url(s: String) = {
@@ -395,17 +347,13 @@ abstract class Power(
}
lazy val rutil: ReplUtilities = new ReplUtilities { }
-
- lazy val phased: Phased = new Phased with SharesGlobal {
- type GlobalType = Power.this.global.type
- final val global: Power.this.global.type = Power.this.global
- }
+ lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def context(code: String) = analyzer.rootContext(unit(code))
def source(code: String) = new BatchSourceFile("<console>", code)
def unit(code: String) = new CompilationUnit(source(code))
def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String): Type = intp.typeOfExpression(id) getOrElse NoType
+ def typeOf(id: String) = intp.typeOfExpression(id)
override def toString = """
|** Power mode status **
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 2f2489b242..6e5dec4205 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -6,15 +6,68 @@
package scala.tools.nsc
package interpreter
-final class ReplVals(r: ILoop) {
- lazy val repl = r
- lazy val intp = r.intp
- lazy val power = r.power
- lazy val reader = r.in
- lazy val vals = this
- lazy val global = intp.global
- lazy val isettings = intp.isettings
- lazy val completion = reader.completion
- lazy val history = reader.history
- lazy val phased = power.phased
+/** A class which the repl utilizes to expose predefined objects.
+ * The base implementation is empty; the standard repl implementation
+ * is StdReplVals.
+ */
+abstract class ReplVals { }
+
+class StdReplVals(final val r: ILoop) extends ReplVals {
+ final lazy val repl = r
+ final lazy val intp = r.intp
+ final lazy val power = r.power
+ final lazy val reader = r.in
+ final lazy val vals = this
+ final lazy val global: intp.global.type = intp.global
+ final lazy val isettings = intp.isettings
+ final lazy val completion = reader.completion
+ final lazy val history = reader.history
+ final lazy val phased = power.phased
+ final lazy val analyzer = global.analyzer
+
+ final lazy val treedsl = new { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+ final lazy val typer = analyzer.newTyper(
+ analyzer.rootContext(
+ power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
+ )
+ )
+
+ final lazy val replImplicits = new power.Implicits2 {
+ import intp.global._
+
+ private val manifestFn = ReplVals.mkManifestToType[intp.global.type](global)
+ implicit def mkManifestToType(sym: Symbol) = manifestFn(sym)
+ }
+
+ def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
+}
+
+object ReplVals {
+ /** Latest attempt to work around the challenge of foo.global.Type
+ * not being seen as the same type as bar.global.Type even though
+ * the globals are the same. Dependent method types to the rescue.
+ */
+ def mkManifestToType[T <: Global](global: T) = {
+ import global._
+ import definitions._
+
+ /** We can't use definitions.manifestToType directly because we're passing
+ * it to map and the compiler refuses to perform eta expansion on a method
+ * with a dependent return type. (Can this be relaxed?) To get around this
+ * I have this forwarder which widens the type and then cast the result back
+ * to the dependent type.
+ */
+ def manifestToType(m: OptManifest[_]): Global#Type =
+ definitions.manifestToType(m)
+
+ class AppliedTypeFromManifests(sym: Symbol) {
+ def apply[M](implicit m1: Manifest[M]): Type =
+ appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x).asInstanceOf[Type]))
+
+ def apply[M1, M2](implicit m1: Manifest[M1], m2: Manifest[M2]): Type =
+ appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type]))
+ }
+
+ (sym: Symbol) => new AppliedTypeFromManifests(sym)
+ }
}
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index 90cb827280..01f3619ba7 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -88,7 +88,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
val entries = mutable.HashMap[String, Entry]()
override def isDirectory = true
- override def iterator = entries.valuesIterator
+ override def iterator: Iterator[Entry] = entries.valuesIterator
override def lookupName(name: String, directory: Boolean): Entry = {
if (directory) entries(name + "/")
else entries(name)
@@ -110,7 +110,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
- def iterator = {
+ def iterator: Iterator[Entry] = {
val zipFile = new ZipFile(file)
val root = new DirEntry("/")
val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
@@ -151,13 +151,17 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) {
}
final class URLZipArchive(val url: URL) extends ZipArchive(null) {
- def iterator = {
+ def iterator: Iterator[Entry] = {
val root = new DirEntry("/")
val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input)))
@tailrec def loop() {
val zipEntry = in.getNextEntry()
+ class EmptyFileEntry() extends Entry(zipEntry.getName) {
+ override def toByteArray: Array[Byte] = null
+ override def sizeOption = Some(0)
+ }
class FileEntry() extends Entry(zipEntry.getName) {
override val toByteArray: Array[Byte] = {
val len = zipEntry.getSize().toInt
@@ -186,7 +190,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
if (zipEntry.isDirectory)
dir
else {
- val f = new FileEntry()
+ val f = if (zipEntry.getSize() == 0) new EmptyFileEntry() else new FileEntry()
dir.entries(f.name) = f
}
in.closeEntry()
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 742e9e03ca..0d7afdc4ec 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -547,7 +547,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(newTermName("runtime")), tpnme.AnnotationDefaultATTR), List(List()))
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), List(List()))
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
@@ -794,9 +794,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
accept(INTERFACE)
val pos = in.currentPos
val name = identForType()
- val parents = List(scalaDot(newTypeName("Annotation")),
- Select(javaLangDot(newTermName("annotation")), newTypeName("Annotation")),
- scalaDot(newTypeName("ClassfileAnnotation")))
+ val parents = List(scalaDot(tpnme.Annotation),
+ Select(javaLangDot(nme.annotation), tpnme.Annotation),
+ scalaDot(tpnme.ClassfileAnnotation))
val (statics, body) = typeBody(AT, name)
def getValueMethodType(tree: Tree) = tree match {
case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
@@ -838,18 +838,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
val predefs = List(
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("values"), List(),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.values, List(),
List(List()),
arrayOf(enumType),
blankExpr),
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("valueOf"), List(),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
List(List(makeParam("x", TypeTree(StringClass.tpe)))),
enumType,
blankExpr))
accept(RBRACE)
val superclazz =
- AppliedTypeTree(javaLangDot(newTypeName("Enum")), List(enumType))
+ AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
ClassDef(mods, name, List(),
makeTemplate(superclazz :: interfaces, body))
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 420fba911b..e5748b7c23 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -36,6 +36,9 @@ trait Patterns extends ast.TreeDSL {
// case _ => NoSymbol
// }
+ private lazy val dummyMethod =
+ new TermSymbol(NoSymbol, NoPosition, newTermName("matching$dummy"))
+
// Fresh patterns
def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
@@ -191,9 +194,9 @@ trait Patterns extends ast.TreeDSL {
// As yet I can't testify this is doing any good relative to using
// tpt.tpe, but it doesn't seem to hurt either.
private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = typeRef(NoPrefix, ConsClass, List(packedType))
- private lazy val listRef = typeRef(NoPrefix, ListClass, List(packedType))
- private lazy val seqRef = typeRef(NoPrefix, SeqClass, List(packedType))
+ private lazy val consRef = appliedType(ConsClass.typeConstructor, List(packedType))
+ private lazy val listRef = appliedType(ListClass.typeConstructor, List(packedType))
+ private lazy val seqRef = appliedType(SeqClass.typeConstructor, List(packedType))
private def thisSeqRef = {
val tc = (tree.tpe baseType SeqClass).typeConstructor
@@ -205,7 +208,6 @@ trait Patterns extends ast.TreeDSL {
private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
case _ =>
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "matching$dummy")
val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
Apply(TypeTree(consType), List(hd, tl)) setType consRef
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 12306606e4..f19a285d7c 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -47,14 +47,23 @@ abstract class Reporter {
finally incompleteHandler = saved
}
- var cancelled = false
- def hasErrors = ERROR.count > 0 || cancelled
- def hasWarnings = WARNING.count > 0
+ var cancelled = false
+ def hasErrors = ERROR.count > 0 || cancelled
+ def hasWarnings = WARNING.count > 0
- def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
- def warning(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, WARNING, false)) }
- def error(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, ERROR, false)) }
- def incompleteInputError(pos: Position, msg: String ) {
+ /** For sending a message which should not be labeled as a warning/error,
+ * but also shouldn't require -verbose to be visible.
+ */
+ def echo(msg: String): Unit = info(NoPosition, msg, true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+
+ /** Informational messages, suppressed unless -verbose or force=true. */
+ def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
+
+ /** Warnings and errors. */
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
index 800af55861..f55d0684c8 100644
--- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
@@ -13,8 +13,6 @@ import scala.tools.util.AbstractTimer
* timings.
*/
class ReporterTimer(reporter: Reporter) extends AbstractTimer {
-
def issue(msg: String, duration: Long) =
reporter.info(null, "[" + msg + " in " + duration + "ms]", false)
-
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 7fcfb6fc6d..efd5323ce2 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -67,6 +67,8 @@ trait ScalaSettings extends AbsScalaSettings
val future = BooleanSetting ("-Xfuture", "Turn on future language features.")
val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
+ val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+ val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index 9fbf649525..a47bfda8c1 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package symtab
import ast.{Trees, TreePrinters, DocComments}
-
import util._
-abstract class SymbolTable extends reflect.internal.SymbolTable
+abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index e67ce90cfa..a158012f9f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -365,6 +365,13 @@ abstract class ClassfileParser {
case arr: Type => Constant(arr)
}
}
+
+ private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
+ val decodedLength = ByteCodecs.decode(bytes)
+ val arr = new Array[Byte](decodedLength)
+ System.arraycopy(bytes, 0, arr, 0, decodedLength)
+ arr
+ }
def getBytes(index: Int): Array[Byte] = {
if (index <= 0 || len <= index) errorBadIndex(index)
@@ -372,11 +379,10 @@ abstract class ClassfileParser {
if (value eq null) {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
+ val len = in.getChar(start + 1)
val bytes = new Array[Byte](len)
- Array.copy(in.buf, start + 3, bytes, 0, len)
- val decodedLength = ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ System.arraycopy(in.buf, start + 3, bytes, 0, len)
+ value = getSubArray(bytes)
values(index) = value
}
value
@@ -394,9 +400,7 @@ abstract class ClassfileParser {
val len = in.getChar(start + 1)
bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
}
- val bytes = bytesBuffer.toArray
- val decodedLength = ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ value = getSubArray(bytesBuffer.toArray)
values(indices.head) = value
}
value
@@ -435,19 +439,20 @@ abstract class ClassfileParser {
/** Return the class symbol of the given name. */
def classNameToSymbol(name: Name): Symbol = {
- def loadClassSymbol(name: Name) = {
- val s = name.toString
- val file = global.classPath findSourceFile s getOrElse {
- MissingRequirementError.notFound("class " + s)
+ def loadClassSymbol(name: Name): Symbol = {
+ val file = global.classPath findSourceFile ("" +name) getOrElse {
+ warning("Class " + name + " not found - continuing with a stub.")
+ return NoSymbol.newClass(name.toTypeName)
}
- val completer = new global.loaders.ClassfileLoader(file)
+ val completer = new global.loaders.ClassfileLoader(file)
var owner: Symbol = definitions.RootClass
- var sym: Symbol = NoSymbol
- var ss: String = null
- var start = 0
- var end = s indexOf '.'
+ var sym: Symbol = NoSymbol
+ var ss: Name = null
+ var start = 0
+ var end = name indexOf '.'
+
while (end > 0) {
- ss = s.substring(start, end)
+ ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
sym = owner.newPackage(NoPosition, ss) setInfo completer
@@ -456,17 +461,16 @@ abstract class ClassfileParser {
}
owner = sym.moduleClass
start = end + 1
- end = s.indexOf('.', start)
+ end = name.indexOf('.', start)
}
- ss = s substring start
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newClass(NoPosition, newTypeName(ss)) setInfo completer
- owner.info.decls enter sym
- if (settings.debug.value && settings.verbose.value)
+ ss = name.subName(0, start)
+ owner.info.decls lookup ss orElse {
+ sym = owner.newClass(NoPosition, ss.toTypeName) setInfo completer
+ if (opt.verboseDebug)
println("loaded "+sym+" from file "+file)
+
+ owner.info.decls enter sym
}
- sym
}
def lookupClass(name: Name) = try {
@@ -682,8 +686,6 @@ abstract class ClassfileParser {
while (!isDelimiter(sig(index))) { index += 1 }
sig.subName(start, index)
}
- def existentialType(tparams: List[Symbol], tp: Type): Type =
- if (tparams.isEmpty) tp else ExistentialType(tparams, tp)
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig(index); index += 1
tag match {
@@ -716,7 +718,12 @@ abstract class ClassfileParser {
index += 1
val bounds = variance match {
case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
- case '-' => TypeBounds.lower(sig2type(tparams, skiptvs))
+ case '-' =>
+ val tp = sig2type(tparams, skiptvs)
+ // sig2type seems to return AnyClass regardless of the situation:
+ // we don't want Any as a LOWER bound.
+ if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
val newtparam = sym.newExistential(sym.pos, newTypeName("?"+i)) setInfo bounds
@@ -729,14 +736,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0)
- existentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
+ newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
} else if (classSym.isMonomorphicType) {
tp
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = typeRef(pre, classSym, eparams.map(_.tpe))
- val res = existentialType(eparams, t)
+ val res = newExistentialType(eparams, t)
if (settings.debug.value && settings.verbose.value)
println("raw type " + classSym + " -> " + res)
res
@@ -970,7 +977,7 @@ abstract class ClassfileParser {
Some(ScalaSigBytes(pool getBytes in.nextChar))
}
- def parseScalaLongSigBytes: Option[ScalaSigBytes] = try {
+ def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
val tag = in.nextByte.toChar
assert(tag == ARRAY_TAG)
val stringCount = in.nextChar
@@ -982,11 +989,6 @@ abstract class ClassfileParser {
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
- catch {
- case e: Throwable =>
- e.printStackTrace
- throw e
- }
/** Parse and return a single annotation. If it is malformed,
* return None.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index a203b8a78b..0b64a49a2c 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -25,10 +25,7 @@ abstract class ICodeReader extends ClassfileParser {
var instanceCode: IClass = null // the ICode class for the current symbol
var staticCode: IClass = null // the ICode class static members
- var method: IMethod = _ // the current IMethod
-
- val nothingName = newTermName(SCALA_NOTHING)
- val nullName = newTermName(SCALA_NULL)
+ var method: IMethod = NoIMethod // the current IMethod
var isScalaModule = false
/** Read back bytecode for the given class symbol. It returns
@@ -182,9 +179,9 @@ abstract class ICodeReader extends ClassfileParser {
}
override def classNameToSymbol(name: Name) = {
- val sym = if (name == nothingName)
+ val sym = if (name == fulltpnme.RuntimeNothing)
definitions.NothingClass
- else if (name == nullName)
+ else if (name == fulltpnme.RuntimeNull)
definitions.NullClass
else if (nme.isImplClassName(name)) {
val iface = definitions.getClass(nme.interfaceName(name))
@@ -194,7 +191,7 @@ abstract class ICodeReader extends ClassfileParser {
}
else if (nme.isModuleName(name)) {
val strippedName = nme.stripModuleSuffix(name)
- val sym = forceMangledName(strippedName.decode, true)
+ val sym = forceMangledName(newTermName(strippedName.decode), true)
if (sym == NoSymbol) definitions.getModule(strippedName)
else sym
@@ -629,7 +626,7 @@ abstract class ICodeReader extends ClassfileParser {
skipAttributes()
code.toBasicBlock
- assert(method.code ne null)
+ assert(method.hasCode, method)
// reverse parameters, as they were prepended during code generation
method.params = method.params.reverse
@@ -692,7 +689,7 @@ abstract class ICodeReader extends ClassfileParser {
mutable.Map(jmpTargets.toSeq map (_ -> code.newBlock): _*)
val blocks = makeBasicBlocks
- var otherBlock: BasicBlock = null
+ var otherBlock: BasicBlock = NoBasicBlock
var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
@@ -991,7 +988,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(NoPosition, "loc" + idx).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(NoPosition, newTermName("loc" + idx)).setInfo(kind.toType);
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -1008,7 +1005,7 @@ abstract class ICodeReader extends ClassfileParser {
/** add a method param with the given index. */
def enterParam(idx: Int, kind: TypeKind) = {
- val sym = method.symbol.newVariable(NoPosition, "par" + idx).setInfo(kind.toType)
+ val sym = method.symbol.newVariable(NoPosition, newTermName("par" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, true)
assert(!locals.isDefinedAt(idx))
locals += (idx -> List((l, kind)))
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
index 728593abe7..676c8f09da 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
@@ -49,7 +49,7 @@ abstract class MetaParser{
val sym = locals.lookup(newTypeName(str))
if (sym != NoSymbol) sym.tpe
else {
- val tp = definitions.getClass(str).tpe;
+ val tp = definitions.getRequiredClass(str).tpe;
if (token != "[") tp
else {
val args = new ListBuffer[Type];
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 95ef799720..e0cb0848be 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -34,6 +34,8 @@ abstract class TypeParser {
protected var busy: Boolean = false // lock to detect recursive reads
+ private implicit def stringToTermName(s: String): TermName = newTermName(s)
+
private object unpickler extends UnPickler {
val global: TypeParser.this.global.type = TypeParser.this.global
}
@@ -153,8 +155,8 @@ abstract class TypeParser {
val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append "Boxed")
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append "MgdPtr")
+ clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
+ clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
/* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
before metadata for members are parsed) so that clazzMgdPtr can be found by getClRType. */
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 4e8e678dc8..c59a819b02 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -148,8 +148,8 @@ abstract class AddInterfaces extends InfoTransform {
*/
def mixinToImplClass(tp: Type): Type = erasure(sym,
tp match { //@MATN: no normalize needed (comes after erasure)
- case TypeRef(pre, sym, args) if sym.needsImplClass =>
- typeRef(pre, implClass(sym), args)
+ case TypeRef(pre, sym, _) if sym.needsImplClass =>
+ typeRef(pre, implClass(sym), Nil)
case _ =>
tp
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index d001a0af8b..034628e95f 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -22,9 +22,35 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
+ private def clearStatics() {
+ newStaticMembers.clear()
+ newStaticInits.clear()
+ symbolsStoredAsStatic.clear()
+ }
+ private def savingStatics[T](body: => T): T = {
+ val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
+ val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
+ val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
+ val result = body
+
+ clearStatics()
+ newStaticMembers ++= savedNewStaticMembers
+ newStaticInits ++= savedNewStaticInits
+ symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
+
+ result
+ }
+ private def transformTemplate(tree: Tree) = {
+ val Template(parents, self, body) = tree
+ clearStatics()
+ val newBody = transformTrees(body)
+ val templ = treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
+ try addStaticInits(templ) // postprocess to include static ctors
+ finally clearStatics()
+ }
private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
/** Kludge to provide a safe fix for #4560:
@@ -60,7 +86,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
private def typedWithPos(pos: Position)(tree: Tree) =
- localTyper typed { atPos(pos)(tree) }
+ localTyper.typedPos(pos)(tree)
/** A value class is defined to be only Java-compatible values: unit is
* not part of it, as opposed to isValueClass in definitions. scala.Int is
@@ -71,7 +97,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** The boxed type if it's a primitive; identity otherwise.
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
-
+
override def transform(tree: Tree): Tree = tree match {
/* Transforms dynamic calls (i.e. calls to methods that are undefined
@@ -106,6 +132,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* refinement, where the refinement defines a parameter based on a
* type variable. */
case ad@ApplyDynamic(qual0, params) =>
+ if (settings.logReflectiveCalls.value)
+ unit.echo(ad.pos, "method invocation uses reflection")
+
val typedPos = typedWithPos(ad.pos) _
assert(ad.symbol.isPublic)
@@ -113,11 +142,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### CREATING THE METHOD CACHE ### */
- def addStaticVariableToClass(forName: String, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
+ def addStaticVariableToClass(forName: TermName, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
val varSym = (
- currentClass.newVariable(ad.pos, mkTerm(forName))
- setFlag (PRIVATE | STATIC | SYNTHETIC)
- setInfo (forType)
+ currentClass.newVariable(ad.pos, mkTerm("" + forName))
+ setFlag PRIVATE | STATIC | SYNTHETIC
+ setInfo forType
)
if (isFinal) varSym setFlag FINAL
else varSym.addAnnotation(VolatileAttr)
@@ -165,9 +194,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
case Pair(reflMethodSym, List(forReceiverSym)) =>
(REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))
}
@@ -194,18 +223,18 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
val reflMethodCacheSym: Symbol =
- addStaticVariableToClass("reflMethod$Cache", MethodClass.tpe, NULL, false)
+ addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
val reflClassCacheSym: Symbol =
- addStaticVariableToClass("reflClass$Cache", SoftReferenceClass.tpe, NULL, false)
+ addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
def isCacheEmpty(receiver: Symbol): Tree =
reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
case Pair(reflMethodSym, List(forReceiverSym)) =>
BLOCK(
IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
@@ -241,13 +270,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = addStaticVariableToClass("reflPoly$Cache", SoftReferenceClass.tpe, mkNewPolyCache, false)
+ val reflPolyCacheSym: Symbol = (
+ addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
+ )
def getPolyCache = gen.mkCast(fn(safeREF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe)
+ addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe)
{ case Pair(reflMethodSym, List(forReceiverSym)) =>
val methodSym = reflMethodSym.newVariable(ad.pos, mkTerm("method")) setInfo MethodClass.tpe
@@ -271,59 +302,14 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
- val testForNumber: Tree => Tree = {
- qual1 => (qual1 IS_OBJ BoxedNumberClass.tpe) OR (qual1 IS_OBJ BoxedCharacterClass.tpe)
- }
- val testForBoolean: Tree => Tree = {
- qual1 => (qual1 IS_OBJ BoxedBooleanClass.tpe)
- }
- val testForNumberOrBoolean: Tree => Tree = {
- qual1 => testForNumber(qual1) OR testForBoolean(qual1)
- }
-
- def postfixTest(name: Name): Option[(String, Tree => Tree)] = {
- var runtimeTest: Tree => Tree = testForNumber
- val newName = name match {
- case nme.UNARY_! => runtimeTest = testForBoolean ; "takeNot"
- case nme.UNARY_+ => "positive"
- case nme.UNARY_- => "negate"
- case nme.UNARY_~ => "complement"
- case nme.toByte => "toByte"
- case nme.toShort => "toShort"
- case nme.toChar => "toCharacter"
- case nme.toInt => "toInteger"
- case nme.toLong => "toLong"
- case nme.toFloat => "toFloat"
- case nme.toDouble => "toDouble"
- case _ => return None
- }
- Some(newName, runtimeTest)
- }
- def infixTest(name: Name): Option[(String, Tree => Tree)] = {
- val (newName, runtimeTest) = name match {
- case nme.OR => ("takeOr", testForNumberOrBoolean)
- case nme.XOR => ("takeXor", testForNumberOrBoolean)
- case nme.AND => ("takeAnd", testForNumberOrBoolean)
- case nme.EQ => ("testEqual", testForNumberOrBoolean)
- case nme.NE => ("testNotEqual", testForNumberOrBoolean)
- case nme.ADD => ("add", testForNumber)
- case nme.SUB => ("subtract", testForNumber)
- case nme.MUL => ("multiply", testForNumber)
- case nme.DIV => ("divide", testForNumber)
- case nme.MOD => ("takeModulo", testForNumber)
- case nme.LSL => ("shiftSignedLeft", testForNumber)
- case nme.LSR => ("shiftLogicalRight", testForNumber)
- case nme.ASR => ("shiftSignedRight", testForNumber)
- case nme.LT => ("testLessThan", testForNumber)
- case nme.LE => ("testLessOrEqualThan", testForNumber)
- case nme.GE => ("testGreaterOrEqualThan", testForNumber)
- case nme.GT => ("testGreaterThan", testForNumber)
- case nme.ZOR => ("takeConditionalOr", testForBoolean)
- case nme.ZAND => ("takeConditionalAnd", testForBoolean)
- case _ => return None
- }
- Some(newName, runtimeTest)
- }
+ def testForName(name: Name): Tree => Tree = t => (
+ if (nme.CommonOpNames(name))
+ gen.mkMethodCall(getMember(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean), t :: Nil)
+ else if (nme.BooleanOpNames(name))
+ t IS_OBJ BoxedBooleanClass.tpe
+ else
+ gen.mkMethodCall(getMember(BoxesRunTimeClass, nme.isBoxedNumber), t :: Nil)
+ )
/** The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
@@ -332,12 +318,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* (If the compiler can verify qual is safe to inline, it will not create the block.)
*/
def getPrimitiveReplacementForStructuralCall(name: Name): Option[(Symbol, Tree => Tree)] = {
- val opt = (
- if (params.isEmpty) postfixTest(name)
- else if (params.tail.isEmpty) infixTest(name)
- else None
+ val methodName = (
+ if (params.isEmpty) nme.primitivePostfixMethodName(name)
+ else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
+ else nme.NO_NAME
)
- opt map { case (name, fn) => (getMember(BoxesRunTimeClass, name), fn) }
+ if (methodName == nme.NO_NAME) None
+ else Some((getMember(BoxesRunTimeClass, methodName), testForName(name)))
}
/* ### BOXING PARAMS & UNBOXING RESULTS ### */
@@ -502,7 +489,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* expected to be an AnyRef. */
val t: Tree = ad.symbol.tpe match {
case MethodType(mparams, resType) =>
- assert(params.length == mparams.length)
+ assert(params.length == mparams.length, mparams)
+
typedPos {
val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
qual = safeREF(sym)
@@ -554,31 +542,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- var savedNewStaticMembers : mutable.Buffer[Tree] = null
- var savedNewStaticInits : mutable.Buffer[Tree] = null
- var savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = null
- if(forMSIL) {
- savedNewStaticMembers = newStaticMembers.clone
- savedNewStaticInits = newStaticInits.clone
- savedSymbolsStoredAsStatic = symbolsStoredAsStatic.clone
- }
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- val transformedTemplate: Template = {
- var newBody = transformTrees(body)
- treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
- }
- val res = addStaticInits(transformedTemplate) // postprocess to include static ctors
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- if(forMSIL) {
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
- }
- res
+ if (forMSIL) savingStatics( transformTemplate(tree) )
+ else transformTemplate(tree)
case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
val tpe = c.typeValue
@@ -641,15 +606,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* And, finally, be advised - scala symbol literal and the Symbol class of the compiler
* have little in common.
*/
- case symapp @ Apply(Select(Select(a @ Ident(nme.scala_), b @ nme.Symbol), nme.apply),
- List(Literal(Constant(symname: String)))) =>
+ case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
// add the symbol name to a map if it's not there already
- val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
- val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
-
+ val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+ val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
// create a reference to a static field
- val ntree = typedWithPos(symapp.pos)(safeREF(staticFieldSym))
-
+ val ntree = typedWithPos(tree.pos)(safeREF(staticFieldSym))
super.transform(ntree)
// This transform replaces Array(Predef.wrapArray(Array(...)), <manifest>)
@@ -669,19 +631,21 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* If it doesn't exist, i.e. the symbol is encountered the first time,
* it creates a new static field definition and initialization and returns it.
*/
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol = {
symbolsStoredAsStatic.getOrElseUpdate(symname, {
val theTyper = typer.atOwner(tree, currentClass)
// create a symbol for the static field
- val stfieldSym = currentClass.newVariable(pos, mkTerm("symbol$"))
- .setFlag(PRIVATE | STATIC | SYNTHETIC | FINAL)
- .setInfo(symbolType)
+ val stfieldSym = (
+ currentClass.newVariable(pos, mkTerm("symbol$"))
+ setFlag PRIVATE | STATIC | SYNTHETIC | FINAL
+ setInfo SymbolClass.tpe
+ )
currentClass.info.decls enter stfieldSym
// create field definition and initialization
- val stfieldDef = theTyper.typed { atPos(pos)(VAL(stfieldSym) === rhs) }
- val stfieldInit = theTyper.typed { atPos(pos)(safeREF(stfieldSym) === rhs) }
+ val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+ val stfieldInit = theTyper.typedPos(pos)(safeREF(stfieldSym) === rhs)
// add field definition to new defs
newStaticMembers append stfieldDef
@@ -689,6 +653,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
stfieldSym
})
+ }
/* finds the static ctor DefDef tree within the template if it exists. */
private def findStaticCtor(template: Template): Option[Tree] =
@@ -700,7 +665,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* changes the template for the class so that it contains a static constructor with symbol fields inits,
* augments an existing static ctor if one already existed.
*/
- private def addStaticInits(template: Template): Template =
+ private def addStaticInits(template: Template): Template = {
if (newStaticInits.isEmpty)
template
else {
@@ -722,11 +687,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// create new static ctor
val staticCtorSym = currentClass.newStaticConstructor(template.pos)
val rhs = Block(newStaticInits.toList, Literal(Constant()))
- val staticCtorTree = DefDef(staticCtorSym, rhs)
- localTyper.typed { atPos(template.pos)(staticCtorTree) }
+
+ localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
}
+ }
} // CleanUpTransformer
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 00b72bdc1c..4d4f4f4c27 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -254,26 +254,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
for ((accSym, accBody) <- outerAccessors)
if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
- // Conflicting symbol list from parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val parentSymbols = Map((for {
- p <- impl.parents
- if p.symbol.isTrait
- sym <- p.symbol.info.nonPrivateMembers
- if sym.isGetter && !sym.isOuterField
- } yield sym.name -> p): _*)
-
// Initialize all parameters fields that must be kept.
- val paramInits =
- for (acc <- paramAccessors if mustbeKept(acc)) yield {
- if (parentSymbols contains acc.name)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s in '%s'".format(
- acc.name, acc.name, parentSymbols(acc.name)))
-
- copyParam(acc, parameter(acc))
- }
+ val paramInits = paramAccessors filter mustbeKept map { acc =>
+ // Check for conflicting symbol amongst parents: see bug #1960.
+ // It would be better to mangle the constructor parameter name since
+ // it can only be used internally, but I think we need more robust name
+ // mangling before we introduce more of it.
+ val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+ if (conflict ne NoSymbol)
+ unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+
+ copyParam(acc, parameter(acc))
+ }
/** Return a single list of statements, merging the generic class constructor with the
* specialized stats. The original statements are retyped in the current class, and
@@ -285,10 +277,11 @@ abstract class Constructors extends Transform with ast.TreeDSL {
specBuf ++= specializedStats
def specializedAssignFor(sym: Symbol): Option[Tree] =
- specializedStats.find {
- case Assign(sel @ Select(This(_), _), rhs) if sel.symbol.hasFlag(SPECIALIZED) =>
- val (generic, _, _) = nme.splitSpecializedName(nme.localToGetter(sel.symbol.name))
- generic == nme.localToGetter(sym.name)
+ specializedStats find {
+ case Assign(sel @ Select(This(_), _), rhs) =>
+ ( (sel.symbol hasFlag SPECIALIZED)
+ && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
+ )
case _ => false
}
@@ -298,11 +291,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
* be an error to pass it to array_update(.., .., Object).
*/
def rewriteArrayUpdate(tree: Tree): Tree = {
- val array_update = definitions.ScalaRunTimeModule.info.member("array_update")
val adapter = new Transformer {
override def transform(t: Tree): Tree = t match {
- case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == array_update =>
- localTyper.typed(Apply(gen.mkAttributedSelect(xs, definitions.Array_update), List(idx, v)))
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
case _ => super.transform(t)
}
}
@@ -378,11 +370,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
EmptyTree)
List(localTyper.typed(tree))
- } else if (clazz.hasFlag(SPECIALIZED)) {
+ }
+ else if (clazz.hasFlag(SPECIALIZED)) {
// add initialization from its generic class constructor
- val (genericName, _, _) = nme.splitSpecializedName(clazz.name)
+ val genericName = nme.unspecializedName(clazz.name)
val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
- assert(genericClazz != NoSymbol)
+ assert(genericClazz != NoSymbol, clazz)
guardedCtorStats.get(genericClazz) match {
case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 9806857ff2..f3b1e77c8d 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -421,7 +421,7 @@ abstract class Erasure extends AddInterfaces
*/
/** The modifier typer which retypes with erased types. */
- class Eraser(context: Context) extends Typer(context) {
+ class Eraser(_context: Context) extends Typer(_context) {
private def safeToRemoveUnbox(cls: Symbol): Boolean =
(cls == definitions.NullClass) || isBoxedValueClass(cls)
@@ -866,7 +866,7 @@ abstract class Erasure extends AddInterfaces
unboundedGenericArrayLevel(arg.tpe) > 0) =>
val level = unboundedGenericArrayLevel(arg.tpe)
def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
+ gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
global.typer.typedPos(tree.pos) {
if (level == 1) isArrayTest(qual)
@@ -887,19 +887,30 @@ abstract class Erasure extends AddInterfaces
fun.symbol != Object_isInstanceOf) =>
// leave all other type tests/type casts, remove all other type applications
preErase(fun)
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
+ case Apply(fn @ Select(qual, name), args) if fn.symbol.owner == ArrayClass =>
+ // Have to also catch calls to abstract types which are bounded by Array.
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
// convert calls to apply/update/length on generic arrays to
// calls of ScalaRunTime.array_xxx method calls
- global.typer.typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
- else
+ global.typer.typedPos(tree.pos)({
+ val arrayMethodName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ }
+ gen.mkRuntimeCall(arrayMethodName, qual :: args)
+ })
+ }
+ else {
// store exact array erasure in map to be retrieved later when we might
// need to do the cast in adaptMember
treeCopy.Apply(
tree,
SelectFromArray(qual, name, erasure(tree.symbol, qual.tpe)).copyAttrs(fn),
args)
-
+ }
case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) =>
if (fn.symbol == Any_## || fn.symbol == Object_##) {
// This is unattractive, but without it we crash here on ().## because after
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index fcc03a82d0..cf7d6c94fe 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -68,6 +68,8 @@ abstract class ExplicitOuter extends InfoTransform
result
}
+
+ private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
@@ -134,7 +136,7 @@ abstract class ExplicitOuter extends InfoTransform
}
if (sym.owner.isTrait) sym setNotFlag PROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
- val p = sym.newValueParameter(sym.pos, "arg" + nme.OUTER)
+ val p = sym.newValueParameter(sym.pos, innerClassConstructorParamName)
.setInfo(sym.owner.outerClass.thisType)
MethodType(p :: params, restpe)
} else if (restpe ne restpe1)
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 443a6140dc..2180fd4f3a 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -17,6 +17,19 @@ abstract class LambdaLift extends InfoTransform {
/** the following two members override abstract members in Transform */
val phaseName: String = "lambdalift"
+
+ /** Converts types of captured variables to *Ref types.
+ */
+ def boxIfCaptured(sym: Symbol, tpe: Type, erasedTypes: Boolean) =
+ if (sym.isCapturedVariable) {
+ val symClass = tpe.typeSymbol
+ def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
+ if (isValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
+ else if (erasedTypes) objectRefClass.tpe
+ else appliedType(objectRefClass.typeConstructor, List(tpe))
+ if (sym.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
+ else refType(refClass, ObjectRefClass)
+ } else tpe
private val lifted = new TypeMap {
def apply(tp: Type): Type = tp match {
@@ -31,7 +44,8 @@ abstract class LambdaLift extends InfoTransform {
}
}
- def transformInfo(sym: Symbol, tp: Type): Type = lifted(tp)
+ def transformInfo(sym: Symbol, tp: Type): Type =
+ boxIfCaptured(sym, lifted(tp), erasedTypes = true)
protected def newTransformer(unit: CompilationUnit): Transformer =
new LambdaLifter(unit)
@@ -55,7 +69,10 @@ abstract class LambdaLift extends InfoTransform {
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
-
+
+ /** True if we are transforming under a ReferenceToBoxed node */
+ private var isBoxedRef = false
+
private type SymSet = TreeSet[Symbol]
private def newSymSet = new TreeSet[Symbol](_ isLess _)
@@ -116,22 +133,7 @@ abstract class LambdaLift extends InfoTransform {
}
changedFreeVars = true
debuglog("" + sym + " is free in " + enclosure);
- if (sym.isVariable && !sym.hasFlag(CAPTURED)) {
- // todo: We should merge this with the lifting done in liftCode.
- // We do have to lift twice: in liftCode, because Code[T] needs to see the lifted version
- // and here again because lazy bitmaps are introduced later and get lifted here.
- // But we should factor out the code and run it twice.
- sym setFlag CAPTURED
- val symClass = sym.tpe.typeSymbol
- atPhase(phase.next) {
- sym updateInfo (
- if (sym.hasAnnotation(VolatileAttr))
- if (isValueClass(symClass)) volatileRefClass(symClass).tpe else VolatileObjectRefClass.tpe
- else
- if (isValueClass(symClass)) refClass(symClass).tpe else ObjectRefClass.tpe
- )
- }
- }
+ if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
}
@@ -228,6 +230,7 @@ abstract class LambdaLift extends InfoTransform {
private def proxy(sym: Symbol) = {
def searchIn(enclosure: Symbol): Symbol = {
+ if (enclosure eq NoSymbol) throw new IllegalArgumentException("Could not find proxy for "+ sym.defString +" in "+ sym.ownerChain +" (currentOwner= "+ currentOwner +" )")
debuglog("searching for " + sym + "(" + sym.owner + ") in " + enclosure + " " + enclosure.logicallyEnclosingMember)
val ps = (proxies get enclosure.logicallyEnclosingMember).toList.flatten filter (_.name == sym.name)
@@ -339,7 +342,7 @@ abstract class LambdaLift extends InfoTransform {
EmptyTree
}
- private def postTransform(tree: Tree): Tree = {
+ private def postTransform(tree: Tree, isBoxedRef: Boolean = false): Tree = {
val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
@@ -362,8 +365,19 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
+ /** Wrap expr argument in new *Ref(..) constructor, but make
+ * sure that Try expressions stay at toplevel.
+ */
+ def refConstr(expr: Tree): Tree = expr match {
+ case Try(block, catches, finalizer) =>
+ Try(refConstr(block), catches map refConstrCase, finalizer)
+ case _ =>
+ Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(expr))
+ }
+ def refConstrCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(constructorArg))
+ refConstr(constructorArg)
})
} else tree
case Return(Block(stats, value)) =>
@@ -387,7 +401,7 @@ abstract class LambdaLift extends InfoTransform {
atPos(tree.pos)(proxyRef(sym))
else tree
else tree
- if (sym.isCapturedVariable)
+ if (sym.isCapturedVariable && !isBoxedRef)
atPos(tree.pos) {
val tp = tree.tpe
val elemTree = typer typed Select(tree1 setType sym.tpe, nme.elem)
@@ -405,10 +419,16 @@ abstract class LambdaLift extends InfoTransform {
tree
}
}
+
+ private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
- override def transform(tree: Tree): Tree =
- postTransform(super.transform(tree) setType lifted(tree.tpe))
-
+ override def transform(tree: Tree): Tree = tree match {
+ case ReferenceToBoxed(idt) =>
+ postTransform(preTransform(idt), isBoxedRef = true)
+ case _ =>
+ postTransform(preTransform(tree))
+ }
+
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
index f3f823d197..bc7d1754d4 100644
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
@@ -110,18 +110,10 @@ abstract class LiftCode extends Transform with TypingTransformers {
}
}
- /** Set of mutable local variables that are free in some inner method. */
- private val freeMutableVars: mutable.Set[Symbol] = new mutable.HashSet
- private val converted: mutable.Set[Symbol] = new mutable.HashSet // debug
-
override def transformUnit(unit: CompilationUnit) {
- freeMutableVars.clear()
- freeLocalsTraverser(unit.body)
atPhase(phase.next) {
super.transformUnit(unit)
}
- for (v <- freeMutableVars) //!!! remove
- assert(converted contains v, "unconverted: " + v + " in " + v.owner + " in unit " + unit)
}
override def transform(tree: Tree): Tree = {
@@ -137,24 +129,6 @@ abstract class LiftCode extends Transform with TypingTransformers {
result
}
} finally printTypings = saved
- case ValDef(mods, name, tpt, rhs) if (freeMutableVars(sym)) => // box mutable variables that are accessed from a local closure
- val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
- /* Creating a constructor argument if one isn't present. */
- val constructorArg = rhs match {
- case EmptyTree => gen.mkZero(atPhase(phase.prev)(sym.tpe))
- case _ => transform(rhs)
- }
- val rhs1 = typer.typedPos(rhs.pos) {
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(constructorArg))
- }
- sym resetFlag MUTABLE
- sym removeAnnotation VolatileAttr
- converted += sym // dereference boxed variables
- treeCopy.ValDef(tree, mods &~ MUTABLE, name, tpt1, rhs1)
- case Ident(name) if freeMutableVars(sym) =>
- localTyper.typedPos(tree.pos) {
- Select(tree setType sym.tpe, nme.elem)
- }
case _ =>
super.transform(tree)
}
@@ -170,74 +144,6 @@ abstract class LiftCode extends Transform with TypingTransformers {
New(TypeTree(appliedType(definitions.CodeClass.typeConstructor, List(treetpe.widen))),
List(List(arg)))
}
-
- /**
- * PP: There is apparently some degree of overlap between the CAPTURED
- * flag and the role being filled here. I think this is how this was able
- * to go for so long looking only at DefDef and Ident nodes, as bugs
- * would only emerge under more complicated conditions such as #3855.
- * I'll try to figure it all out, but if someone who already knows the
- * whole story wants to fill it in, that too would be great.
- *
- * XXX I found this had been cut and pasted between LiftCode and UnCurry,
- * and seems to be running in both.
- */
- private val freeLocalsTraverser = new Traverser {
- var currentMethod: Symbol = NoSymbol
- var maybeEscaping = false
-
- def withEscaping(body: => Unit) {
- val saved = maybeEscaping
- maybeEscaping = true
- try body
- finally maybeEscaping = saved
- }
-
- override def traverse(tree: Tree) = tree match {
- case DefDef(_, _, _, _, _, _) =>
- val lastMethod = currentMethod
- currentMethod = tree.symbol
- try super.traverse(tree)
- finally currentMethod = lastMethod
- /** A method call with a by-name parameter represents escape. */
- case Apply(fn, args) if fn.symbol.paramss.nonEmpty =>
- traverse(fn)
- for ((param, arg) <- treeInfo.zipMethodParamsAndArgs(tree)) {
- if (param.tpe != null && isByNameParamType(param.tpe))
- withEscaping(traverse(arg))
- else
- traverse(arg)
- }
-
- /** The rhs of a closure represents escape. */
- case Function(vparams, body) =>
- vparams foreach traverse
- withEscaping(traverse(body))
-
- /**
- * The appearance of an ident outside the method where it was defined or
- * anytime maybeEscaping is true implies escape.
- */
- case Ident(_) =>
- val sym = tree.symbol
- if (sym.isVariable && sym.owner.isMethod && (maybeEscaping || sym.owner != currentMethod)) {
- freeMutableVars += sym
- val symTpe = sym.tpe
- val symClass = symTpe.typeSymbol
- atPhase(phase.next) {
- def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
- if (isValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
- else appliedType(objectRefClass.typeConstructor, List(symTpe))
-
- sym updateInfo (
- if (sym.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
- else refType(refClass, ObjectRefClass))
- }
- }
- case _ =>
- super.traverse(tree)
- }
- }
}
/**
@@ -259,9 +165,6 @@ abstract class LiftCode extends Transform with TypingTransformers {
*/
class Reifier() {
- final val mirrorFullName = "scala.reflect.mirror"
- final val mirrorShortName = "$mr"
- final val mirrorPrefix = mirrorShortName + "."
final val scalaPrefix = "scala."
final val localPrefix = "$local"
final val memoizerName = "$memo"
@@ -311,16 +214,20 @@ abstract class LiftCode extends Transform with TypingTransformers {
// helper methods
- private def localName(sym: Symbol) = localPrefix + symIndex(sym)
+ private def localName(sym: Symbol): TermName =
+ newTermName(localPrefix + symIndex(sym))
private def call(fname: String, args: Tree*): Tree =
Apply(termPath(fname), args.toList)
private def mirrorSelect(name: String): Tree =
- termPath(mirrorPrefix + name)
+ termPath(nme.MIRROR_PREFIX + name)
+
+ private def mirrorCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.MIRROR_PREFIX append name), args: _*)
private def mirrorCall(name: String, args: Tree*): Tree =
- call(mirrorPrefix + name, args: _*)
+ call(nme.MIRROR_PREFIX + name, args: _*)
private def mirrorFactoryCall(value: Product, args: Tree*): Tree =
mirrorCall(value.productPrefix, args: _*)
@@ -385,7 +292,10 @@ abstract class LiftCode extends Transform with TypingTransformers {
else {
if (sym.isTerm) {
if (reifyDebug) println("Free: " + sym)
- mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), Ident(sym))
+ val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false)
+ def markIfCaptured(arg: Ident): Tree =
+ if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg
+ mirrorCall("freeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym)))
} else {
if (reifyDebug) println("Late local: " + sym)
registerReifiableSymbol(sym)
@@ -413,15 +323,15 @@ abstract class LiftCode extends Transform with TypingTransformers {
* Generate code to add type and annotation info to a reified symbol
*/
private def fillInSymbol(sym: Symbol): Tree = {
- val rset = Apply(Select(reifySymRef(sym), "setTypeSig"), List(reifyType(sym.info)))
+ val rset = Apply(Select(reifySymRef(sym), nme.setTypeSig), List(reifyType(sym.info)))
if (sym.annotations.isEmpty) rset
- else Apply(Select(rset, "setAnnotations"), List(reify(sym.annotations)))
+ else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations)))
}
/** Reify a scope */
private def reifyScope(scope: Scope): Tree = {
scope foreach registerReifiableSymbol
- mirrorCall("newScopeWith", scope.toList map reifySymRef: _*)
+ mirrorCall(nme.newScopeWith, scope.toList map reifySymRef: _*)
}
/** Reify a list of symbols that need to be created */
@@ -439,14 +349,14 @@ abstract class LiftCode extends Transform with TypingTransformers {
val tpe = tpe0.normalize
val tsym = tpe.typeSymbol
if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
- Select(reifySymRef(tpe.typeSymbol), "asTypeConstructor")
+ Select(reifySymRef(tpe.typeSymbol), nme.asTypeConstructor)
else tpe match {
case t @ NoType =>
reifyMirrorObject(t)
case t @ NoPrefix =>
reifyMirrorObject(t)
case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
- mirrorCall("thisModuleType", reify(clazz.fullName))
+ mirrorCall(nme.thisModuleType, reify(clazz.fullName))
case t @ RefinedType(parents, decls) =>
registerReifiableSymbol(tpe.typeSymbol)
mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol))
@@ -471,13 +381,20 @@ abstract class LiftCode extends Transform with TypingTransformers {
case This(_) if !(boundSyms contains tree.symbol) =>
reifyFree(tree)
case Ident(_) if !(boundSyms contains tree.symbol) =>
- reifyFree(tree)
+ if (tree.symbol.isVariable && tree.symbol.owner.isTerm) {
+ captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here.
+ mirrorCall("Select", reifyFree(tree), reifyName(nme.elem))
+ } else reifyFree(tree)
case tt: TypeTree if (tt.tpe != null) =>
if (!(boundSyms exists (tt.tpe contains _))) mirrorCall("TypeTree", reifyType(tt.tpe))
else if (tt.original != null) reify(tt.original)
- else mirrorCall("TypeTree")
+ else mirrorCall(nme.TypeTree)
+ case ta @ TypeApply(hk, ts) =>
+ val thereAreOnlyTTs = ts collect { case t if !t.isInstanceOf[TypeTree] => t } isEmpty;
+ val ttsAreNotEssential = ts collect { case tt: TypeTree => tt } find { tt => tt.original != null } isEmpty;
+ if (thereAreOnlyTTs && ttsAreNotEssential) reifyTree(hk) else reifyProduct(ta)
case global.emptyValDef =>
- mirrorSelect("emptyValDef")
+ mirrorSelect(nme.emptyValDef)
case _ =>
if (tree.isDef)
boundSyms += tree.symbol
@@ -487,8 +404,8 @@ abstract class LiftCode extends Transform with TypingTransformers {
if (tree.isDef || tree.isInstanceOf[Function])
registerReifiableSymbol(tree.symbol)
if (tree.hasSymbol)
- rtree = Apply(Select(rtree, "setSymbol"), List(reifySymRef(tree.symbol)))
- Apply(Select(rtree, "setType"), List(reifyType(tree.tpe)))
+ rtree = Apply(Select(rtree, nme.setSymbol), List(reifySymRef(tree.symbol)))
+ Apply(Select(rtree, nme.setType), List(reifyType(tree.tpe)))
*/
}
@@ -497,7 +414,7 @@ abstract class LiftCode extends Transform with TypingTransformers {
* to a global value, or else a mirror Literal.
*/
private def reifyFree(tree: Tree): Tree =
- mirrorCall("Ident", reifySymRef(tree.symbol))
+ mirrorCall(nme.Ident, reifySymRef(tree.symbol))
// todo: consider whether we should also reify positions
private def reifyPosition(pos: Position): Tree =
@@ -527,7 +444,7 @@ abstract class LiftCode extends Transform with TypingTransformers {
case sym: Symbol => reifySymRef(sym)
case tpe: Type => reifyType(tpe)
case xs: List[_] => reifyList(xs)
- case xs: Array[_] => scalaFactoryCall("Array", xs map reify: _*)
+ case xs: Array[_] => scalaFactoryCall(nme.Array, xs map reify: _*)
case scope: Scope => reifyScope(scope)
case x: Name => reifyName(x)
case x: Position => reifyPosition(x)
@@ -559,7 +476,7 @@ abstract class LiftCode extends Transform with TypingTransformers {
private def typePath(fullname: String): Tree = path(fullname, newTypeName)
private def mirrorAlias =
- ValDef(NoMods, mirrorShortName, TypeTree(), termPath(mirrorFullName))
+ ValDef(NoMods, nme.MIRROR_SHORT, TypeTree(), termPath(fullnme.MirrorPackage))
/**
* Generate code that generates a symbol table of all symbols registered in `reifiableSyms`
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 212785a525..99b0a82690 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -102,7 +102,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = {
ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args))
- emptyEnv ++ (sym.info.typeParams zip args filter (kv => isSpecialized(kv._1)))
+ emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, v) => isSpecialized(k))
}
/** Does typeenv `t1` include `t2`? All type variables in `t1`
@@ -255,7 +255,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- val args1 = (args zip sym.typeParams) map {
+ val args1 = map2(args, sym.typeParams) {
case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
case (tp, _) => tp
}
@@ -341,7 +341,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
// zip the keys with each permutation to create a TypeEnv
- loop(keys map concreteTypes) map (keys zip _ toMap)
+ loop(keys map concreteTypes) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -407,7 +407,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def typeParamSubAnyRef(sym: Symbol, cls: Symbol) = (
anyrefSpecCache.getOrElseUpdate(sym,
- cls.newTypeParameter(sym.pos, newTypeName(sym.name + "$sp"))
+ cls.newTypeParameter(sym.pos, sym.name append nme.SPECIALIZED_SUFFIX_NAME toTypeName)
setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
).tpe
)
@@ -445,7 +445,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def produceTypeParameters(syms: List[Symbol], nowner: Symbol, env: TypeEnv) = {
val cloned = for (s <- syms) yield if (!env.contains(s)) s.cloneSymbol(nowner) else env(s).typeSymbol
// log("producing type params: " + cloned.map(t => (t, t.tpe.bounds.hi)))
- for ((orig, cln) <- syms zip cloned) {
+ foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
if (env.contains(orig))
cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
@@ -572,8 +572,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// resolved by the type checker. Later on, erasure re-typechecks everything and
// chokes if it finds default parameters for specialized members, even though
// they are never needed.
- sym.info.paramss.flatten foreach (_.resetFlag(DEFAULTPARAM))
-
+ mapParamss(sym)(_ resetFlag DEFAULTPARAM)
decls1.enter(subst(fullEnv)(sym))
}
@@ -889,7 +888,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
- for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams)) {
+ foreach2(overridden.info.typeParams, overriding.info.typeParams) { (baseTvar, derivedTvar) =>
val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet
if (missing.nonEmpty) {
reporter.error(derivedTvar.pos,
@@ -1235,7 +1234,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = vparamss map (_ map (_.symbol))
+ parameters(tree.symbol) = mmap(vparamss)(_.symbol)
concreteSpecMethods -= tree.symbol
} // no need to descend further down inside method bodies
@@ -1391,9 +1390,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
- val parents1 = currentOwner.info.parents.zipWithIndex.map {
- case (tpe, idx) => TypeTree(tpe) setPos parents(idx).pos
- }
+ val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) =>
+ TypeTree(tpe) setPos parent.pos)
+
treeCopy.Template(tree,
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
@@ -1609,7 +1608,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (m.isClassConstructor) {
val origParamss = parameters(info(m).target)
val vparams = (
- for ((tp, sym) <- m.info.paramTypes zip origParamss(0)) yield (
+ map2(m.info.paramTypes, origParamss(0))((tp, sym) =>
m.newValue(sym.pos, specializedName(sym, typeEnv(cls)))
.setInfo(tp)
.setFlag(sym.flags)
@@ -1625,7 +1624,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
// ctor
- mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), List(vparams) map (_ map ValDef), EmptyTree))
+ mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
} else {
mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
}
@@ -1671,7 +1670,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
private def forwardCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
- val argss = paramss map (_ map (x => Ident(x.symbol)))
+ val argss = mmap(paramss)(x => Ident(x.symbol))
atPos(pos) { (receiver /: argss) (Apply) }
}
@@ -1702,22 +1701,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* - there is a getter for the original (non-specialized) field in the same class
* - there is a getter for the specialized field in the same class
*/
- def initializesSpecializedField(f: Symbol): Boolean =
- (f.name.endsWith("$sp")
- && clazz.info.member(nme.originalName(f.name)).isPublic
- && (clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol))
+ def initializesSpecializedField(f: Symbol) = (
+ (f.name endsWith nme.SPECIALIZED_SUFFIX_NAME)
+ && clazz.info.member(nme.originalName(f.name)).isPublic
+ && clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
+ )
- val argss = paramss map (_ map (x =>
+ val argss = mmap(paramss)(x =>
if (initializesSpecializedField(x.symbol))
gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe)
else
- Ident(x.symbol))
+ Ident(x.symbol)
)
atPos(pos) { (receiver /: argss) (Apply) }
}
/** Concrete methods that use a specialized type, or override such methods. */
- private val concreteSpecMethods: mutable.Set[Symbol] = new mutable.HashSet
+ private val concreteSpecMethods = new mutable.HashSet[Symbol]()
/** Add method m to the set of symbols for which we need an implementation tree
* in the tree transformer.
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index ca16e491e2..e2cd0a8402 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -128,7 +128,7 @@ abstract class TailCalls extends Transform {
* the label field.
*/
this.label = {
- val label = method.newLabel(method.pos, "_" + method.name)
+ val label = method.newLabel(method.pos, newTermName("_" + method.name))
val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 91ac00d946..adb408f7e4 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -291,46 +291,90 @@ abstract class UnCurry extends InfoTransform
val substParam = new TreeSymSubstituter(List(vparam), List(idparam))
def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
+ // waiting here until we can mix case classes and extractors reliably (i.e., when virtpatmat becomes the default)
+ // object VirtPatmatOpt {
+ // object Last {
+ // def unapply[T](xs: List[T]) = xs.lastOption
+ // }
+ // // keep this in synch by what's generated by combineCases/runOrElse
+ // object MatcherBlock {
+ // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat??
+ // case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats)
+ // case _ => None
+ // }
+ // }
+ // // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily
+ // // case Block(Last(P)) =>
+ // // case P =>
+ // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match {
+ // case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree])
+ // case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner))
+ // case b => treeBrowser browse b; None
+ // }
+ // }
+
+ // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly
+ def dupMatch(selector: Tree, cases: List[CaseDef], wrap: Match => Tree = identity) = {
+ def transformCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, Literal(Constant(true)))
+ def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+
+ gen.mkUncheckedMatch(
+ if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
+ else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate)
+ )
+ }
+
+ def dupVirtMatch(zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], wrap: Block => Tree = identity) = {
+ object dropMatchResAssign extends Transformer {
+ // override val treeCopy = newStrictTreeCopier // will duplicate below
+ override def transform(tree: Tree): Tree = tree match {
+ // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
+ case Block(List(matchRes, ass@Assign(keepGoingLhs, falseLit)), zero) if keepGoingLhs.symbol eq keepGoing.symbol =>
+ Block(List(ass), zero)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
+ val idaBlock = wrap(Block(
+ zero ::
+ x ::
+ /* drop matchRes def */
+ keepGoing ::
+ statsNoMatchRes,
+ NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` by `!keepGoing`
+ ))
+ substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
+ }
+
DefDef(m, (fun.body: @unchecked) match {
case Match(selector, cases) =>
- def transformCase(cdef: CaseDef): CaseDef =
- substTree(CaseDef(cdef.pat.duplicate, cdef.guard.duplicate, Literal(Constant(true))))
- def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
-
- gen.mkUncheckedMatch(
- if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
- else Match(substTree(selector.duplicate), (cases map transformCase) :+ defaultCase)
- )
- // TODO: check tgt.tpe.typeSymbol isNonBottomSubclass MatchingStrategyClass
+ dupMatch(selector, cases)
+ case Block((vd: ValDef) :: Nil, Match(selector, cases)) => // can't factor this out using an extractor due to bugs in the old pattern matcher
+ dupMatch(selector, cases, m => Block(List(vd), m))
+ // virtpatmat -- TODO: find a better way to keep this in synch with the code generated by patmatvirtualizer
case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat =>
object noOne extends Transformer {
override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = tgt.tpe member "caseResult".toTermName
+ val one = tgt.tpe member newTermName("one")
override def transform(tree: Tree): Tree = tree match {
case Apply(fun, List(a)) if fun.symbol == one =>
// blow one's argument away since all we want to know is whether the match succeeds or not
// (the alternative, making `one` CBN, would entail moving away from Option)
- val zero = // must use subtyping (no need for equality thanks to covariance), as otherwise we miss types like `Any with Int`
- if (UnitClass.tpe <:< a.tpe) Literal(Constant())
- else if (BooleanClass.tpe <:< a.tpe) Literal(Constant(false))
- else if (FloatClass.tpe <:< a.tpe) Literal(Constant(0.0f))
- else if (DoubleClass.tpe <:< a.tpe) Literal(Constant(0.0d))
- else if (ByteClass.tpe <:< a.tpe) Literal(Constant(0.toByte))
- else if (ShortClass.tpe <:< a.tpe) Literal(Constant(0.toShort))
- else if (IntClass.tpe <:< a.tpe) Literal(Constant(0))
- else if (LongClass.tpe <:< a.tpe) Literal(Constant(0L))
- else if (CharClass.tpe <:< a.tpe) Literal(Constant(0.toChar))
- else {
- val tpA = a.tpe.normalize
- if (NullClass.tpe <:< tpA) NULL
- else gen.mkCast(NULL, tpA) // must cast, at least when a.tpe <:< NothingClass.tpe
- }
- Apply(fun.duplicate, List(zero))
+ Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe)))
case _ =>
super.transform(tree)
}
}
- substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member("isSuccess".toTermName)), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
+ substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform)))
+ // for the optimized version of virtpatmat
+ case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) if opt.virtPatmat =>
+ dupVirtMatch(zero, x, matchRes, keepGoing, stats)
+ case Block(outerStats, Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _)) if opt.virtPatmat => // can't factor this out using an extractor due to bugs in the old pattern matcher
+ dupVirtMatch(zero, x, matchRes, keepGoing, stats, m => Block(outerStats, m))
+ // case other =>
+ // treeBrowser browse other
})
}
@@ -374,7 +418,9 @@ abstract class UnCurry extends InfoTransform
assert(toArraySym != NoSymbol)
def getManifest(tp: Type): Tree = {
val manifestOpt = localTyper.findManifest(tp, false)
- if (!manifestOpt.tree.isEmpty) manifestOpt.tree
+ // Don't want bottom types getting any further than this (SI-4024)
+ if (tp.typeSymbol.isBottomClass) getManifest(AnyClass.tpe)
+ else if (!manifestOpt.tree.isEmpty) manifestOpt.tree
else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi)
else localTyper.getManifestTree(tree.pos, tp, false)
}
@@ -406,7 +452,7 @@ abstract class UnCurry extends InfoTransform
atPhase(phase.next) {
if (isJava && isPrimitiveArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
suffix = localTyper.typedPos(pos) {
- gen.mkRuntimeCall("toObjectArray", List(suffix))
+ gen.mkRuntimeCall(nme.toObjectArray, List(suffix))
}
}
}
@@ -415,7 +461,7 @@ abstract class UnCurry extends InfoTransform
val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args
- (formals, args1).zipped map { (formal, arg) =>
+ map2(formals, args1) { (formal, arg) =>
if (!isByNameParamType(formal)) {
arg
} else if (isByNameRef(arg)) {
@@ -513,6 +559,7 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
val sym = tree.symbol
+ if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
// a local variable that is mutable and free somewhere later should be lifted
// as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
@@ -724,7 +771,7 @@ abstract class UnCurry extends InfoTransform
case p => p.symbol.tpe
}
val forwresult = dd.symbol.tpe.finalResultType
- val forwformsyms = (forwformals, flatparams).zipped map ((tp, oldparam) =>
+ val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
currentClass.newValueParameter(oldparam.symbol.pos, oldparam.name).setInfo(tp)
)
def mono = MethodType(forwformsyms, forwresult)
@@ -742,7 +789,7 @@ abstract class UnCurry extends InfoTransform
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = (forwsym ARGS, flatparams).zipped map {
+ val locals = map2(forwsym ARGS, flatparams) {
case (_, fp) if !rpsymbols(fp.symbol) => null
case (argsym, fp) =>
Block(Nil,
@@ -752,7 +799,7 @@ abstract class UnCurry extends InfoTransform
)
)
}
- val seqargs = (locals, forwsym ARGS).zipped map {
+ val seqargs = map2(locals, forwsym ARGS) {
case (null, argsym) => Ident(argsym)
case (l, _) => l
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 031be21f24..3536608efd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -69,7 +69,7 @@ abstract class Duplicators extends Analyzer {
* tree, except for TypeTrees, are erased prior to type checking. TypeTrees
* are fixed by substituting invalid symbols for the new ones.
*/
- class BodyDuplicator(context: Context) extends Typer(context: Context) {
+ class BodyDuplicator(_context: Context) extends Typer(_context) {
class FixInvalidSyms extends TypeMap {
@@ -248,7 +248,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
- if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE)
+ //if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
vdef.tpt.tpe = fixType(vdef.tpt.tpe)
vdef.tpe = null
super.typed(vdef, mode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 92be241951..3b90eaeed7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -214,10 +214,10 @@ trait Implicits {
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
+ val dummyMethod = new TermSymbol(NoSymbol, NoPosition, newTermName("typer$dummy"))
+ def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
+
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
- def templateArgType(argtpe: Type) =
- new BoundedWildcardType(TypeBounds(argtpe, AnyClass.tpe))
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "typer$dummy")
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
}
@@ -740,7 +740,7 @@ trait Implicits {
)
private def isIneligible(info: ImplicitInfo) = (
info.isCyclicOrErroneous
- || isView && isConforms(info.sym)
+ || isView && isPredefMemberNamed(info.sym, nme.conforms)
|| isShadowed(info.name)
)
@@ -760,15 +760,6 @@ trait Implicits {
*/
private def checkValid(sym: Symbol) = isValid(sym) || { invalidImplicits += sym ; false }
- /** Is `sym` the standard conforms method in Predef?
- * Note: DON't replace this by sym == Predef_conforms, as Predef_conforms is a `def`
- * which does a member lookup (it can't be a lazy val because we might reload Predef
- * during resident compilations).
- */
- private def isConforms(sym: Symbol) = (
- (sym.name == nme.conforms) && (sym.owner == PredefModule.moduleClass)
- )
-
/** Preventing a divergent implicit from terminating implicit search,
* so that if there is a best candidate it can still be selected.
*/
@@ -825,7 +816,14 @@ trait Implicits {
val newPending = undoLog undo {
is filterNot (alt => alt == i || {
try improves(i, alt)
- catch { case e: CyclicReference => true }
+ catch {
+ case e: CyclicReference =>
+ if (printInfers) {
+ println(i+" discarded because cyclic reference occurred")
+ e.printStackTrace()
+ }
+ true
+ }
})
}
rankImplicits(newPending, i :: acc)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 79db9ab000..295b66b17f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -9,7 +9,6 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
-import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
import symtab.Flags._
import scala.annotation.tailrec
@@ -180,7 +179,7 @@ trait Infer {
case NullaryMethodType(restpe) =>
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
- ExistentialType(tparams, normalize(qtpe))
+ newExistentialType(tparams, normalize(qtpe))
case tp1 =>
tp1 // @MAT aliases already handled by subtyping
}
@@ -459,13 +458,14 @@ trait Infer {
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
- (tparams, tvars).zipped map ((tparam, tvar) =>
+ map2(tparams, tvars)((tparam, tvar) =>
instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
else
tvars map (tvar => WildcardType)
}
object AdjustedTypeArgs {
+ val Result = collection.mutable.LinkedHashMap
type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
@@ -508,24 +508,27 @@ trait Infer {
* type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
- @inline def keep(targ: Type, tparam: Symbol) = (
- targ.typeSymbol != NothingClass // definitely not retracting, it's not Nothing!
- || (!restpe.isWildcard && (varianceInType(restpe)(tparam) & COVARIANT) != 0)) // occured covariantly --> don't retract
-
- @inline def adjusted(targ: Type, tvar: TypeVar) =
- if (targ.typeSymbol == RepeatedParamClass)
- targ.baseType(SeqClass)
- else if (targ.typeSymbol == JavaRepeatedParamClass)
- targ.baseType(ArrayClass)
- // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
- else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden))
- targ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else
- targ.widen
+ val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
+
+ foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
+ val retract = (
+ targ.typeSymbol == NothingClass // only retract Nothings
+ && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ )
- (tparams, tvars, targs).zipped.map { (tparam, tvar, targ) =>
- tparam -> (if(keep(targ, tparam)) Some(adjusted(targ, tvar)) else None)
- }(collection.breakOut)
+ // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
+ buf += ((tparam,
+ if (retract) None
+ else Some(
+ if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
+ else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
+ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
+ else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+ else targ.widen
+ )
+ ))
+ }
+ buf.result
}
/** Return inferred type arguments, given type parameters, formal parameters,
@@ -584,7 +587,7 @@ trait Infer {
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
- (argtpes, formals).zipped map { (argtpe, formal) =>
+ map2(argtpes, formals) { (argtpe, formal) =>
val tp1 = argtpe.deconst.instantiateTypeParams(tparams, tvars)
val pt1 = formal.instantiateTypeParams(tparams, tvars)
@@ -756,7 +759,8 @@ trait Infer {
typesCompatible(reorderArgs(argtpes1, argPos))
)
}
- } else {
+ }
+ else {
// not enough arguments, check if applicable using defaults
val missing = missingParams[Type](argtpes0, params, {
case NamedType(name, _) => Some(name)
@@ -994,39 +998,13 @@ trait Infer {
}
}
-
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- // @M TODO this method is duplicated all over the place (varianceString)
- def varStr(s: Symbol): String =
- if (s.isCovariant) "covariant"
- else if (s.isContravariant) "contravariant"
- else "invariant";
-
- def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
- if((a0 eq b0) || (a0.owner eq b0.owner)) ""
- else {
- var a = a0; var b = b0
- while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
- if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
- }
- }
-
- val errors = checkKindBounds0(tparams, targs, pre, owner, true)
- val errorMessages = new ListBuffer[String]
- errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages +=
- (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
- (for ((a, p) <- arityMismatches)
- yield a+qualify(a,p)+ " has "+countElementsAsString(a.typeParams.length, "type parameter")+", but "+
- p+qualify(p,a)+" has "+countAsString(p.typeParams.length)).toList.mkString(", ") +
- (for ((a, p) <- varianceMismatches)
- yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
- p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
- (for ((a, p) <- stricterBounds)
- yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
- p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", "))
+ checkKindBounds0(tparams, targs, pre, owner, true) map {
+ case (targ, tparam, kindErrors) =>
+ kindErrors.errorMessage(targ, tparam)
}
- errorMessages.toList
}
+
/** Substitute free type variables `undetparams` of polymorphic argument
* expression `tree`, given two prototypes `strictPt`, and `lenientPt`.
* `strictPt` is the first attempt prototype where type parameters
@@ -1311,7 +1289,11 @@ trait Infer {
case TypeRef(_, sym, _) if isLocalBinding(sym) =>
;
case _ =>
- patternWarning(arg, "non variable type-argument ")
+ // Want to warn about type arguments, not type parameters. Otherwise we'll
+ // see warnings about "invisible" types, like: val List(x0) = x1 leading to "non
+ // variable type-argument A in type pattern List[A]..."
+ if (!arg.typeSymbol.isTypeParameterOrSkolem)
+ patternWarning(arg, "non variable type-argument ")
}
}
}
@@ -1465,8 +1447,17 @@ trait Infer {
/** A traverser to collect type parameters referred to in a type
*/
object freeTypeParamsOfTerms extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean =
- sym.isAbstractType && sym.owner.isTerm
+ // An inferred type which corresponds to an unknown type
+ // constructor creates a file/declaration order-dependent crasher
+ // situation, the behavior of which depends on the state at the
+ // time the typevar is created. Until we can deal with these
+ // properly, we can avoid it by ignoring type parameters which
+ // have type constructors amongst their bounds. See SI-4070.
+ protected def includeCondition(sym: Symbol) = (
+ sym.isAbstractType
+ && sym.owner.isTerm
+ && !sym.info.bounds.exists(_.typeParams.nonEmpty)
+ )
}
/** A traverser to collect type parameters referred to in a type
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 99ba0e0971..b9264aae55 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -7,13 +7,10 @@ trait Macros { self: Analyzer =>
import global._
import definitions._
- def macroMethName(name: Name) =
- newTermName((if (name.isTypeName) "type" else "def") + "macro$" + name)
-
def macroMeth(mac: Symbol): Symbol = {
var owner = mac.owner
if (!owner.isModuleClass) owner = owner.companionModule.moduleClass
- owner.info.decl(macroMethName(mac.name))
+ owner.info.decl(nme.macroMethodName(mac.name))
}
/**
@@ -37,21 +34,21 @@ trait Macros { self: Analyzer =>
def macroMethDef(mdef: DefDef): Tree = {
def paramDef(name: Name, tpt: Tree) = ValDef(Modifiers(PARAM), name, tpt, EmptyTree)
val universeType = TypeTree(ReflectApiUniverse.tpe)
- val globParamSec = List(paramDef("glob", universeType))
- def globSelect(name: Name) = Select(Ident("glob"), name)
+ val globParamSec = List(paramDef(nme.glob, universeType))
+ def globSelect(name: Name) = Select(Ident(nme.glob), name)
def globTree = globSelect(newTypeName("Tree"))
def globType = globSelect(newTypeName("Type"))
- val thisParamSec = if (mdef.symbol.owner.isModuleClass) List() else List(paramDef("_this", globTree))
+ val thisParamSec = if (mdef.symbol.owner.isModuleClass) List() else List(paramDef(newTermName("_this"), globTree))
def tparamInMacro(tdef: TypeDef) = paramDef(tdef.name.toTermName, globType)
def vparamInMacro(vdef: ValDef): ValDef = paramDef(vdef.name, globTree)
def wrapImplicit(tree: Tree) = atPos(tree.pos) {
- Block(List(ValDef(Modifiers(IMPLICIT), "$glob", universeType, Ident("glob"))), tree)
+ Block(List(ValDef(Modifiers(IMPLICIT), newTermName("$" + nme.glob), universeType, Ident(nme.glob))), tree)
}
atPos(mdef.pos) {
new DefDef( // can't call DefDef here; need to find out why
mods = mdef.mods &~ MACRO,
- name = macroMethName(mdef.name),
+ name = nme.macroMethodName(mdef.name),
tparams = List(),
vparamss = globParamSec :: thisParamSec :: (mdef.tparams map tparamInMacro) ::
(mdef.vparamss map (_ map vparamInMacro)),
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index d75e119fd7..62393befd2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -7,22 +7,7 @@ package typechecker
import symtab.Flags._
import scala.collection.{ mutable, immutable }
-
-object listutil {
- def mexists[T](xss: List[List[T]])(p: T => Boolean) =
- xss exists (_ exists p)
- def mmap[T, U](xss: List[List[T]])(f: T => U) =
- xss map (_ map f)
- def mforeach[T](xss: List[List[T]])(f: T => Unit) =
- xss foreach (_ foreach f)
- def mfind[T](xss: List[List[T]])(p: T => Boolean): Option[T] = {
- for (xs <- xss; x <- xs)
- if (p(x)) return Some(x)
- None
- }
- def mfilter[T](xss: List[List[T]])(p: T => Boolean) =
- for (xs <- xss; x <- xs; if p(x)) yield x
-}
+import scala.tools.util.StringOps.{ ojoin }
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
@@ -166,8 +151,9 @@ trait MethodSynthesis {
}
}
private def logDerived(result: Tree): Tree = {
- val id = List(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName) filterNot (_ == "") mkString " "
- log("[+derived] " + id + " (" + derivedSym + ")\n " + result)
+ log("[+derived] " + ojoin(mods.defaultFlagString, basisSym.accurateKindString, basisSym.getterName.decode)
+ + " (" + derivedSym + ")\n " + result)
+
result
}
final def derive(initial: List[AnnotationInfo]): Tree = {
@@ -262,7 +248,7 @@ trait MethodSynthesis {
}
sealed abstract class BeanAccessor(bean: String) extends DerivedFromValDef {
- def name = bean + tree.name.toString.capitalize
+ val name = newTermName(bean + tree.name.toString.capitalize)
def flagsMask = BeanPropertyFlags
def flagsExtra = 0
override def derivedSym = enclClass.info decl name
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 0f57285480..200191fa13 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -99,7 +99,7 @@ trait Namers extends MethodSynthesis {
}
def enterValueParams(vparamss: List[List[ValDef]]): List[List[Symbol]] = {
- listutil.mmap(vparamss) { param =>
+ mmap(vparamss) { param =>
val sym = assignSymbol(param, param.name, mask = ValueParameterFlags)
setPrivateWithin(param, sym)
enterInScope(sym)
@@ -422,7 +422,7 @@ trait Namers extends MethodSynthesis {
* @return the companion object symbol.
*/
def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
- val m = companionModuleOf(cdef.symbol, context)
+ val m = companionSymbolOf(cdef.symbol, context)
// @luc: not sure why "currentRun.compiles(m)" is needed, things breaks
// otherwise. documentation welcome.
//
@@ -522,13 +522,13 @@ trait Namers extends MethodSynthesis {
val vparamss = tree match { case x: DefDef => x.vparamss ; case _ => Nil }
val cparamss = constructorType.paramss
- for ((vparams, cparams) <- vparamss zip cparamss) {
- for ((param, cparam) <- vparams zip cparams) {
+ map2(vparamss, cparamss)((vparams, cparams) =>
+ map2(vparams, cparams)((param, cparam) =>
// need to clone the type cparam.tpe???
// problem is: we don't have the new owner yet (the new param symbol)
param.tpt setType subst(cparam.tpe)
- }
- }
+ )
+ )
}
sym setInfo {
mkTypeCompleter(tree) { copySym =>
@@ -579,7 +579,7 @@ trait Namers extends MethodSynthesis {
// via "x$lzy" as can be seen in test #3927.
val sym = (
if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.pos, tree.name + "$lzy") setFlag tree.mods.flags resetFlag IMPLICIT
+ else owner.newValue(tree.pos, tree.name append nme.LAZY_LOCAL) setFlag tree.mods.flags resetFlag IMPLICIT
)
enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
}
@@ -627,7 +627,7 @@ trait Namers extends MethodSynthesis {
classOfModuleClass(m.moduleClass) = new WeakReference(tree)
}
val hasDefault = impl.body exists {
- case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => listutil.mexists(vparamss)(_.mods.hasDefault)
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
case _ => false
}
if (hasDefault) {
@@ -855,7 +855,7 @@ trait Namers extends MethodSynthesis {
// @check: this seems to work only if the type completer of the class runs before the one of the
// module class: the one from the module class removes the entry from classOfModuleClass (see above).
if (clazz.isClass && !clazz.hasModuleFlag) {
- val modClass = companionModuleOf(clazz, context).moduleClass
+ val modClass = companionSymbolOf(clazz, context).moduleClass
Namers.this.classOfModuleClass get modClass map { cdefRef =>
val cdef = cdefRef()
@@ -953,9 +953,9 @@ trait Namers extends MethodSynthesis {
// def overriddenSymbol = meth.nextOverriddenSymbol
// fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || listutil.mexists(vparamss)(_.tpt.isEmpty))) {
+ if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
// try to complete from matching definition in base type
- listutil.mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
val overridden = overriddenSymbol
if (overridden != NoSymbol && !overridden.isOverloaded) {
overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
@@ -993,7 +993,7 @@ trait Namers extends MethodSynthesis {
_.info.isInstanceOf[MethodType])) {
vparamSymss = List(List())
}
- listutil.mforeach(vparamss) { vparam =>
+ mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
context.error(vparam.pos, "missing parameter type")
vparam.tpt defineType ErrorType
@@ -1073,7 +1073,7 @@ trait Namers extends MethodSynthesis {
// Create trees for the defaultGetter. Uses tools from Unapplies.scala
var deftParams = tparams map copyUntyped[TypeDef]
- val defvParamss = listutil.mmap(previous) { p =>
+ val defvParamss = mmap(previous) { p =>
// in the default getter, remove the default parameter
val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
UnTyper.traverse(p1)
@@ -1082,7 +1082,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionModuleOf(clazz, context)
+ val module = companionSymbolOf(clazz, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
classAndNamerOfModule get module match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index e4ebe13217..a8dfea02ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -189,7 +189,7 @@ trait NamesDefaults { self: Analyzer =>
if (pre == NoType) {
None
} else {
- val module = companionModuleOf(baseFun.symbol.owner, context)
+ val module = companionSymbolOf(baseFun.symbol.owner, context)
if (module == NoSymbol) None
else {
val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
@@ -260,7 +260,7 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
- val symPs = (args, paramTypes).zipped map ((arg, tpe) => {
+ val symPs = map2(args, paramTypes)((arg, tpe) => {
val byName = isByNameParamType(tpe)
val (argTpe, repeated) =
if (isScalaRepeatedParamType(tpe)) arg match {
@@ -276,7 +276,7 @@ trait NamesDefaults { self: Analyzer =>
s.setInfo(valType)
(context.scope.enter(s), byName, repeated)
})
- (symPs, args).zipped map {
+ map2(symPs, args) {
case ((sym, byName, repeated), arg) =>
val body =
if (byName) {
@@ -326,13 +326,15 @@ trait NamesDefaults { self: Analyzer =>
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => {
+ val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
val ref = gen.mkAttributedRef(vDef.symbol)
atPos(vDef.pos.focus) {
// for by-name parameters, the local value is a nullary function returning the argument
- if (isByNameParamType(tpe)) Apply(ref, List())
- else if (isScalaRepeatedParamType(tpe)) Typed(ref, Ident(tpnme.WILDCARD_STAR))
- else ref
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
@@ -340,7 +342,7 @@ trait NamesDefaults { self: Analyzer =>
res.setPos(res.pos.makeTransparent)
val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos)
context.namedApplyBlockInfo =
- Some((block, NamedApplyInfo(qual, targs, vargss ::: List(refArgs), blockTyper)))
+ Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
}
}
@@ -414,7 +416,7 @@ trait NamesDefaults { self: Analyzer =>
if (i > 0) {
val defGetterName = nme.defaultGetterName(param.owner.name, i)
if (param.owner.isConstructor) {
- val mod = companionModuleOf(param.owner.owner, context)
+ val mod = companionSymbolOf(param.owner.owner, context)
mod.info.member(defGetterName)
}
else {
@@ -430,6 +432,80 @@ trait NamesDefaults { self: Analyzer =>
}
} else NoSymbol
}
+
+ private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
+ val savedParams = context.extractUndetparams()
+ val savedReporting = context.reportAmbiguousErrors
+
+ context.reportAmbiguousErrors = false
+ try fn(savedParams)
+ finally {
+ context.reportAmbiguousErrors = savedReporting
+ //@M note that we don't get here when an ambiguity was detected (during the computation of res),
+ // as errorTree throws an exception
+ context.undetparams = savedParams
+ }
+ }
+
+ /** Fast path for ambiguous assignment check.
+ */
+ private def isNameInScope(context: Context, name: Name) = (
+ context.enclosingContextChain exists (ctx =>
+ (ctx.scope.lookupEntry(name) != null)
+ || (ctx.owner.rawInfo.member(name) != NoSymbol)
+ )
+ )
+
+ /** A full type check is very expensive; let's make sure there's a name
+ * somewhere which could potentially be ambiguous before we go that route.
+ */
+ private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = {
+ import typer.context
+ isNameInScope(context, param.name) && {
+ // for named arguments, check whether the assignment expression would
+ // typecheck. if it does, report an ambiguous error.
+ val paramtpe = param.tpe.cloneInfo(param)
+ // replace type parameters by wildcard. in the below example we need to
+ // typecheck (x = 1) with wildcard (not T) so that it succeeds.
+ // def f[T](x: T) = x
+ // var x = 0
+ // f(x = 1) << "x = 1" typechecks with expected type WildcardType
+ savingUndeterminedTParams(context) { udp =>
+ val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
+ override def apply(tp: Type): Type = super.apply(tp match {
+ case TypeRef(_, ByNameParamClass, x :: Nil) => x
+ case _ => tp
+ })
+ }
+ // This throws an exception which is caught in `tryTypedApply` (as it
+ // uses `silent`) - unfortunately, tryTypedApply recovers from the
+ // exception if you use errorTree(arg, ...) and conforms is allowed as
+ // a view (see tryImplicit in Implicits) because it tries to produce a
+ // new qualifier (if the old one was P, the new one will be
+ // conforms.apply(P)), and if that works, it pretends nothing happened.
+ //
+ // To make sure tryTypedApply fails, we would like to pass EmptyTree
+ // instead of arg, but can't do that because eventually setType(ErrorType)
+ // is called, and EmptyTree can only be typed NoType. Thus we need to
+ // disable conforms as a view...
+ try typer.silent(_.typed(arg, subst(paramtpe))) match {
+ case t: Tree => !t.isErroneous
+ case _ => false
+ }
+ catch {
+ // `silent` only catches and returns TypeErrors which are not
+ // CyclicReferences. Fix for #3685
+ case cr @ CyclicReference(sym, _) =>
+ (sym.name == param.name) && sym.accessedOrSelf.isVariable && {
+ context.error(sym.pos,
+ "variable definition needs type because '%s' is used as a named argument in its body.".format(sym.name))
+ typer.infer.setError(arg)
+ true
+ }
+ }
+ }
+ }
+ }
/**
* Removes name assignments from args. Additionally, returns an array mapping
@@ -439,71 +515,38 @@ trait NamesDefaults { self: Analyzer =>
* after named ones.
*/
def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = {
- import typer.infer.errorTree
-
- // maps indicies from (order written by user) to (order of definition)
- val argPos = (new Array[Int](args.length)) map (x => -1)
+ import typer.context
+ // maps indices from (order written by user) to (order of definition)
+ val argPos = Array.fill(args.length)(-1)
var positionalAllowed = true
- val namelessArgs = for ((arg, index) <- (args.zipWithIndex)) yield arg match {
- case a @ AssignOrNamedArg(Ident(name), rhs) =>
- val (pos, newName) = paramPos(params, name)
- newName.foreach(n => {
- typer.context.unit.deprecationWarning(arg.pos, "the parameter name "+ name +" has been deprecated. Use "+ n +" instead.")
- })
- if (pos == -1) {
- if (positionalAllowed) {
- argPos(index) = index
- // prevent isNamed from being true when calling doTypedApply recursively,
- // treat the arg as an assignment of type Unit
- Assign(a.lhs, rhs).setPos(arg.pos)
- } else {
- errorTree(arg, "unknown parameter name: "+ name)
- }
- } else if (argPos contains pos) {
- errorTree(arg, "parameter specified twice: "+ name)
- } else {
- // for named arguments, check whether the assignment expression would
- // typecheck. if it does, report an ambiguous error.
- val param = params(pos)
- val paramtpe = params(pos).tpe.cloneInfo(param)
- // replace type parameters by wildcard. in the below example we need to
- // typecheck (x = 1) with wildcard (not T) so that it succeeds.
- // def f[T](x: T) = x
- // var x = 0
- // f(x = 1) << "x = 1" typechecks with expected type WildcardType
- val udp = typer.context.extractUndetparams()
- val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = tp match {
- case TypeRef(_, ByNameParamClass, List(arg)) => super.apply(arg)
- case _ => super.apply(tp)
+ val namelessArgs = mapWithIndex(args) { (arg, index) =>
+ def fail(msg: String) = typer.infer.errorTree(arg, msg)
+ arg match {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ def matchesName(param: Symbol) = !param.isSynthetic && (
+ (param.name == name) || (param.deprecatedParamName match {
+ case Some(`name`) =>
+ context.unit.deprecationWarning(arg.pos,
+ "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
+ true
+ case _ => false
+ })
+ )
+ val pos = params indexWhere matchesName
+ if (pos == -1) {
+ if (positionalAllowed) {
+ argPos(index) = index
+ // prevent isNamed from being true when calling doTypedApply recursively,
+ // treat the arg as an assignment of type Unit
+ Assign(arg.lhs, rhs) setPos arg.pos
}
+ else fail("unknown parameter name: " + name)
}
- val reportAmbiguousErrors = typer.context.reportAmbiguousErrors
- typer.context.reportAmbiguousErrors = false
-
- var variableNameClash = false
- val typedAssign = try {
- typer.silent(_.typed(arg, subst(paramtpe)))
- } catch {
- // `silent` only catches and returns TypeErrors which are not
- // CyclicReferences. Fix for #3685
- case cr @ CyclicReference(sym, info) if sym.name == param.name =>
- if (sym.isVariable || sym.isGetter && sym.accessed.isVariable) {
- // named arg not allowed
- variableNameClash = true
- typer.context.error(sym.pos,
- "%s definition needs %s because '%s' is used as a named argument in its body.".format(
- "variable", // "method"
- "type", // "result type"
- sym.name
- )
- )
- typer.infer.setError(arg)
- }
- else cr
- }
-
- def applyNamedArg = {
+ else if (argPos contains pos)
+ fail("parameter specified twice: " + name)
+ else if (isAmbiguousAssignment(typer, params(pos), arg))
+ fail("reference to " + name + " is ambiguous; it is both a method parameter and a variable in scope.")
+ else {
// if the named argument is on the original parameter
// position, positional after named is allowed.
if (index != pos)
@@ -511,63 +554,13 @@ trait NamesDefaults { self: Analyzer =>
argPos(index) = pos
rhs
}
-
- val res = typedAssign match {
- case _: TypeError => applyNamedArg
-
- case t: Tree =>
- if (t.isErroneous && !variableNameClash) {
- applyNamedArg
- } else if (t.isErroneous) {
- t // name clash with variable. error was already reported above.
- } else {
- // This throws an exception which is caught in `tryTypedApply` (as it
- // uses `silent`) - unfortunately, tryTypedApply recovers from the
- // exception if you use errorTree(arg, ...) and conforms is allowed as
- // a view (see tryImplicit in Implicits) because it tries to produce a
- // new qualifier (if the old one was P, the new one will be
- // conforms.apply(P)), and if that works, it pretends nothing happened.
- //
- // To make sure tryTypedApply fails, we would like to pass EmptyTree
- // instead of arg, but can't do that because eventually setType(ErrorType)
- // is called, and EmptyTree can only be typed NoType. Thus we need to
- // disable conforms as a view...
- errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+
- "name of the method and the name of a variable currently in scope.")
- }
- }
-
- typer.context.reportAmbiguousErrors = reportAmbiguousErrors
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- typer.context.undetparams = udp
- res
- }
- case _ =>
- argPos(index) = index
- if (positionalAllowed) arg
- else errorTree(arg, "positional after named argument.")
- }
- (namelessArgs, argPos)
- }
-
- /**
- * Returns
- * - the position of the parameter named `name`
- * - optionally, if `name` is @deprecatedName, the new name
- */
- def paramPos(params: List[Symbol], name: Name): (Int, Option[Name]) = {
- var i = 0
- var rest = params
- while (!rest.isEmpty) {
- val p = rest.head
- if (!p.isSynthetic) {
- if (p.name == name) return (i, None)
- if (p.deprecatedParamName == Some(name)) return (i, Some(p.name))
+ case _ =>
+ argPos(index) = index
+ if (positionalAllowed) arg
+ else fail("positional after named argument.")
}
- i += 1
- rest = rest.tail
}
- (-1, None)
+
+ (namelessArgs, argPos)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index 23d855f7b3..440db4300c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -46,8 +46,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
import global._
import definitions._
- private lazy val matchingStrategyTycon = definitions.getClass("scala.MatchingStrategy").typeConstructor
-
class MatchTranslator(typer: Typer) extends MatchCodeGen {
def typed(tree: Tree, mode: Int, pt: Type): Tree = typer.typed(tree, mode, pt) // for MatchCodeGen -- imports don't provide implementations for abstract members
@@ -55,7 +53,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
import typeDebug.{ ptTree, ptBlock, ptLine }
def solveContextBound(contextBoundTp: Type): (Tree, Type) = {
- val solSym = NoSymbol.newTypeParameter(NoPosition, "SolveImplicit$".toTypeName)
+ val solSym = NoSymbol.newTypeParameter(NoPosition, newTypeName("SolveImplicit$"))
val param = solSym.setInfo(contextBoundTp.typeSymbol.typeParams(0).info.cloneInfo(solSym)) // TypeBounds(NothingClass.typeConstructor, baseTp)
val pt = appliedType(contextBoundTp, List(param.tpeHK))
val savedUndets = context.undetparams
@@ -67,7 +65,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
(result.tree, result.subst.to(result.subst.from indexOf param))
}
- lazy val (matchingStrategy, matchingMonadType) = solveContextBound(matchingStrategyTycon)
+ lazy val (matchingStrategy, matchingMonadType) = solveContextBound(MatchingStrategyClass.typeConstructor)
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -88,9 +86,9 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
val scrutSym = freshSym(scrut.pos, scrutType)
-
+ val okPt = repeatedToSeq(pt)
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- fixerUpper(context.owner, scrut.pos)(combineCases(scrut, scrutSym, cases map translateCase(scrutSym), repeatedToSeq(pt)))
+ combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, context.owner)
}
@@ -122,8 +120,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- (translatePattern(scrutSym, pattern) ++ translateGuard(guard), translateBody(body))
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
+ translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
}
def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
@@ -136,7 +134,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
if (!extractor.isTyped) throw new TypeError(pos, "Could not typecheck extractor call: "+ extractor)
- if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+ // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
// must use type `tp`, which is provided by extractor's result, not the type expected by binder,
// as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
@@ -239,16 +237,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
case Alternative(alts) =>
- val altTrees = alts map { alt =>
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- // `one(x) : T` where x is the binder before this pattern, which will be replaced by the binder for the alternative by TreeMaker.singleBinder below
- // T is the widened type of the previous binder -- this ascription is necessary to infer a clean type for `or` -- the alternative combinator -- in the presence of existential types
- // see pos/virtpatmat_exist1.scala
- combineExtractors(translatePattern(patBinder, alt), pmgen.one(CODE.REF(patBinder), patBinder.info.widen))
- }
-
- noFurtherSubPats(AlternativesTreeMaker(patBinder, altTrees : _*))
+ noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
/* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
case class Foo(x: Int, y: String)
@@ -277,26 +266,31 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
if (guard == EmptyTree) Nil
else List(GuardTreeMaker(guard))
- // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by pmgen.caseResult),
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by pmgen.one),
// so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
// to enable this, probably need to move away from Option to a monad specific to pattern-match,
// so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
// 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
// need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
- def translateBody(body: Tree): Tree = atPos(body.pos)(pmgen.caseResult(body, body.tpe))
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCall(unfun, args)
+ def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
+
+ def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ // THE PRINCIPLED SLOW PATH -- NOT USED
// generate a call to the (synthetically generated) extractor of a case class
// NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one)
// that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY),
// and replace that dummy by a reference to the actual binder in translateExtractorPattern
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
+ def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
// TODO: can we rework the typer so we don't have to do all this twice?
// undo rewrite performed in (5) of adapt
val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
@@ -342,25 +336,20 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}
}
- class ExtractorCall(extractorCallIncludingDummy: Tree, val args: List[Tree]) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+ abstract class ExtractorCall(val args: List[Tree]) {
+ val nbSubPats = args.length
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped
- def resultType = tpe.finalResultType
- def paramType = tpe.paramTypes.head
+ // everything okay, captain?
+ def isTyped : Boolean
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else {
- val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs
- // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply??
- if(monadArgs.length == 1) monadArgs(0)
- else ErrorType
- }
- }
+ def isSeq: Boolean
+ lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ // to which type should the previous binder be casted?
+ def paramType : Type
+
+ // binder has been casted to paramType if necessary
+ def treeMaker(binder: Symbol, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -374,15 +363,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
case bp => bp
}
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
- lazy val nbSubPats = args.length
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
- // the types for the binders corresponding to my subpatterns
- // subPatTypes != args map (_.tpe) since the args may have more specific types than the constructor's parameter types
- // replace last type (of shape Seq[A]) with RepeatedParam[A] so that formalTypes will
- // repeat the last argument type to align the formals with the number of arguments
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -390,75 +370,45 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
formalTypes(rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args), nbSubPats)
} else rawSubPatTypes
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
- val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
-
- val patTreeLifted =
- if (resultType.typeSymbol == BooleanClass) pmgen.cond(extractorApply)
- else extractorApply
-
- val binder = freshSym(pos, resultInMonad) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- val subpatRefs = if (subPatBinders isEmpty) Nil else subPatRefs(binder)
-
- lengthGuard(binder) match {
- case None => ExtractorTreeMaker(patTreeLifted, binder, Substitution(subPatBinders, subpatRefs))
- case Some(lenGuard) => FilteredExtractorTreeMaker(patTreeLifted, lenGuard, binder, Substitution(subPatBinders, subpatRefs))
- }
- }
-
- protected def spliceApply(binder: Symbol): Tree = {
- object splice extends Transformer {
- override def transform(t: Tree) = t match {
- case Apply(x, List(Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder)))
- case _ => super.transform(t)
- }
- }
- splice.transform(extractorCallIncludingDummy)
- }
-
- private lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
+ protected def rawSubPatTypes: List[Type]
- private def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- private def seqTp = rawSubPatTypes.last baseType SeqClass
- private lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- private lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- private lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- private lazy val minLenToCheck = if(lastIsStar) 1 else 0
- private def seqTree(binder: Symbol) = if(firstIndexingBinder == 0) CODE.REF(binder) else pmgen.tupleSel(binder)(firstIndexingBinder+1)
+ protected def seqTp = rawSubPatTypes.last baseType SeqClass
+ protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
+ protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
+ protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = pmgen.tupleSel(binder)(i)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- private def subPatRefs(binder: Symbol): List[Tree] = {
+ // require isSeq
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
// only relevant if isSeq: (here to avoid capturing too much in the returned closure)
val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
val nbIndexingIndices = indexingIndices.length
// this error is checked by checkStarPatOK
// if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ (((1 to firstIndexingBinder) map tupleSel(binder)) ++
+ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ (indexingIndices map pmgen.index(seqTree(binder))) ++
+ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
+ (if(!lastIsStar) Nil else List(
+ if(nbIndexingIndices == 0) seqTree(binder)
+ else pmgen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ }
- (if(isSeq) {
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- ((1 to firstIndexingBinder) map pmgen.tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map pmgen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else pmgen.drop(seqTree(binder))(nbIndexingIndices)))
- }
- else if(nbSubPats == 1) List(CODE.REF(binder))
- else ((1 to nbSubPats) map pmgen.tupleSel(binder))).toList
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] = {
+ if (nbSubPats == 0) Nil
+ else if (isSeq) subPatRefsSeq(binder)
+ else ((1 to nbSubPats) map tupleSel(binder)).toList
}
- private def lengthGuard(binder: Symbol): Option[Tree] =
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
if (!isSeq || (expectedLength < minLenToCheck)) None
else { import CODE._
@@ -475,6 +425,120 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
Some((seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO))
}
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): Product_N
+ // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
+ class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
+ /*override def equals(x$1: Any): Boolean = ...
+ val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
+ */
+ // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
+ // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
+ // println("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
+ // println("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+ private def extractorTp = fun.tpe
+
+ def isTyped = fun.isTyped
+
+ // to which type should the previous binder be casted?
+ def paramType = extractorTp.finalResultType
+
+ def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
+ protected def rawSubPatTypes = extractorTp.paramTypes
+
+ // binder has type paramType
+ def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder), Substitution(subPatBinders, subPatRefs(binder)))
+ }
+
+/* TODO: remove special case when the following bug is fixed
+scala> :paste
+// Entering paste mode (ctrl-D to finish)
+
+class Foo(x: Other) { x._1 } // BUG: can't refer to _1 if its defining class has not been type checked yet
+case class Other(y: String)
+
+// Exiting paste mode, now interpreting.
+
+<console>:8: error: value _1 is not a member of Other
+ class Foo(x: Other) { x._1 }
+ ^
+
+scala> case class Other(y: String)
+defined class Other
+
+scala> class Foo(x: Other) { x._1 }
+defined class Foo */
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
+ val caseAccs = binder.info.typeSymbol.caseFieldAccessors
+ if (caseAccs isDefinedAt (i-1)) REF(binder) DOT caseAccs(i-1)
+ else pmgen.tupleSel(binder)(i)
+ }
+
+ override def toString(): String = "case class "+ (if (extractorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ }
+
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+
+ def tpe = extractorCall.tpe
+ def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
+ def paramType = tpe.paramTypes.head
+ def resultType = tpe.finalResultType
+ def isSeq = extractorCall.symbol.name == nme.unapplySeq
+
+ def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
+ val binder = freshSym(pos, resultInMonad) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass)
+ }
+
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) CODE.REF(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ override protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ else super.subPatRefs(binder)
+
+ protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends Transformer {
+ override def transform(t: Tree) = t match {
+ case Apply(x, List(Ident(nme.SELECTOR_DUMMY))) =>
+ treeCopy.Apply(t, x, List(CODE.REF(binder)))
+ case _ => super.transform(t)
+ }
+ }
+ splice.transform(extractorCallIncludingDummy)
+ }
+
+ // what's the extractor's result type in the monad?
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
+ protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
+ if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ else {
+ val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs
+ // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply??
+ if(monadArgs.length == 1) monadArgs(0)
+ else ErrorType
+ }
+ }
+
+ protected lazy val rawSubPatTypes =
+ if (resultInMonad.typeSymbol eq UnitClass) Nil
+ else if(nbSubPats == 1) List(resultInMonad)
+ else getProductArgs(resultInMonad) match {
+ case Nil => List(resultInMonad)
+ case x => x
+ }
override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
}
@@ -579,55 +643,108 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// the making of the trees
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
trait TreeMakers {
- trait TreeMaker {
- def substitution: Substitution ={
- if (currSub eq null) currSub = initialSubstitution
- currSub
- }
+ def inMatchMonad(tp: Type): Type = appliedType(matchingMonadType, List(tp))
+ lazy val optimizingCodeGen = matchingMonadType.typeSymbol eq OptionClass
+
+ abstract class TreeMaker {
+ def substitution: Substitution =
+ if (currSub eq null) localSubstitution
+ else currSub
- protected def initialSubstitution: Substitution
+ protected def localSubstitution: Substitution
- private[TreeMakers] def addOuterSubstitution(outerSubst: Substitution): TreeMaker = {
- currSub = outerSubst >> substitution
- this
+ private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ if (currSub ne null) {
+ println("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ Thread.dumpStack()
+ }
+ else currSub = outerSubst >> substitution
}
private[this] var currSub: Substitution = null
- def chainBefore(next: Tree): Tree
+ // build Tree that chains `next` after the current extractor
+ def chainBefore(next: Tree, pt: Type): Tree
+ def treesToHoist: List[Tree] = Nil
+ }
+
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker {
+ val localSubstitution: Substitution = EmptySubstitution
+ def chainBefore(next: Tree, pt: Type): Tree = tree
+ }
+
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker {
+ val localSubstitution: Substitution = EmptySubstitution
+ def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(substitution(pmgen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here
}
- case class SubstOnlyTreeMaker(initialSubstitution: Substitution) extends TreeMaker {
- def chainBefore(next: Tree): Tree = substitution(next)
+ case class SubstOnlyTreeMaker(localSubstitution: Substitution) extends TreeMaker {
+ def chainBefore(next: Tree, pt: Type): Tree = substitution(next)
}
- trait FunTreeMaker extends TreeMaker {
+ abstract class FunTreeMaker extends TreeMaker {
val nextBinder: Symbol
- // wrap a Fun (with binder nextBinder) around the next tree (unless nextBinder == NoSymbol) and perform our substitution
- protected def wrapFunSubst(next: Tree): Tree = pmgen.fun(nextBinder, substitution(next))
+
+ // for CSE (used iff optimizingCodeGen)
+ // TODO: factor this out -- don't mutate treemakers
+ var reused: Boolean = false
+ def reusedBinders: List[Symbol] = Nil
+ override def treesToHoist: List[Tree] = { import CODE._
+ reusedBinders map { b => VAL(b) === pmgen.mkZero(b.info) }
+ }
}
- trait FreshFunTreeMaker extends FunTreeMaker {
+ abstract class FreshFunTreeMaker extends FunTreeMaker {
val pos: Position
+ val prevBinder: Symbol
val nextBinderTp: Type
lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
}
- trait SingleExtractorTreeMaker extends FunTreeMaker {
- val extractor: Tree
- // build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree): Tree = pmgen.flatMap(extractor, wrapFunSubst(next)) setPos extractor.pos
+ // TODO: factor out optimization-specific stuff into codegen
+ abstract class CondTreeMaker extends FreshFunTreeMaker { import CODE._
+ val cond: Tree
+ val res: Tree
+
+ // for CSE (used iff optimizingCodeGen)
+ // must set reused before!
+ override lazy val reusedBinders = if(reused) List(freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE, nextBinder setFlag MUTABLE) else Nil
+ def storedCond = reusedBinders(0)
+ def storedRes = reusedBinders(1)
+
+ def chainBefore(next: Tree, pt: Type): Tree =
+ if (!reused)
+ atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next)))
+ else { // for CSE (used iff optimizingCodeGen)
+ IF (cond) THEN BLOCK(
+ storedCond === TRUE,
+ storedRes === res,
+ substitution(next).duplicate // TODO: finer-grained dup'ing
+ ) ELSE pmgen.zero
+ }
}
- trait SingleBinderTreeMaker extends FunTreeMaker {
- val prevBinder: Symbol
- lazy val initialSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- }
+ // for CSE (used iff optimizingCodeGen)
+ case class ReusingCondTreeMaker(dropped_priors: List[(TreeMaker, Option[TreeMaker])]) extends TreeMaker { import CODE._
+ lazy val localSubstitution = {
+ val (from, to) = dropped_priors.collect {case (dropped: CondTreeMaker, Some(prior: CondTreeMaker)) => (dropped.nextBinder, REF(prior.storedRes))}.unzip
+ val oldSubs = dropped_priors.collect {case (dropped: TreeMaker, _) => dropped.substitution}
+ oldSubs.foldLeft(Substitution(from, to))(_ >> _)
+ }
+
+ def chainBefore(next: Tree, pt: Type): Tree = {
+ val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: CondTreeMaker)) => ctm}.get.storedCond)
- abstract class SimpleTreeMaker extends SingleExtractorTreeMaker with SingleBinderTreeMaker with FreshFunTreeMaker
+ IF (cond) THEN BLOCK(
+ substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ ) ELSE pmgen.zero
+ }
+ }
/**
* Make a TreeMaker that will result in an extractor call specified by `extractor`
@@ -636,97 +753,531 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
* the function's body is determined by the next TreeMaker
* in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
*/
- case class ExtractorTreeMaker(extractor: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends SingleExtractorTreeMaker
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
+ def chainBefore(next: Tree, pt: Type): Tree = atPos(extractor.pos)(
+ if (extractorReturnsBoolean) pmgen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext(next)))
+ else pmgen.flatMap(extractor, pmgen.fun(nextBinder, substitution(condAndNext(next))))
+ )
- case class FilteredExtractorTreeMaker(extractor: Tree, guard: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends FunTreeMaker {
- def chainBefore(next: Tree): Tree =
- pmgen.flatMap(extractor, wrapFunSubst(pmgen.condOptimized(guard, next))) setPos extractor.pos
+ private def condAndNext(next: Tree): Tree = extraCond map (pmgen.condOptimized(_, next)) getOrElse next
+
+ override def toString = "X"+(extractor, nextBinder)
+ }
+
+ // TODO: allow user-defined unapplyProduct
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
+ def chainBefore(next: Tree, pt: Type): Tree = {
+ val nullCheck = REF(prevBinder) OBJ_NE NULL
+ val cond = extraCond match {
+ case None => nullCheck
+ case Some(c) => nullCheck AND c
+ }
+ pmgen.condOptimized(cond, substitution(next))
+ }
+
+ override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
}
+
// need to substitute since binder may be used outside of the next extractor call (say, in the body of the case)
- case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends SimpleTreeMaker {
- val extractor = pmgen.condCast(typeTest(prevBinder, nextBinderTp), prevBinder, nextBinderTp)
+ case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker {
+ val cond = typeTest(prevBinder, nextBinderTp)
+ val res = pmgen._asInstanceOf(prevBinder, nextBinderTp)
+ override def toString = "TT"+(prevBinder, nextBinderTp)
}
// implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
- case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends SimpleTreeMaker {
+ case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker {
val nextBinderTp = glb(List(patBinder.info.widen, pt))
- val extractor = pmgen.condCast(typeAndEqualityTest(patBinder, pt), patBinder, nextBinderTp)
+
+ val cond = typeAndEqualityTest(patBinder, pt)
+ val res = pmgen._asInstanceOf(patBinder, nextBinderTp)
+ override def toString = "TET"+(patBinder, pt)
}
// need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
- case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, pos: Position) extends SimpleTreeMaker {
- val nextBinderTp: Type = prevBinder.info.widen
+ case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, pos: Position) extends CondTreeMaker {
+ val nextBinderTp = prevBinder.info.widen
// NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
- val extractor = atPos(pos)(pmgen.cond(pmgen._equals(patTree, prevBinder), CODE.REF(prevBinder), nextBinderTp))
+ val cond = pmgen._equals(patTree, prevBinder)
+ val res = CODE.REF(prevBinder)
+ override def toString = "ET"+(prevBinder, patTree)
}
- case class AlternativesTreeMaker(prevBinder: Symbol, alts: Tree*) extends SingleBinderTreeMaker with FreshFunTreeMaker {
- val nextBinderTp: Type = prevBinder.info.widen
- val pos = alts.head.pos
- def chainBefore(next: Tree): Tree =
- pmgen.or(wrapFunSubst(next), alts.toList) setPos alts.head.pos
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+ val localSubstitution: Substitution = EmptySubstitution
+
+ override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ super.incorporateOuterSubstitution(outerSubst)
+ altss = altss map (alts => propagateSubstitution(alts, substitution))
+ }
+
+ def chainBefore(next: Tree, pt: Type): Tree = { import CODE._
+ // next does not contain deftrees, is pretty short
+ val canDuplicate = {
+ var okToInline = true
+ var sizeBudget = 100 / (altss.length max 1) // yep, totally arbitrary!
+ object travOkToInline extends Traverser { override def traverse(tree: Tree): Unit = if (sizeBudget >= 0) { sizeBudget -= 1; tree match {
+ case TypeApply(_, _) | Apply(_, _) | Select(_, _)
+ | Block(_, _) | Assign(_, _) | If(_, _, _) | Typed(_, _) => super.traverse(tree) // these are allowed if their subtrees are
+ case EmptyTree | This(_) | New(_) | Literal(_) | Ident(_) => // these are always ok
+ case _ if tree.isType => // these are always ok
+ case _ => okToInline = false //; println("not inlining: "+ (tree, tree.getClass))
+ }}}
+ travOkToInline.traverse(next)
+ // println("(okToInline, sizeBudget): "+ (okToInline, sizeBudget))
+ okToInline && sizeBudget > 0 // must be strict comparison
+ }
+
+ atPos(pos)(
+ if (canDuplicate) {
+ altss map {altTreeMakers =>
+ combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt)
+ } reduceLeft pmgen.typedOrElse(pt)
+ } else {
+ val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest")
+ // rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil))
+
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ combineExtractors(altTreeMakers :+ TrivialTreeMaker(REF(rest) APPLY ()), pt)
+ )
+ BLOCK(
+ VAL(rest) === Function(Nil, substitution(next)),
+ combinedAlts reduceLeft pmgen.typedOrElse(pt)
+ )
+ }
+ )
+ }
}
- case class GuardTreeMaker(guardTree: Tree) extends SingleExtractorTreeMaker {
- val initialSubstitution: Substitution = EmptySubstitution
- val nextBinder = freshSym(guardTree.pos, UnitClass.tpe)
- val extractor = pmgen.guard(guardTree)
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker {
+ val localSubstitution: Substitution = EmptySubstitution
+ def chainBefore(next: Tree, pt: Type): Tree = pmgen.flatMapGuard(substitution(guardTree), next)
+ override def toString = "G("+ guardTree +")"
}
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- def combineExtractors(treeMakers: List[TreeMaker], body: Tree): Tree = {
- // a foldLeft to accumulate the initialSubstitution left-to-right, but written using a map and a var for clarity
- def propagateSubstitution(treeMakers: List[TreeMaker]): List[TreeMaker] = {
- var accumSubst: Substitution = EmptySubstitution
- treeMakers foreach { maker =>
- // could mutate maker instead, but it doesn't seem to shave much time off of quick.comp
- maker addOuterSubstitution accumSubst
- accumSubst = maker.substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// decisions, decisions
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ object Test {
+ var currId = 0
+ }
+ case class Test(cond: Cond, treeMaker: TreeMaker) {
+ // def <:<(other: Test) = cond <:< other.cond
+ // def andThen_: (prev: List[Test]): List[Test] =
+ // prev.filterNot(this <:< _) :+ this
+
+ private val reusedBy = new collection.mutable.HashSet[Test]
+ var reuses: Option[Test] = None
+ def registerReuseBy(later: Test): Unit = {
+ assert(later.reuses.isEmpty)
+ reusedBy += later
+ later.reuses = Some(this)
+ }
+
+ val id = { Test.currId += 1; Test.currId}
+ override def toString =
+ if (cond eq Top) "T"
+ else if(cond eq Havoc) "!?"
+ else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. T"+reuses.get.id))
+ }
+
+ object Cond {
+ // def refines(self: Cond, other: Cond): Boolean = (self, other) match {
+ // case (Bottom, _) => true
+ // case (Havoc , _) => true
+ // case (_ , Top) => true
+ // case (_ , _) => false
+ // }
+ var currId = 0
+ }
+
+ abstract class Cond {
+ // def testedPath: Tree
+ // def <:<(other: Cond) = Cond.refines(this, other)
+
+ val id = { Cond.currId += 1; Cond.currId}
+ }
+
+ // does not contribute any knowledge
+ case object Top extends Cond
+
+ // takes away knowledge. e.g., a user-defined guard
+ case object Havoc extends Cond
+
+ // we know everything! everything!
+ // this either means the case is unreachable,
+ // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives
+ // case object Bottom extends Cond
+
+
+ object EqualityCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond]
+ def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
+ }
+ class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond {
+ // def negation = TopCond // inequality doesn't teach us anything
+ // do simplification when we know enough about the tree statically:
+ // - collapse equal trees
+ // - accumulate tests when (in)equality not known statically
+ // - become bottom when we statically know this can never match
+
+ override def toString = testedPath +" == "+ rhs +"#"+ id
+ }
+
+ object TypeCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond]
+ def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
+ }
+ class TypeCond(testedPath: Tree, pt: Type) extends Cond {
+ // def negation = TopCond // inequality doesn't teach us anything
+ // do simplification when we know enough about the tree statically:
+ // - collapse equal trees
+ // - accumulate tests when (in)equality not known statically
+ // - become bottom when we statically know this can never match
+ override def toString = testedPath +" <: "+ pt +"#"+ id
+ }
+
+ object TypeAndEqualityCond {
+ private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeAndEqualityCond]
+ def apply(testedPath: Tree, pt: Type): TypeAndEqualityCond = uniques getOrElseUpdate((testedPath, pt), new TypeAndEqualityCond(testedPath, pt))
+ }
+ class TypeAndEqualityCond(testedPath: Tree, pt: Type) extends Cond {
+ // def negation = TopCond // inequality doesn't teach us anything
+ // do simplification when we know enough about the tree statically:
+ // - collapse equal trees
+ // - accumulate tests when (in)equality not known statically
+ // - become bottom when we statically know this can never match
+ override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id
+ }
+
+ /** a flow-sensitive, generalised, common sub-expression elimination
+ * reuse knowledge from performed tests
+ * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
+ * when a sub-expression is share, it is stored in a mutable variable
+ * the variable is floated up so that its scope includes all of the program that shares it
+ * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
+ *
+ * intended to be generalised to exhaustivity/reachability checking
+ */
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
+ val pointsToBound = collection.mutable.HashSet(prevBinder)
+
+ // the substitution that renames variables to variables in pointsToBound
+ var normalize: Substitution = EmptySubstitution
+
+ // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
+ // TODO check:
+ // pointsToBound -- accumSubst.from == Set(prevBinder) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ var accumSubst: Substitution = EmptySubstitution
+
+ val trees = new collection.mutable.HashSet[Tree]
+
+ def approximateTreeMaker(tm: TreeMaker): Test = {
+ val subst = tm.substitution
+
+ // find part of substitution that replaces bound symbols by new symbols, and reverse that part
+ // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
+ val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {case (f, t) =>
+ t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ }
+ val (boundFrom, boundTo) = boundSubst.unzip
+ normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
+ // println("normalize: "+ normalize)
+
+ val (unboundFrom, unboundTo) = unboundSubst unzip
+ val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
+ pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
+ // println("pointsToBound: "+ pointsToBound)
+
+ accumSubst >>= okSubst
+ // println("accumSubst: "+ accumSubst)
+
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree): Tree =
+ trees find (a => a.equalsStructure0(t)(sameValue)) match {
+ case Some(orig) => orig // println("unique: "+ (t eq orig, orig));
+ case _ => trees += t; t
+ }
+
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
}
- treeMakers
+
+ def binderToUniqueTree(b: Symbol) = unique(accumSubst(normalize(CODE.REF(b))))
+
+ Test(tm match {
+ case ProductExtractorTreeMaker(pb, None, subst) => Top // TODO: NotNullTest(prevBinder)
+ case tm@TypeTestTreeMaker(prevBinder, nextBinderTp, _) => TypeCond(binderToUniqueTree(prevBinder), uniqueTp(nextBinderTp))
+ case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) => TypeAndEqualityCond(binderToUniqueTree(patBinder), uniqueTp(pt))
+ case tm@EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
+ case ExtractorTreeMaker(_, _, _, _)
+ | GuardTreeMaker(_)
+ | ProductExtractorTreeMaker(_, Some(_), _) => Havoc
+ case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here
+ case SubstOnlyTreeMaker(_) => Top
+ case BodyTreeMaker(_, _) => Havoc
+ }, tm)
}
- propagateSubstitution(treeMakers).foldRight (body) (_ chainBefore _)
- // this optimization doesn't give us much
- // var accumSubst: Substitution = EmptySubstitution
- // var revMakers: List[TreeMaker] = Nil
- // treeMakers foreach { maker =>
- // accumSubst = accumSubst >> maker.substitution
- // maker.substitution = accumSubst
- // revMakers ::= maker
- // }
- //
- // var accumTree = body
- // revMakers foreach { maker =>
- // accumTree = maker chainBefore accumTree
- // }
- //
- // atPos(pos)(accumTree)
+ val testss = cases.map { _ map approximateTreeMaker }
+
+ // interpret:
+ val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]]
+ val tested = new collection.mutable.HashSet[Cond]
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile { test =>
+ val cond = test.cond
+ if ((cond eq Havoc) || (cond eq Top)) (cond eq Top) // stop when we encounter a havoc, skip top
+ else {
+ tested += cond
+
+ // is there an earlier test that checks our condition and whose dependencies are implied by ours?
+ dependencies find { case (priorTest, deps) =>
+ ((priorTest.cond eq cond) || (deps contains cond)) && (deps subsetOf tested)
+ } foreach { case (priorTest, deps) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
+ }
+
+ dependencies(test) = tested.toSet // copies
+ true
+ }
+ }
+ }
+
+ // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
+ // then, collapse these contiguous sequences of reusing tests
+ // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
+ // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
+ testss map { tests =>
+ var currDeps = Set[Cond]()
+ val (sharedPrefix, suffix) = tests span { test =>
+ (test.cond eq Top) || (for(
+ reusedTest <- test.reuses;
+ nextDeps <- dependencies.get(reusedTest);
+ diff <- (nextDeps -- currDeps).headOption;
+ _ <- Some(currDeps = nextDeps))
+ yield diff).nonEmpty
+ }
+
+ val collapsedTreeMakers = if (sharedPrefix.nonEmpty) { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses; if reusedTest.treeMaker.isInstanceOf[FunTreeMaker])
+ reusedTest.treeMaker.asInstanceOf[FunTreeMaker].reused = true
+ // println("sharedPrefix: "+ sharedPrefix)
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix map (t => (t.treeMaker, t.reuses map (_.treeMaker)))) :: suffix.map(_.treeMaker)
+ } else None
+
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above)
+ }
+ }
+
+ // TODO: non-trivial dead-code elimination
+ // e.g., the following match should compile to a simple instanceof:
+ // case class Ident(name: String)
+ // for (Ident(name) <- ts) println(name)
+ def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ // do minimal DCE
+ cases
+ }
+
+
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
}
- def combineCases(scrut: Tree, scrutSym: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): Tree = {
- val matcher =
- if (cases nonEmpty) {
- // when specified, need to propagate pt explicitly (type inferencer can't handle it)
- val optPt =
- if (isFullyDefined(pt)) appliedType(matchingMonadType, List(pt))
- else NoType
+ object SwitchablePattern { def unapply(pat: Tree) = pat match {
+ case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches
+ case _ => false
+ }}
+
+ // def isSwitchable(cases: List[(List[TreeMaker], Tree)]): Boolean = {
+ // def isSwitchableTreeMaker(tm: TreeMaker) = tm match {
+ // case tm@EqualityTestTreeMaker(_, SwitchablePattern(), _) => true
+ // case SubstOnlyTreeMaker(_) => true
+ // case AlternativesTreeMaker(_, altss, _) => altss forall (_.forall(isSwitchableTreeMaker))
+ // case _ => false
+ // }
+ // }
+
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = if (optimizingCodeGen) {
+ def unfold(tms: List[TreeMaker], currLabel: Option[Symbol] = None, nextLabel: Option[Symbol] = None): List[CaseDef] = tms match {
+ // constant
+ case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => import CODE._
+ @inline
+ def substedBody = btm.substitution(body)
+ val labelledBody = currLabel match {
+ case None => substedBody // currLabel.isEmpty implies nextLabel.isEmpty
+ case Some(myLabel) =>
+ LabelDef(myLabel, Nil,
+ nextLabel match {
+ case None => substedBody
+ case Some(next) => ID(next) APPLY ()
+ }
+ )
+ }
+ List(CaseDef(const, EmptyTree, labelledBody))
- // map + foldLeft
- var combinedCases = combineExtractors(cases.head._1, cases.head._2)
- cases.tail foreach { case (pats, body) =>
- combinedCases = pmgen.typedOrElse(optPt)(combinedCases, combineExtractors(pats, body))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, _) :: bodyTm :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty)
+ val labels = altss map { alts =>
+ Some(freshSym(NoPosition, MethodType(Nil, pt), "$alt$") setFlag (METHOD | LABEL))
}
- pmgen.fun(scrutSym, combinedCases)
- } else pmgen.zero
+ val caseDefs = (altss, labels, labels.tail :+ None).zipped.map { case (alts, currLabel, nextLabel) =>
+ unfold(alts :+ bodyTm, currLabel, nextLabel)
+ }
+
+ if (caseDefs exists (_.isEmpty)) Nil
+ else caseDefs.flatten
+
+ case _ => Nil // failure
+ }
+
+ val caseDefs = cases map { makers =>
+ removeSubstOnly(makers) match {
+ // default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches)
+ case (btm@BodyTreeMaker(body, _)) :: Nil =>
+ List(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body)))
+ case nonTrivialMakers =>
+ unfold(nonTrivialMakers)
+ }
+ }
+
+ if (caseDefs exists (_.isEmpty)) None
+ else { import CODE._
+ val matcher = BLOCK(
+ VAL(scrutSym) === scrut, // TODO: type test for switchable type if patterns allow switch but the scrutinee doesn't
+ Match(REF(scrutSym), caseDefs.flatten) // match on scrutSym, not scrut to avoid duplicating scrut
+ )
+
+ // matcher filter (tree => tree.tpe == null) foreach println
+ // treeBrowser browse matcher
+ Some(matcher) // set type to avoid recursion in typedMatch
+ }
+ } else None
+
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] =
+ doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){
+ val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+
+ emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{
+ var toHoist = List[Tree]()
+ val (matcher, hasDefault) =
+ if (casesUnOpt nonEmpty) {
+ // when specified, need to propagate pt explicitly (type inferencer can't handle it)
+ val optPt =
+ if (isFullyDefined(pt)) inMatchMonad(pt)
+ else NoType
+
+ // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ val hasDefault = casesUnOpt.nonEmpty && {
+ val nonTrivLast = casesUnOpt.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }
+
+ val cases =
+ if (optimizingCodeGen) optimizeCases(scrutSym, casesUnOpt, pt)
+ else casesUnOpt
+
+ val combinedCases =
+ cases.map(combineExtractors(_, pt)).reduceLeft(pmgen.typedOrElse(optPt))
- pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType)
+ toHoist = (for (treeMakers <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList
+
+ (pmgen.fun(scrutSym, combinedCases), hasDefault)
+ } else (pmgen.zero, false)
+
+ val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType, hasDefault)
+ if (toHoist isEmpty) expr
+ else Block(toHoist, expr)
+ }
}
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree =
+ treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt))
+
+
+
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newValue(t.pos, nme.ANON_FUN_NAME).setFlag(SYNTHETIC).setInfo(NoType)
+ // println("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
+ assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner)
+ d.symbol.lazyAccessor.owner = currentOwner
+ }
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // println("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // println("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
object Substitution {
def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
// requires sameLength(from, to)
@@ -736,11 +1287,14 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
class Substitution(val from: List[Symbol], val to: List[Tree]) {
def apply(tree: Tree): Tree = typedSubst(tree, from, to)
+
+ // the substitution that chains `other` before `this` substitution
// forall t: Tree. this(other(t)) == (this >> other)(t)
def >>(other: Substitution): Substitution = {
val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
}
+ override def toString = (from zip to) mkString("Substitution(", ", ", ")")
}
object EmptySubstitution extends Substitution(Nil, Nil) {
@@ -748,6 +1302,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
override def >>(other: Substitution): Substitution = other
}
+
def matchingMonadType: Type
def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree
def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x"): Symbol
@@ -756,29 +1311,134 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// codegen relevant to the structure of the translation (how extractors are combined)
trait AbsCodeGen { import CODE.UNIT
- def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type): Tree
+ def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree
def flatMap(a: Tree, b: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
def fun(arg: Symbol, body: Tree): Tree
- def or(f: Tree, as: List[Tree]): Tree
def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree
- def guard(c: Tree): Tree
def zero: Tree
- // TODO: defaults in traits + self types == broken?
- // def guard(c: Tree, then: Tree, tp: Type): Tree
- // def cond(c: Tree): Tree = cond(c, UNIT, NoType)
- def cond(c: Tree, then: Tree, tp: Type): Tree
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
def condOptimized(c: Tree, then: Tree): Tree
- def condCast(c: Tree, binder: Symbol, expectedTp: Type): Tree
def _equals(checker: Tree, binder: Symbol): Tree
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def mkZero(tp: Type): Tree
}
def pmgen: AbsCodeGen
+ def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator
}
- // generate actual trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// generate actual trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
trait MatchCodeGen extends TreeMakers {
- def matchingStrategy: Tree
- def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator
+ lazy val pmgen: CommonCodeGen with MatchingStrategyGen with MonadInstGen =
+ if (optimizingCodeGen) (new CommonCodeGen with OptimizedCodeGen {})
+ else (new CommonCodeGen with MatchingStrategyGen with MonadInstGen {})
+
+ import CODE._
+
+ trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
+ // methods in MatchingStrategy (the monad companion) -- used directly in translation
+ def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutTp, resTp) APPLY (scrut) APPLY (matcher) // matchingStrategy.runOrElse(scrut)(matcher)
+ // *only* used to wrap the RHS of a body (isDefinedAt synthesis relies on this)
+ def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (matchingStrategy DOT vpmName.one) (_asInstanceOf(res, bodyPt, force = true)) // matchingStrategy.one(res), like one, but blow this one away for isDefinedAt (since it's the RHS of a case)
+ def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero
+ def guard(c: Tree, then: Tree, tp: Type): Tree = genTypeApply((matchingStrategy DOT vpmName.guard), repackExistential(tp)) APPLY (c, then) // matchingStrategy.guard[tp](c, then)
+ }
+
+ trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
+ // methods in the monad instance -- used directly in translation
+ def flatMap(a: Tree, b: Tree): Tree = (a DOT vpmName.flatMap)(b)
+ def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase)
+
+ // TODO: the trees generated by flatMapCond and flatMapGuard may need to be distinguishable by exhaustivity checking -- they aren't right now
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol,
+ nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), fun(nextBinder, next))
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, UnitClass.tpe), UnitClass.tpe, next)
+ }
+
+ // when we know we're targetting Option, do some inlining the optimizer won't do
+ // `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ // this trait overrides ALL of the methods of MatchingStrategyGen with MonadInstGen
+ trait OptimizedCodeGen extends CommonCodeGen with MatchingStrategyGen with MonadInstGen {
+ lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero")
+
+ /** Inline runOrElse and get rid of Option allocations
+ *
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse (throw new MatchError(x))
+ * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+ * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+ */
+ @inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
+ lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
+ lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
+ override def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean) = matcher match {
+ case Function(List(x: ValDef), body) =>
+ matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
+ if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
+ BLOCK(
+ VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
+ VAL(x.symbol) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for x.symbol in body -- the owner structure is repaired by fixerUpper
+ VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
+ VAL(keepGoing) === TRUE,
+ body,
+ if(hasDefault) REF(matchRes)
+ else (IF (REF(keepGoing)) THEN MATCHERROR(REF(x.symbol)) ELSE REF(matchRes))
+ )
+ }
+
+ // only used to wrap the RHS of a body
+ override def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
+ BLOCK(
+ if (dontStore(matchPt)) res // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
+ else (REF(matchRes) === res), // _asInstanceOf(res, tp.widen, force = true)
+ REF(keepGoing) === FALSE,
+ zero // to have a nice lub for lubs -- otherwise we'll get a boxed unit here -- TODO: get rid of all those dangling else zero's
+ )
+ }
+
+ override def zero: Tree = REF(zeroSym)
+
+ // guard is only used by flatMapCond and flatMapGuard, which are overridden
+ override def guard(c: Tree, then: Tree, tp: Type): Tree = throw new NotImplementedError("guard is never called by optimizing codegen")
+
+ override def flatMap(opt: Tree, fun: Tree): Tree = fun match {
+ case Function(List(x: ValDef), body) =>
+ val tp = inMatchMonad(x.symbol.tpe)
+ val vs = freshSym(opt.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+ val v = VAL(vs) === opt
+
+ BLOCK(
+ v,
+ IF (vs DOT isEmpty) THEN zero ELSE typedSubst(body, List(x.symbol), List(vs DOT get)) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor)
+ )
+ case _ => println("huh?")
+ (opt DOT vpmName.flatMap)(fun)
+ }
+
+ override def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
+ BLOCK(
+ thisCase,
+ IF (REF(keepGoing)) THEN elseCase ELSE zero // leave trailing zero for now, otherwise typer adds () anyway
+ )
+ }
+
+ override def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree =
+ IF (cond) THEN BLOCK(
+ VAL(nextBinder) === res,
+ next
+ ) ELSE zero
+
+ override def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ IF (guardTree) THEN next ELSE zero
+ }
@inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
if (origTp == null || origTp == NoType) to
@@ -807,10 +1467,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}).transform(tree)
}
- lazy val pmgen: CommonCodeGen with MatchingStrategyGen with MonadInstGen =
- if (matchingMonadType.typeSymbol eq OptionClass) (new CommonCodeGen with MatchingStrategyGenOpt with MonadInstGenOpt {})
- else (new CommonCodeGen with MatchingStrategyGen with MonadInstGen {})
-
var ctr = 0
def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1;
// assert(owner ne null)
@@ -824,34 +1480,32 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
}
object vpmName {
- val caseResult = "caseResult".toTermName
- val drop = "drop".toTermName
- val flatMap = "flatMap".toTermName
- val get = "get".toTermName
- val guard = "guard".toTermName
- val isEmpty = "isEmpty".toTermName
- val one = "one".toTermName
- val or = "or".toTermName
- val orElse = "orElse".toTermName
- val outer = "<outer>".toTermName
- val runOrElse = "runOrElse".toTermName
- val zero = "zero".toTermName
-
- def counted(str: String, i: Int) = (str+i).toTermName
- def tupleIndex(i: Int) = ("_"+i).toTermName
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+
+ def counted(str: String, i: Int) = newTermName(str+i)
}
- import CODE._
def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
trait CommonCodeGen extends AbsCodeGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT vpmName.tupleIndex(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
- def and(a: Tree, b: Tree): Tree = a AND b
+ def and(a: Tree, b: Tree): Tree = a AND b
+ def condOptimized(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp)
@@ -869,128 +1523,25 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
else gen.mkAsInstanceOf(REF(b), tpX, true, false)
}
- }
- trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- // methods in MatchingStrategy (the monad companion) -- used directly in translation
- def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type): Tree = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutTp, resTp) APPLY (scrut) APPLY (matcher) // matchingStrategy.runOrElse(scrut)(matcher)
- def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero
- def one(res: Tree, tp: Type = NoType, oneName: Name = vpmName.one): Tree = genTypeApply(matchingStrategy DOT oneName, tp) APPLY (res) // matchingStrategy.one(res)
- def or(f: Tree, as: List[Tree]): Tree = (matchingStrategy DOT vpmName.or)((f :: as): _*) // matchingStrategy.or(f, as)
- def guard(c: Tree): Tree = (matchingStrategy DOT vpmName.guard)(c, UNIT) // matchingStrategy.guard(c, then) -- a user-defined guard
- // TODO: get rid of the cast when it's unnecessary, but this requires type checking `body` -- maybe this should be one of the optimisations we perform after generating the tree
- def caseResult(res: Tree, tp: Type): Tree = (matchingStrategy DOT vpmName.caseResult) (_asInstanceOf(res, tp, force = true)) // matchingStrategy.caseResult(res), like one, but blow this one away for isDefinedAt (since it's the RHS of a case)
-
- // an internal guard TODO: use different method call so exhaustiveness can distinguish it from user-defined guards
- def cond(c: Tree, then: Tree = UNIT, tp: Type = NoType): Tree = genTypeApply((matchingStrategy DOT vpmName.guard), repackExistential(tp)) APPLY (c, then) // matchingStrategy.guard(c, then)
- def condCast(c: Tree, binder: Symbol, expectedTp: Type): Tree = cond(c, _asInstanceOf(binder, expectedTp), expectedTp)
- def condOptimized(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
- }
-
- trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- // methods in the monad instance -- used directly in translation
- def flatMap(a: Tree, b: Tree): Tree = (a DOT vpmName.flatMap)(b)
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase)
- }
-
- // when we know we're targetting Option, do some inlining the optimizer won't do
- // `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
- // this is a special instance of the advanced inlining optimization that takes a method call on
- // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- trait MatchingStrategyGenOpt extends MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- override def guard(c: Tree): Tree = condOptimized(c, one(UNIT))
- override def cond(c: Tree, then: Tree = UNIT, tp: Type = NoType): Tree = condOptimized(c, one(then, repackExistential(tp)))
- // override def runOrElse(scrut: Tree, matcher: Tree): Tree = matcher match {
- // case Function(List(x: ValDef), body) =>
- // val tp = x.symbol.tpe
- // val restp = appliedType(matchingMonadType, List(pt)) // don't always know pt....
- // val isEmpty = restp member vpmName.isEmpty
- // val get = restp member vpmName.get
- //
- // val vs = freshSym(scrut.pos, tp, "s")
- // val vres = freshSym(scrut.pos, restp, "res")
- // val s = VAL(vs) === scrut
- // val res = VAL(vres) === typedSubst(body, List(x.symbol), List(REF(vs)))
- //
- // BLOCK(
- // s,
- // res,
- // IF (res DOT isEmpty) THEN ELSE (res DOT get)
- // )
- // }
- }
-
- trait MonadInstGenOpt extends MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen =>
- override def flatMap(opt: Tree, fun: Tree): Tree = fun match {
- case Function(List(x: ValDef), body) =>
- val tp = appliedType(matchingMonadType, List(x.symbol.tpe))
- val vs = freshSym(opt.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
- val v = VAL(vs) === opt
-
- BLOCK(
- v,
- IF (vs DOT isEmpty) THEN zero ELSE typedSubst(body, List(x.symbol), List(vs DOT get))
- )
- case _ => println("huh?")
- (opt DOT vpmName.flatMap)(fun)
- }
- override def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
- val vs = freshSym(thisCase.pos, pt, "o")
- val isEmpty = pt member vpmName.isEmpty
- val v = VAL(vs) === thisCase // genTyped(, pt)
- BLOCK(
- v,
- IF (vs DOT isEmpty) THEN elseCase /*genTyped(, pt)*/ ELSE REF(vs)
- )
- }
- }
-
- def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
- // def genTyped(t: Tree, tp: Type): Tree = if(tp == NoType) t else Typed(t, TypeTree(repackExistential(tp)))
- }
-
-
- // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
- // TODO: assign more fine-grained positions
- // fixes symbol nesting, assigns positions
- def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
- currentOwner = origOwner
-
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- }
- t match {
- case Function(_, _) if t.symbol == NoSymbol =>
- t.symbol = currentOwner.newValue(t.pos, nme.ANON_FUN_NAME).setFlag(SYNTHETIC).setInfo(NoType)
- // println("new symbol for "+ (t, t.symbol.ownerChain))
- case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
- t.symbol.owner = currentOwner
- case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree??
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner)
- d.symbol.lazyAccessor.owner = currentOwner
- }
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
-
- d.symbol.owner = currentOwner
- // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
- case _ =>
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
}
- super.traverse(t)
}
- // override def apply
- // println("before fixerupper: "+ xTree)
- // currentRun.trackerFactory.snapshot()
- // println("after fixerupper")
- // currentRun.trackerFactory.snapshot()
+ def matchingStrategy: Tree
}
}
@@ -1003,3 +1554,42 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// var okTree: Tree = null
// }
// private def c(t: Tree): Tree = noShadowedUntyped(t)
+
+ // def approximateTreeMaker(tm: TreeMaker): List[Test] = tm match {
+ // case ExtractorTreeMaker(extractor, _, _) => HavocTest
+ // case FilteredExtractorTreeMaker(extractor, lenGuard, _, _) => HavocTest
+ // case ProductExtractorTreeMaker(testedBinder, lenGuard, _) => TopTest // TODO: (testedBinder ne null) and lenGuard
+ //
+ // // cond = typeTest(prevBinder, nextBinderTp)
+ // // res = pmgen._asInstanceOf(prevBinder, nextBinderTp)
+ // case TypeTestTreeMaker(testedBinder, pt, _) =>
+ //
+ // // cond = typeAndEqualityTest(patBinder, pt)
+ // // res = pmgen._asInstanceOf(patBinder, nextBinderTp)
+ // case TypeAndEqualityTestTreeMaker(_, testedBinder, pt, _) =>
+ //
+ // // cond = pmgen._equals(patTree, prevBinder)
+ // // res = CODE.REF(prevBinder)
+ // case EqualityTestTreeMaker(testedBinder, rhs, _) =>
+ //
+ // case AlternativesTreeMaker(_, alts: *) =>
+ //
+ // case GuardTreeMaker(guardTree) =>
+ // }
+
+ // // TODO: it's not exactly sound to represent an unapply-call by its symbol... also need to consider the prefix, like the outer-test (can this be captured as the path to this test?)
+ // type ExtractorRepr = Symbol
+ //
+ // // TODO: we're undoing tree-construction that we ourselves performed earlier -- how about not-doing so we don't have to undo?
+ // private def findBinderArgOfApply(extractor: Tree, unappSym: Symbol): Symbol = {
+ // class CollectTreeTraverser[T](pf: PartialFunction[Tree => T]) extends Traverser {
+ // val hits = new ListBuffer[T]
+ // override def traverse(t: Tree) {
+ // if (pf.isDefinedAt(t)) hits += pf(t)
+ // super.traverse(t)
+ // }
+ // }
+ // val trav = new CollectTreeTraverser{ case Apply(unapp, List(arg)) if unapp.symbol eq unappSym => arg.symbol}
+ // trav.traverse(extractor)
+ // trav.hits.headOption getOrElse NoSymbol
+ // }
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 645d3ecfa0..8f9cd46611 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -150,49 +150,64 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// Override checking ------------------------------------------------------------
+
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
- val self = clazz.thisType
- val bridges = new ListBuffer[Tree]
-
- def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- val bridge = member.cloneSymbolImpl(clazz)
- .setPos(clazz.pos).setFlag(member.flags | VBRIDGE)
- bridge.setInfo(bridgetpe.cloneInfo(bridge))
- clazz.info.decls enter bridge
- val List(params) = bridge.paramss
- val TypeRef(_, JavaRepeatedParamClass, List(elemtp)) = params.last.tpe
- val (initargs, List(lastarg0)) = (params map Ident) splitAt (params.length - 1)
- val lastarg = gen.wildcardStar(gen.mkWrapArray(lastarg0, elemtp))
- val body = Apply(Select(This(clazz), member), initargs ::: List(lastarg))
- localTyper.typed {
- /*util.trace("generating varargs bridge")*/(DefDef(bridge, body))
+ // This is quite expensive, so attempt to skip it completely.
+ // Insist there at least be a java-defined ancestor which
+ // defines a varargs method. TODO: Find a cheaper way to exclude.
+ if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
+ val self = clazz.thisType
+ val bridges = new ListBuffer[Tree]
+
+ def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
+ log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+
+ val bridge = member.cloneSymbolImpl(clazz)
+ .setPos(clazz.pos).setFlag(member.flags | VBRIDGE)
+ bridge.setInfo(bridgetpe.cloneInfo(bridge))
+ clazz.info.decls enter bridge
+
+ val params = bridge.paramss.head
+ val elemtp = params.last.tpe.typeArgs.head
+ val idents = params map Ident
+ val lastarg = gen.wildcardStar(gen.mkWrapArray(idents.last, elemtp))
+ val body = Apply(Select(This(clazz), member), idents.init :+ lastarg)
+
+ localTyper typed DefDef(bridge, body)
}
- }
-
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
- // if a method with type `jtpe` exists and that method is not a varargs bridge
- // then create a varargs bridge of type `jtpe` that forwards to the
- // member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
- if (!(member hasFlag DEFERRED) && hasRepeatedParam(member.info)) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
- val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE) filter (
- sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)
- // this is a bit tortuous: we look for non-private members or bridges
- // if we find a bridge everything is OK. If we find another member,
- // we need to create a bridge
- )
- if (inherited.exists) {
- bridges += varargBridge(member, jtpe)
+
+ // For all concrete non-private members that have a (Scala) repeated parameter:
+ // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // if a method with type `jtpe` exists and that method is not a varargs bridge
+ // then create a varargs bridge of type `jtpe` that forwards to the
+ // member method with the Scala vararg type.
+ for (member <- clazz.info.nonPrivateMembers) {
+ if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
+ val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+ // Delaying calling memberType as long as possible
+ if (inherited ne NoSymbol) {
+ val jtpe = toJavaRepeatedParam(self.memberType(member))
+ // this is a bit tortuous: we look for non-private members or bridges
+ // if we find a bridge everything is OK. If we find another member,
+ // we need to create a bridge
+ if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ bridges += varargBridge(member, jtpe)
+ }
}
}
+
+ bridges.toList
}
-
- bridges.toList
+ else Nil
}
/** 1. Check all members of class `clazz` for overriding conditions.
@@ -395,8 +410,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
}
-
-
def checkOverrideTypes() {
if (other.isAliasType) {
//if (!member.typeParams.isEmpty) (1.5) @MAT
@@ -405,14 +418,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
// overrideError("may not override parameterized type");
// @M: substSym
- if( !(sameLength(member.typeParams, other.typeParams) && (self.memberType(member).substSym(member.typeParams, other.typeParams) =:= self.memberType(other))) ) // (1.6)
+ if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
overrideTypeError();
- } else if (other.isAbstractType) {
+ }
+ else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
-
- val memberTp = self.memberType(member)
val otherTp = self.memberInfo(other)
+
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
explainTypes(_.bounds containsType _, otherTp, memberTp)
@@ -1092,7 +1105,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- if (nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
if (actual isSubClass receiver) ()
else if (receiver isSubClass actual) ()
// warn only if they have no common supertype below Object
@@ -1273,8 +1286,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- for (msg <- sym.migrationMessage)
- unit.warning(pos, sym.fullLocationString + " has changed semantics:\n" + msg)
+ if (sym.hasMigrationAnnotation)
+ unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
+ sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
+ )
}
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
@@ -1332,7 +1347,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
}
// types of the value parameters
- member.paramss.flatten foreach (p => checkAccessibilityOfType(p.tpe))
+ mapParamss(member)(p => checkAccessibilityOfType(p.tpe))
// upper bounds of type parameters
member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
@@ -1547,7 +1562,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
checkOverloadedRestrictions(currentOwner)
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
-
if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
else tree
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index c9991614e4..cde531adc1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -43,7 +43,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
private def transformArgs(params: List[Symbol], args: List[Tree]) = {
- treeInfo.zipMethodParamsAndArgs(params, args) map { case (param, arg) =>
+ treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) =>
if (isByNameParamType(param.tpe))
withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
@@ -321,12 +321,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val code = DefDef(protAcc, {
val (receiver :: _) :: tail = protAcc.paramss
val base: Tree = Select(Ident(receiver), sym)
- val allParamTypes = sym.tpe.paramss map (xs => xs map (_.tpe))
-
- (tail zip allParamTypes).foldLeft(base) {
- case (fn, (params, tpes)) =>
- Apply(fn, params zip tpes map { case (p, tp) => makeArg(p, receiver, tp) })
- }
+ val allParamTypes = mapParamss(sym)(_.tpe)
+ val args = map2(tail, allParamTypes)((params, tpes) => map2(params, tpes)(makeArg(_, receiver, _)))
+ args.foldLeft(base)(Apply(_, _))
})
debuglog("" + code)
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index cf8c0c596c..92e4e257bf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -39,10 +39,6 @@ trait SyntheticMethods extends ast.TreeDSL {
private object util {
private type CM[T] = ClassManifest[T]
- lazy val IteratorModule = getModule("scala.collection.Iterator")
- lazy val Iterator_apply = getMember(IteratorModule, nme.apply)
- def iteratorOfType(tp: Type) = appliedType(IteratorClass.typeConstructor, List(tp))
-
def ValOrDefDef(sym: Symbol, body: Tree) =
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
@@ -50,7 +46,7 @@ trait SyntheticMethods extends ast.TreeDSL {
/** To avoid unchecked warnings on polymorphic classes.
*/
def clazzTypeToTest(clazz: Symbol) = clazz.tpe.normalize match {
- case TypeRef(_, sym, args) if args.nonEmpty => ExistentialType(sym.typeParams, clazz.tpe)
+ case TypeRef(_, sym, args) if args.nonEmpty => newExistentialType(sym.typeParams, clazz.tpe)
case tp => tp
}
@@ -76,11 +72,11 @@ trait SyntheticMethods extends ast.TreeDSL {
}
def manifestToSymbol(m: CM[_]): Symbol = m match {
- case x: scala.reflect.AnyValManifest[_] => definitions.getClass("scala." + x)
+ case x: scala.reflect.AnyValManifest[_] => getMember(ScalaPackageClass, newTermName("" + x))
case _ => getClassIfDefined(m.erasure.getName)
}
def companionType[T](implicit m: CM[T]) =
- getModule(m.erasure.getName).tpe
+ getRequiredModule(m.erasure.getName).tpe
// Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
def applyType[M](implicit m1: CM[M]): Type =
@@ -202,7 +198,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// in the original order.
def accessors = clazz.caseFieldAccessors sortBy { acc =>
originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name + "$").toTermName)
+ (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
}
}
val arity = accessors.size
@@ -225,7 +221,7 @@ trait SyntheticMethods extends ast.TreeDSL {
)
def forwardToRuntime(method: Symbol): Tree =
- forwardMethod(method, getMember(ScalaRunTimeModule, "_" + method.name toTermName))(This(clazz) :: _)
+ forwardMethod(method, getMember(ScalaRunTimeModule, method.name prepend "_"))(This(clazz) :: _)
// Any member, including private
def hasConcreteImpl(name: Name) =
@@ -238,14 +234,14 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
- clazzMember(name.toTermName).info match {
+ clazzMember(newTermName(name)).info match {
case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
case _ => default
}
}
def productIteratorMethod = {
createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
- gen.mkMethodCall(ScalaRunTimeModule, "typedProductIterator", List(accessorLub), List(This(clazz)))
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(This(clazz)))
)
}
def projectionMethod(accessor: Symbol, num: Int) = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 7671ccbed7..5ccf27ded9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -969,7 +969,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
return typed(Select(tree, meth), mode, pt)
}
if (coercion != EmptyTree) {
- debuglog("inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe)
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv.value)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed(
new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
}
@@ -1056,7 +1060,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
inferView(qual, qual.tpe, searchTemplate, true) match {
case EmptyTree => qual
- case coercion => typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
+ case coercion =>
+ if (settings.logImplicitConv.value)
+ unit.echo(qual.pos,
+ "applied implicit conversion from %s to %s = %s".format(
+ qual.tpe, searchTemplate, coercion.symbol.defString))
+
+ typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
}
}
else qual
@@ -1207,7 +1217,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
else
- (preSuperStats, preSuperVals).zipped map { case (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe }
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
case _ =>
if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
@@ -1361,7 +1371,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
"you want, you must write the annotation class in Java.")
if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
- val m = companionModuleOf(clazz, context)
+ val m = companionSymbolOf(clazz, context)
if (m != NoSymbol)
m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List()))
}
@@ -1377,7 +1387,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def typedModuleDef(mdef: ModuleDef): Tree = {
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
- val linkedClass = companionClassOf(mdef.symbol, context)
+ val linkedClass = companionSymbolOf(mdef.symbol, context)
if (linkedClass != NoSymbol)
linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize)
@@ -1959,7 +1969,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (argpts.lengthCompare(numVparams) != 0)
errorTree(fun, "wrong number of parameters; expected = " + argpts.length)
else {
- val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) =>
+ val vparamSyms = map2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
@@ -2195,15 +2205,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = {
def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass
- (formals, args).zipped exists {
+ exists2(formals, args) {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
- (formalargs, vparams).zipped.exists ((formalarg, vparam) =>
- vparam.tpt.isEmpty && (tparams exists (formalarg contains))) &&
- (tparams forall isLowerBounded)
+ ( exists2(formalargs, vparams)((formal, vparam) =>
+ vparam.tpt.isEmpty && (tparams exists formal.contains))
+ && (tparams forall isLowerBounded)
+ )
case _ =>
false
})
@@ -2223,7 +2234,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
(methCtx != NoContext) && {
val contextFun = methCtx.tree.symbol
contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
- companionModuleOf(calledFun.owner, context).moduleClass == contextFun.owner
+ companionSymbolOf(calledFun.owner, context).moduleClass == contextFun.owner
}
}
}
@@ -2460,7 +2471,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
assert(!inPatternMode(mode)) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
+ val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
var remainingParams = paramTypes
def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
@@ -2477,7 +2488,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
arg1
}
- val args1 = (args, formals).zipped map typedArgToPoly
+ val args1 = map2(args, formals)(typedArgToPoly)
if (args1 exists (_.tpe.isError)) errTree
else {
debuglog("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug
@@ -2926,7 +2937,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
override val typeParams = tparams map (_.symbol)
val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
// Replace the symbols
- def substitute() = (tparams, typeSkolems).zipped map (_ setSymbol _)
+ def substitute() = map2(tparams, typeSkolems)(_ setSymbol _)
override def complete(sym: Symbol) {
// The info of a skolem is the skolemized info of the
// actual type parameter of the skolem
@@ -2950,6 +2961,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
new DeSkolemizeMap mapOver tp
}
+ def typedClassOf(tree: Tree, tpt: Tree) = {
+ checkClassType(tpt, true, false)
+ atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
+ }
+
protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
for (wc <- tree.whereClauses)
if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
@@ -2960,7 +2976,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
error(vd.pos, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
val tpt1 = typedType(tree.tpt, mode)
existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) =>
- TypeTree(ExistentialType(tparams, tp)) setOriginal tree
+ TypeTree(newExistentialType(tparams, tp)) setOriginal tree
)
}
@@ -2989,10 +3005,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (sameLength(tparams, args)) {
val targs = args map (_.tpe)
checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "")
- if (fun.symbol == Predef_classOf) {
- checkClassType(args.head, true, false)
- atPos(tree.pos) { gen.mkClassOf(targs.head) }
- } else {
+ if (fun.symbol == Predef_classOf)
+ typedClassOf(tree, args.head)
+ else {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && !targs.isEmpty)
checkCheckable(tree.pos, targs.head, "")
val resultpe = restpe.instantiateTypeParams(tparams, targs)
@@ -3219,9 +3234,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val owntype = elimAnonymousClass(owntype0)
if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype))
- val translated = (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype)
-
- typed1(translated, mode, WildcardType) setType owntype // TODO: get rid of setType owntype -- it should all typecheck
+ (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
+ case Block(vd :: Nil, tree@Match(selector, cases)) =>
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt)
+ val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
+ if (needAdapt)
+ cases1 = cases1 map (adaptCase(_, owntype))
+ typed(Block(vd :: Nil, treeCopy.Match(tree, selector1, cases1) setType owntype))
+ case translated =>
+ // TODO: get rid of setType owntype -- it should all typecheck
+ // must call typed, not typed1, or we overflow the stack when emitting switches
+ typed(translated, mode, WildcardType) setType owntype
+ }
}
}
}
@@ -3453,7 +3478,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case ex: TypeError =>
fun match {
case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(name.decode) =>
+ if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
stopTimer(failedOpEqNanos, opeqStart)
@@ -3769,7 +3794,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
reallyExists(sym) &&
((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
}
-
+
if (defSym == NoSymbol) {
var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
@@ -3900,13 +3925,23 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
}
- if (defSym.owner.isPackageClass) pre = defSym.owner.thisType
+ if (defSym.owner.isPackageClass)
+ pre = defSym.owner.thisType
+
+ // Inferring classOf type parameter from expected type.
if (defSym.isThisSym) {
typed1(This(defSym.owner) setPos tree.pos, mode, pt)
- } else {
- val tree1 = if (qual == EmptyTree) tree
- else atPos(tree.pos)(Select(qual, name))
- // atPos necessary because qualifier might come from startContext
+ }
+ // Inferring classOf type parameter from expected type. Otherwise an
+ // actual call to the stubbed classOf method is generated, returning null.
+ else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ typedClassOf(tree, TypeTree(pt.typeArgs.head))
+ else {
+ val tree1 = (
+ if (qual == EmptyTree) tree
+ // atPos necessary because qualifier might come from startContext
+ else atPos(tree.pos)(Select(qual, name))
+ )
val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
// assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
stabilize(tree2, pre2, mode, pt) match {
@@ -3948,7 +3983,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
val argtypes = args1 map (_.tpe)
- (args, tparams).zipped foreach { (arg, tparam) => arg match {
+ foreach2(args, tparams)((arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3957,7 +3992,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
- }}
+ })
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
@@ -4055,7 +4090,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
case UnApply(fun, args) =>
val fun1 = typed(fun)
val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length)
- val args1 = (args, tpes).zipped map typedPattern
+ val args1 = map2(args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
case ArrayValue(elemtpt, elems) =>
@@ -4110,7 +4145,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
if (isValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
else {
val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- ExistentialType(List(tparam), arrayType(tparam.tpe))
+ newExistentialType(List(tparam), arrayType(tparam.tpe))
}
val (expr1, baseClass) = expr0.tpe.typeSymbol match {
case ArrayClass => (adapt(expr0, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
@@ -4246,6 +4281,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
typedIdent(name)
+ case ReferenceToBoxed(idt @ Ident(_)) =>
+ val id1 = typed1(idt, mode, pt) match { case id: Ident => id }
+ treeCopy.ReferenceToBoxed(tree, id1) setType AnyRefClass.tpe
+
case Literal(value) =>
tree setType (
if (value.tag == UnitTag) UnitClass.tpe
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 4f7e6225e1..fd6f972ffc 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -21,6 +21,8 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
+ private val unapplyParamName = newTermName("x$0")
+
/** returns type list for return type of the extraction */
def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
assert(ufn.isMethod)
@@ -112,7 +114,7 @@ trait Unapplies extends ast.TreeDSL
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
- vparamss map (_ map copyUntyped[ValDef])
+ mmap(vparamss)(copyUntyped[ValDef])
}
/** The return value of an unapply method of a case class C[Ts]
@@ -165,7 +167,7 @@ trait Unapplies extends ast.TreeDSL
val cparamss = constrParamss(cdef)
atPos(cdef.pos.focus)(
DefDef(caseMods, nme.apply, tparams, cparamss, classType(cdef, tparams),
- New(classType(cdef, tparams), cparamss map (_ map gen.paramToArg)))
+ New(classType(cdef, tparams), mmap(cparamss)(gen.paramToArg)))
)
}
@@ -173,14 +175,13 @@ trait Unapplies extends ast.TreeDSL
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
- val paramName = newTermName("x$0")
val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), paramName, classType(cdef, tparams), EmptyTree))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(paramName))
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
@@ -201,12 +202,12 @@ trait Unapplies extends ast.TreeDSL
def paramWithDefault(vd: ValDef) =
treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
- val paramss = cparamss map (_ map paramWithDefault)
+ val paramss = mmap(cparamss)(paramWithDefault)
val classTpe = classType(cdef, tparams)
Some(atPos(cdef.pos.focus)(
DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, classTpe,
- New(classTpe, paramss map (_ map toIdent)))
+ New(classTpe, mmap(paramss)(toIdent)))
))
}
}
diff --git a/src/compiler/scala/tools/nsc/util/ProxyReport.scala b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
index 86cf2006bb..2f4f029308 100644
--- a/src/compiler/scala/tools/nsc/util/ProxyReport.scala
+++ b/src/compiler/scala/tools/nsc/util/ProxyReport.scala
@@ -13,7 +13,7 @@ import scala.collection.{ mutable, immutable, generic }
trait ProxyReport {
val global: Global
import global._
- import definitions.{ getClass => gc, _ }
+ import definitions._
private object classes {
def isIgnorable(sym: Symbol) = sym :: sym.allOverriddenSymbols exists { s =>
@@ -26,13 +26,13 @@ trait ProxyReport {
methods foreach (m => m.initialize.info.paramss.flatten foreach (_.initialize))
methods
}
- lazy val GlobalClass = gc(classOf[Global].getName)
- lazy val GenericClass = getModule("scala.collection.generic").moduleClass
- lazy val CollectionClass = getModule("scala.collection").moduleClass
+ lazy val GlobalClass = getRequiredClass(classOf[Global].getName)
+ lazy val GenericClass = getRequiredModule("scala.collection.generic").moduleClass
+ lazy val CollectionClass = getRequiredModule("scala.collection").moduleClass
- def getType(name: String) = getMember(GlobalClass, name.toTypeName)
- def getColl(name: String) = getMember(CollectionClass, name.toTypeName)
- def getGeneric(name: String) = getMember(GenericClass, name.toTypeName)
+ def getType(name: String) = getMember(GlobalClass, newTypeName(name))
+ def getColl(name: String) = getMember(CollectionClass, newTypeName(name))
+ def getGeneric(name: String) = getMember(GenericClass, newTypeName(name))
// the following operations + those in RewrappingTypeProxy are all operations
// in class Type that are overridden in some subclass
diff --git a/src/compiler/scala/tools/reflect/Mock.scala b/src/compiler/scala/tools/reflect/Mock.scala
index 5301816b4b..52c052b8a2 100644
--- a/src/compiler/scala/tools/reflect/Mock.scala
+++ b/src/compiler/scala/tools/reflect/Mock.scala
@@ -25,7 +25,8 @@ trait Mock extends (Invoked => AnyRef) {
def newInvocationHandler() = new InvocationHandler {
def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]) =
- mock(Invoked(proxy, method, args))
+ try { mock(Invoked(proxy, method, args)) }
+ catch { case _: NoClassDefFoundError => sys.exit(1) }
}
}
diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala
index a8d7408532..5067dce384 100644
--- a/src/compiler/scala/tools/util/EditDistance.scala
+++ b/src/compiler/scala/tools/util/EditDistance.scala
@@ -30,23 +30,37 @@ object EditDistance {
if (m == 0) return n
val d = Array.ofDim[Int](n + 1, m + 1)
- 0 to n foreach (x => d(x)(0) = x)
- 0 to m foreach (x => d(0)(x) = x)
+ var i = 0
+ val max = math.max(m, n)
+ while (i <= max) {
+ if (i <= n)
+ d(i)(0) = i
+ if (i <= m)
+ d(0)(i) = i
+ i += 1
+ }
+ i = 1
- for (i <- 1 to n ; val s_i = s(i - 1) ; j <- 1 to m) {
- val t_j = t(j - 1)
- val cost = if (s_i == t_j) 0 else 1
+ while (i <= n) {
+ val s_i = s(i - 1)
+ var j = 1
+ while (j <= m) {
+ val t_j = t(j - 1)
+ val cost = if (s_i == t_j) 0 else 1
- val c1 = d(i - 1)(j) + 1
- val c2 = d(i)(j - 1) + 1
- val c3 = d(i - 1)(j - 1) + cost
+ val c1 = d(i - 1)(j) + 1
+ val c2 = d(i)(j - 1) + 1
+ val c3 = d(i - 1)(j - 1) + cost
- d(i)(j) = c1 min c2 min c3
+ d(i)(j) = c1 min c2 min c3
- if (transpositions) {
- if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
- d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
+ if (transpositions) {
+ if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
+ d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
+ }
+ j += 1
}
+ i += 1
}
d(n)(m)
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 0c359a2619..6d5988d1dd 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -36,17 +36,21 @@ class JavapClass(
lazy val parser = new JpOptions
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val EnvCtr = EnvClass.getConstructor(List[Class[_]](): _*)
-
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
+ private def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = (
+ if (failed) null
+ else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
+ )
def findBytes(path: String): Array[Byte] =
tryFile(path) getOrElse tryClass(path)
def apply(args: Seq[String]): List[JpResult] = {
- args.toList filterNot (_ startsWith "-") map { path =>
+ if (failed) Nil
+ else args.toList filterNot (_ startsWith "-") map { path =>
val bytes = findBytes(path)
if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
@@ -54,12 +58,14 @@ class JavapClass(
}
def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- PrinterCtr.newInstance(in, printWriter, env)
+ if (failed) null
+ else PrinterCtr.newInstance(in, printWriter, env)
def newEnv(opts: Seq[String]): FakeEnvironment = {
- val env: FakeEnvironment = EnvClass.newInstance()
+ lazy val env: FakeEnvironment = EnvClass.newInstance()
- parser(opts) foreach { case (name, value) =>
+ if (failed) null
+ else parser(opts) foreach { case (name, value) =>
val field = EnvClass getDeclaredField name
field setAccessible true
field.set(env, value.asInstanceOf[AnyRef])
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index 2773aad87f..02eb364abe 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -17,9 +17,10 @@ package util
* @version 1.0
*/
trait StringOps {
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: Seq[String], sep: String) = oempty(xs: _*) mkString sep
+ def onull(s: String) = if (s == null) "" else s
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+ def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
val ys = oempty(xs: _*)
if (ys.isEmpty) orElse else ys mkString sep
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index f4481b800e..8bbda5dd05 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -12,22 +12,37 @@ trait CPSUtils {
var cpsEnabled = true
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
def vprintln(x: =>Any): Unit = if (verbose) println(x)
+
+ object cpsNames {
+ val catches = newTermName("$catches")
+ val ex = newTermName("$ex")
+ val flatMapCatch = newTermName("flatMapCatch")
+ val getTrivialValue = newTermName("getTrivialValue")
+ val isTrivial = newTermName("isTrivial")
+ val reify = newTermName("reify")
+ val reifyR = newTermName("reifyR")
+ val shift = newTermName("shift")
+ val shiftR = newTermName("shiftR")
+ val shiftSuffix = newTermName("$shift")
+ val shiftUnit = newTermName("shiftUnit")
+ val shiftUnitR = newTermName("shiftUnitR")
+ }
- lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
- lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
- lazy val MarkerCPSSynth = definitions.getClass("scala.util.continuations.cpsSynth")
- lazy val MarkerCPSAdaptPlus = definitions.getClass("scala.util.continuations.cpsPlus")
- lazy val MarkerCPSAdaptMinus = definitions.getClass("scala.util.continuations.cpsMinus")
-
- lazy val Context = definitions.getClass("scala.util.continuations.ControlContext")
- lazy val ModCPS = definitions.getModule("scala.util.continuations")
-
- lazy val MethShiftUnit = definitions.getMember(ModCPS, "shiftUnit")
- lazy val MethShiftUnitR = definitions.getMember(ModCPS, "shiftUnitR")
- lazy val MethShift = definitions.getMember(ModCPS, "shift")
- lazy val MethShiftR = definitions.getMember(ModCPS, "shiftR")
- lazy val MethReify = definitions.getMember(ModCPS, "reify")
- lazy val MethReifyR = definitions.getMember(ModCPS, "reifyR")
+ lazy val MarkerCPSSym = definitions.getRequiredClass("scala.util.continuations.cpsSym")
+ lazy val MarkerCPSTypes = definitions.getRequiredClass("scala.util.continuations.cpsParam")
+ lazy val MarkerCPSSynth = definitions.getRequiredClass("scala.util.continuations.cpsSynth")
+ lazy val MarkerCPSAdaptPlus = definitions.getRequiredClass("scala.util.continuations.cpsPlus")
+ lazy val MarkerCPSAdaptMinus = definitions.getRequiredClass("scala.util.continuations.cpsMinus")
+
+ lazy val Context = definitions.getRequiredClass("scala.util.continuations.ControlContext")
+ lazy val ModCPS = definitions.getRequiredModule("scala.util.continuations")
+
+ lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit)
+ lazy val MethShiftUnitR = definitions.getMember(ModCPS, cpsNames.shiftUnitR)
+ lazy val MethShift = definitions.getMember(ModCPS, cpsNames.shift)
+ lazy val MethShiftR = definitions.getMember(ModCPS, cpsNames.shiftR)
+ lazy val MethReify = definitions.getMember(ModCPS, cpsNames.reify)
+ lazy val MethReifyR = definitions.getMember(ModCPS, cpsNames.reifyR)
lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index b383227243..585dc3fbe8 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -355,7 +355,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val valueTpe = removeAllCPSAnnotations(expr.tpe)
- val sym = currentOwner.newValue(tree.pos, unit.fresh.newName("tmp"))
+ val sym = currentOwner.newValue(tree.pos, newTermName(unit.fresh.newName("tmp")))
.setInfo(valueTpe)
.setFlag(Flags.SYNTHETIC)
.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index f0c389bb11..960b27c52f 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -192,21 +192,21 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// val expr2 = if (catches.nonEmpty) {
val pos = catches.head.pos
- val argSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val argSym = currentOwner.newValueParameter(pos, cpsNames.ex).setInfo(ThrowableClass.tpe)
val rhs = Match(Ident(argSym), catches1)
val fun = Function(List(ValDef(argSym)), rhs)
- val funSym = currentOwner.newValueParameter(pos, "$catches").setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funSym = currentOwner.newValueParameter(pos, cpsNames.catches).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
- val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member("flatMapCatch")), List(Ident(funSym))) })
+ val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym))) })
argSym.owner = fun.symbol
val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
chown.traverse(rhs)
- val exSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val exSym = currentOwner.newValueParameter(pos, cpsNames.ex).setInfo(ThrowableClass.tpe)
val catch2 = { localTyper.typedCases(tree, List(
CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
- Apply(Select(Ident(funSym), "isDefinedAt"), List(Ident(exSym))),
+ Apply(Select(Ident(funSym), nme.isDefinedAt), List(Ident(exSym))),
Apply(Ident(funSym), List(Ident(exSym))))
), ThrowableClass.tpe, targettp) }
@@ -317,11 +317,11 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("fun.tpe:"+fun.tpe)
log("return type of fun:"+body1.tpe)
- var methodName = "map"
+ var methodName = nme.map
if (body1.tpe != null) {
if (body1.tpe.typeSymbol == Context)
- methodName = "flatMap"
+ methodName = nme.flatMap
}
else
unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
@@ -347,14 +347,14 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// val <lhs> = ctx.getTrivialValue; ... <--- TODO: try/catch ??? don't bother for the moment...
// else
// ctx.flatMap { <lhs> => ... }
- val ctxSym = currentOwner.newValue(vd.symbol.name + "$shift").setInfo(rhs1.tpe)
+ val ctxSym = currentOwner.newValue(vd.symbol.name append cpsNames.shiftSuffix).setInfo(rhs1.tpe)
val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
def ctxRef = localTyper.typed(Ident(ctxSym))
val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
- val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member("getTrivialValue"))))
+ val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
val switchExpr = localTyper.typed(atPos(vd.symbol.pos) {
val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
- If(Select(ctxRef, ctxSym.tpe.member("isTrivial")),
+ If(Select(ctxRef, ctxSym.tpe.member(cpsNames.isTrivial)),
applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
applyCombinatorFun(ctxRef, body2))
})
diff --git a/src/library/scala/Either.scala b/src/library/scala/Either.scala
index bc75f0f088..e454cdf5ec 100644
--- a/src/library/scala/Either.scala
+++ b/src/library/scala/Either.scala
@@ -575,15 +575,15 @@ object Either {
}
}
- /** If the condition is satisfied, return the given `A` in `Left`,
- * otherwise, return the given `B` in `Right`.
+ /** If the condition is satisfied, return the given `B` in `Right`,
+ * otherwise, return the given `A` in `Left`.
*
* {{{
* val userInput: String = ...
* Either.cond(
* userInput.forall(_.isDigit) && userInput.size == 10,
- * "The input (%s) does not look like a phone number".format(userInput),
- * PhoneNumber(userInput)
+ * PhoneNumber(userInput),
+ * "The input (%s) does not look like a phone number".format(userInput)
* }}}
*/
def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] =
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index c967a48abc..3d85f2f52f 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -48,19 +48,20 @@ import java.util.regex.Pattern
*
* @param initial The initial value from which to count the integers that
* identifies values at run-time.
- * @param names The sequence of names to give to this enumeration's values.
- *
* @author Matthias Zenger
*/
@SerialVersionUID(8476000850333817230L)
-abstract class Enumeration(initial: Int,
- @deprecated("Names should be specified individually or discovered via reflection", "2.10")
- names: String*) extends Serializable {
+abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
-
- @deprecated("Names should be specified individually or discovered via reflection", "2.10")
+
+ @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
+ def this(initial: Int, names: String*) = {
+ this(initial)
+ this.nextName = names.iterator
+ }
+ @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
def this(names: String*) = this(0, names: _*)
/* Note that `readResolve` cannot be private, since otherwise
@@ -97,12 +98,13 @@ abstract class Enumeration(initial: Int,
}
/** The integer to use to identify the next created value. */
- protected var nextId = initial
+ protected var nextId: Int = initial
/** The string to use to name the next created value. */
- protected var nextName = names.iterator
+ protected var nextName: Iterator[String] = _
+
private def nextNameOrNull =
- if (nextName.hasNext) nextName.next else null
+ if (nextName != null && nextName.hasNext) nextName.next else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -120,17 +122,8 @@ abstract class Enumeration(initial: Int,
*/
final def apply(x: Int): Value = vmap(x)
- /**
- * Return a `Value` from this `Enumeration` whose name matches
- * the argument `s`.
- *
- * You can pass a String* set of names to the constructor, or initialize
- * each `Enumeration` with `Value(String)`. Otherwise, the names are
- * determined automatically through reflection.
- *
- * Note the change here wrt 2.7 is intentional. You should know whether
- * a name is in an `Enumeration` beforehand. If not, just use find on
- * values.
+ /** Return a `Value` from this `Enumeration` whose name matches
+ * the argument `s`. The names are determined automatically via reflection.
*
* @param s an `Enumeration` name
* @return the `Value` of this `Enumeration` if its name matches `s`
diff --git a/src/library/scala/MatchingStrategy.scala b/src/library/scala/MatchingStrategy.scala
index 4eaf7852b8..d11598bad6 100644
--- a/src/library/scala/MatchingStrategy.scala
+++ b/src/library/scala/MatchingStrategy.scala
@@ -1,32 +1,27 @@
package scala
abstract class MatchingStrategy[M[+x]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: T)(matcher: T => M[U]): U
+
def zero: M[Nothing]
def one[T](x: T): M[T]
- def guard[T](cond: Boolean, then: => T): M[T] // = if(cond) one(then) else zero
- def altFlatMap[T, U](f: T => M[U])(a: M[U], b: M[T]): M[U] // = a orElse b.flatMap(f) -- can't easily&efficiently express M[T] should have flatMap and orElse
- def runOrElse[T, U](x: T)(f: T => M[U]): U
- def isSuccess[T, U](x: T)(f: T => M[U]): Boolean
-
- // find the first alternative to successfully flatMap f
- // to avoid code explosion due to alternatives
- def or[T, U](f: T => M[U], alts: M[T]*) = (alts foldLeft (zero: M[U]))(altFlatMap(f))
+ def guard[T](cond: Boolean, then: => T): M[T]
+ def isSuccess[T, U](x: T)(f: T => M[U]): Boolean // used for isDefinedAt
def caseResult[T](x: T): M[T] = one(x) // used as a marker to distinguish the RHS of a case (case pat => RHS) and intermediate successes
- // when deriving a partial function from a pattern match, we need to
- // distinguish the RHS of a case, which should not be evaluated when computing isDefinedAt,
+ // when deriving a partial function from a pattern match,
+ // we need to distinguish the RHS of a case, which should not be evaluated when computing isDefinedAt,
// from an intermediate result (which must be computed)
-
}
object MatchingStrategy {
implicit object OptionMatchingStrategy extends MatchingStrategy[Option] {
type M[+x] = Option[x]
- @inline def guard[T](cond: Boolean, then: => T): M[T] = if(cond) Some(then) else None
+ @inline def runOrElse[T, U](x: T)(f: T => M[U]): U = f(x) getOrElse (throw new MatchError(x))
@inline def zero: M[Nothing] = None
@inline def one[T](x: T): M[T] = Some(x)
- @inline def altFlatMap[T, U](f: T => M[U])(a: M[U], b: M[T]): M[U] = a orElse b.flatMap(f)
- @inline def runOrElse[T, U](x: T)(f: T => M[U]): U = f(x) getOrElse (throw new MatchError(x))
+ @inline def guard[T](cond: Boolean, then: => T): M[T] = if(cond) Some(then) else None
@inline def isSuccess[T, U](x: T)(f: T => M[U]): Boolean = !f(x).isEmpty
}
} \ No newline at end of file
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index bd498de847..6db4904b93 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -192,6 +192,13 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def exists(p: A => Boolean): Boolean =
!isEmpty && p(this.get)
+ /** Returns true if this option is empty '''or''' the predicate
+ * $p returns true when applied to this $option's value.
+ *
+ * @param p the predicate to test
+ */
+ @inline final def forall(p: A => Boolean): Boolean = isEmpty || p(this.get)
+
/** Apply the given procedure $f to the option's value,
* if it is nonempty. Otherwise, do nothing.
*
diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala
index 8a17ae87b0..8851f1ab91 100644
--- a/src/library/scala/Symbol.scala
+++ b/src/library/scala/Symbol.scala
@@ -31,8 +31,8 @@ final class Symbol private (val name: String) extends Serializable {
override def equals(other: Any) = this eq other.asInstanceOf[AnyRef]
}
-object Symbol extends UniquenessCache[String, Symbol]
-{
+object Symbol extends UniquenessCache[String, Symbol] {
+ override def apply(name: String): Symbol = super.apply(name)
protected def valueFromKey(name: String): Symbol = new Symbol(name)
protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name)
}
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index c2e392b807..8ab12a7c8e 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -14,15 +14,17 @@ package scala.annotation
* reason or another retain the same name and type signature,
* but some aspect of their behavior is different. An illustrative
* examples is Stack.iterator, which reversed from LIFO to FIFO
- * order between scala 2.7 and 2.8.
+ * order between Scala 2.7 and 2.8.
*
- * The version numbers are to mark the scala major/minor release
- * version where the change took place.
+ * @param message A message describing the change, which is emitted
+ * by the compiler if the flag `-Xmigration` is set.
+ *
+ * @param changedIn The version, in which the behaviour change was
+ * introduced.
*
* @since 2.8
*/
-private[scala] final class migration(
- majorVersion: Int,
- minorVersion: Int,
- message: String)
-extends annotation.StaticAnnotation {}
+ private[scala] final class migration(message: String, changedIn: String) extends annotation.StaticAnnotation {
+ @deprecated("Use the constructor taking two Strings instead.", "2.10")
+ def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
+ } \ No newline at end of file
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index dc89bcf85d..122eec2d90 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -123,10 +123,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* @param bf $bfinfo
* @return collection with intermediate results
*/
- @migration(2, 9,
- "This scanRight definition has changed in 2.9.\n" +
- "The previous behavior can be reproduced with scanRight.reverse."
- )
+ @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0")
def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Applies a function `f` to all elements of this $coll.
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 9f7bbadfb2..78e0773fb0 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -12,8 +12,6 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
import TraversableView.NoBuilder
-import annotation.migration
-
trait GenTraversableViewLike[+A,
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 7b780ba2e4..d46d215e0c 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -9,7 +9,7 @@
package scala.collection
import mutable.ArrayBuffer
-import annotation.{ tailrec, migration }
+import annotation.migration
import immutable.Stream
/** The `Iterator` object provides various functions for creating specialized iterators.
@@ -52,7 +52,7 @@ object Iterator {
/** Creates iterator that produces the results of some element computation a number of times.
*
- * @param n the number of elements returned by the iterator.
+ * @param len the number of elements returned by the iterator.
* @param elem the element computation
* @return An iterator that produces the results of `n` evaluations of `elem`.
*/
@@ -66,7 +66,7 @@ object Iterator {
/** Creates an iterator producing the values of a given function over a range of integer values starting from 0.
*
- * @param n The number of elements returned by the iterator
+ * @param end The number of elements returned by the iterator
* @param f The function computing element values
* @return An iterator that produces the values `f(0), ..., f(n -1)`.
*/
@@ -410,10 +410,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* which `pf` is defined the image `pf(x)`.
* @note Reuse: $consumesAndProducesIterator
*/
- @migration(2, 8,
- "This collect implementation bears no relationship to the one before 2.8.\n"+
- "The previous behavior can be reproduced with toSeq."
- )
+ @migration("`collect` has changed. The previous behavior can be reproduced with `toSeq`.", "2.8.0")
def collect[B](pf: PartialFunction[A, B]): Iterator[B] = {
val self = buffered
new AbstractIterator[B] {
@@ -1033,9 +1030,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Returns this iterator with patched values.
*
- * @param from The start index from which to patch
- * @param ps The iterator of patch values
- * @param replaced The number of values in the original iterator that are replaced by the patch.
+ * @param from The start index from which to patch
+ * @param patchElems The iterator of patch values
+ * @param replaced The number of values in the original iterator that are replaced by the patch.
* @note Reuse: $consumesTwoAndProducesOneIterator
*/
def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] {
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 75c1edac66..ceb980ff80 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -13,6 +13,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
+import annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
@@ -69,4 +70,9 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
xs
}
}
+
+ @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
+ if (this.isEmpty) that.isEmpty
+ else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p)
+ }
}
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 1653a011d6..07116e99dd 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -182,14 +182,14 @@ self =>
*
* @return the keys of this map as an iterable.
*/
- @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
def keys: Iterable[A] = keySet
/** Collects all values of this map in an iterable collection.
*
* @return the values of this map as an iterable.
*/
- @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
def values: Iterable[B] = new DefaultValuesIterable
/** The implementation class of the iterable returned by `values`.
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index b4695363de..7293f3775c 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -89,7 +89,7 @@ self =>
// note: this is only overridden here to add the migration annotation,
// which I hope to turn into an Xlint style warning as the migration aspect
// is not central to its importance.
- @migration(2, 8, "Set.map now returns a Set, so it will discard duplicate values.")
+ @migration("Set.map now returns a Set, so it will discard duplicate values.", "2.8.0")
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf)
/** Tests if some element is contained in this set.
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 4f0fec1de3..e2acc0b3e0 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -390,10 +390,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- @migration(2, 9,
- "This scanRight definition has changed in 2.9.\n" +
- "The previous behavior can be reproduced with scanRight.reverse."
- )
+ @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0")
def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
var scanned = List(z)
var acc = z
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 8c67d841bc..fbecad98fe 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -185,16 +185,19 @@ trait TraversableViewLike[+A,
override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That]
- @migration(2, 9,
- "This scanRight definition has changed in 2.9.\n" +
- "The previous behavior can be reproduced with scanRight.reverse."
- )
+ @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0")
override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: A => K): immutable.Map[K, This] =
thisSeq groupBy f mapValues (xs => newForced(xs))
+ override def unzip[A1, A2](implicit asPair: A => (A1, A2)) =
+ (newMapped(x => asPair(x)._1), newMapped(x => asPair(x)._2)) // TODO - Performance improvements.
+
+ override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) =
+ (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3)) // TODO - Performance improvements.
+
override def toString = viewToString
}
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 10cd3b5755..12c1a75c7a 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -13,7 +13,6 @@ package generic
import mutable.Builder
import annotation.migration
-import annotation.bridge
import annotation.unchecked.uncheckedVariance
/** A template class for companion objects of ``regular`` collection classes
@@ -148,7 +147,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* @throws `IllegalArgumentException` if all collections in this $coll
* are not of the same size.
*/
- @migration(2, 9, "As of 2.9, transpose throws an exception if collections are not uniformly sized.")
+ @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
def transpose[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[CC[B] @uncheckedVariance] = {
if (isEmpty)
return genericBuilder[CC[B]].result
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 531eac6c01..c6f056bd81 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -277,6 +277,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
override def toStream : Stream[A] =
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
+
+ @deprecated("use `distinct` instead", "2.8.0")
+ def removeDuplicates: List[A] = distinct
}
/** The empty list.
@@ -343,6 +346,8 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
*/
object List extends SeqFactory[List] {
+ import scala.collection.{Iterable, Seq, IndexedSeq}
+
/** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -352,6 +357,248 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
+
+ /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)`
+ * where `v,,0,, = start` and elements are in the range between `start`
+ * (inclusive) and `end` (exclusive).
+ *
+ * @param start the start value of the list
+ * @param end the end value of the list
+ * @param step the increment function of the list, which given `v,,n,,`,
+ * computes `v,,n+1,,`. Must be monotonically increasing
+ * or decreasing.
+ * @return the sorted list of all integers in range `[start;end)`.
+ */
+ @deprecated("use `iterate` instead", "2.8.0")
+ def range(start: Int, end: Int, step: Int => Int): List[Int] = {
+ val up = step(start) > start
+ val down = step(start) < start
+ val b = new ListBuffer[Int]
+ var i = start
+ while ((!up || i < end) && (!down || i > end)) {
+ b += i
+ val next = step(i)
+ if (i == next)
+ throw new IllegalArgumentException("the step function did not make any progress on "+ i)
+ i = next
+ }
+ b.toList
+ }
+
+ /** Create a list containing several copies of an element.
+ *
+ * @param n the length of the resulting list
+ * @param elem the element composing the resulting list
+ * @return a list composed of `n` elements all equal to `elem`
+ */
+ @deprecated("use `fill` instead", "2.8.0")
+ def make[A](n: Int, elem: A): List[A] = {
+ val b = new ListBuffer[A]
+ var i = 0
+ while (i < n) {
+ b += elem
+ i += 1
+ }
+ b.toList
+ }
+
+ /** Concatenate all the elements of a given list of lists.
+ *
+ * @param xss the list of lists that are to be concatenated
+ * @return the concatenation of all the lists
+ */
+ @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
+ def flatten[A](xss: List[List[A]]): List[A] = {
+ val b = new ListBuffer[A]
+ for (xs <- xss) {
+ var xc = xs
+ while (!xc.isEmpty) {
+ b += xc.head
+ xc = xc.tail
+ }
+ }
+ b.toList
+ }
+
+ /** Transforms a list of pairs into a pair of lists.
+ *
+ * @param xs the list of pairs to unzip
+ * @return a pair of lists.
+ */
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
+ def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
+ val b1 = new ListBuffer[A]
+ val b2 = new ListBuffer[B]
+ var xc = xs
+ while (!xc.isEmpty) {
+ b1 += xc.head._1
+ b2 += xc.head._2
+ xc = xc.tail
+ }
+ (b1.toList, b2.toList)
+ }
+
+ /** Transforms an iterable of pairs into a pair of lists.
+ *
+ * @param xs the iterable of pairs to unzip
+ * @return a pair of lists.
+ */
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
+ def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
+ xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
+ case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
+ }
+
+ /**
+ * Returns the `Left` values in the given `Iterable` of `Either`s.
+ */
+ @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
+ def lefts[A, B](es: Iterable[Either[A, B]]) =
+ es.foldRight[List[A]](Nil)((e, as) => e match {
+ case Left(a) => a :: as
+ case Right(_) => as
+ })
+
+ /**
+ * Returns the `Right` values in the given `Iterable` of `Either`s.
+ */
+ @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
+ def rights[A, B](es: Iterable[Either[A, B]]) =
+ es.foldRight[List[B]](Nil)((e, bs) => e match {
+ case Left(_) => bs
+ case Right(b) => b :: bs
+ })
+
+ /** Transforms an Iterable of Eithers into a pair of lists.
+ *
+ * @param xs the iterable of Eithers to separate
+ * @return a pair of lists.
+ */
+ @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
+ def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
+ es.foldRight[(List[A], List[B])]((Nil, Nil)) {
+ case (Left(a), (lefts, rights)) => (a :: lefts, rights)
+ case (Right(b), (lefts, rights)) => (lefts, b :: rights)
+ }
+
+ /** Converts an iterator to a list.
+ *
+ * @param it the iterator to convert
+ * @return a list that contains the elements returned by successive
+ * calls to `it.next`
+ */
+ @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
+ def fromIterator[A](it: Iterator[A]): List[A] = it.toList
+
+ /** Converts an array into a list.
+ *
+ * @param arr the array to convert
+ * @return a list that contains the same elements than `arr`
+ * in the same order
+ */
+ @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
+ def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
+
+ /** Converts a range of an array into a list.
+ *
+ * @param arr the array to convert
+ * @param start the first index to consider
+ * @param len the length of the range to convert
+ * @return a list that contains the same elements than `arr`
+ * in the same order
+ */
+ @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
+ def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
+ var res: List[A] = Nil
+ var i = start + len
+ while (i > start) {
+ i -= 1
+ res = arr(i) :: res
+ }
+ res
+ }
+
+ /** Returns the list resulting from applying the given function `f`
+ * to corresponding elements of the argument lists.
+ *
+ * @param f function to apply to each pair of elements.
+ * @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are
+ * `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and
+ * `n = min(k,l)`
+ */
+ @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
+ def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
+ val b = new ListBuffer[C]
+ var xc = xs
+ var yc = ys
+ while (!xc.isEmpty && !yc.isEmpty) {
+ b += f(xc.head, yc.head)
+ xc = xc.tail
+ yc = yc.tail
+ }
+ b.toList
+ }
+
+ /** Tests whether the given predicate `p` holds
+ * for all corresponding elements of the argument lists.
+ *
+ * @param p function to apply to each pair of elements.
+ * @return `(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
+ * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]`
+ * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
+ * `[b<sub>0</sub>, ..., b<sub>l</sub>]`
+ * and `n = min(k,l)`
+ */
+ @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
+ def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
+ var xc = xs
+ var yc = ys
+ while (!xc.isEmpty && !yc.isEmpty) {
+ if (!f(xc.head, yc.head)) return false
+ xc = xc.tail
+ yc = yc.tail
+ }
+ true
+ }
+
+ /** Tests whether the given predicate `p` holds
+ * for some corresponding elements of the argument lists.
+ *
+ * @param p function to apply to each pair of elements.
+ * @return `n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
+ * ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
+ * `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
+ * `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
+ * `n = min(k,l)`
+ */
+ @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
+ def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
+ var xc = xs
+ var yc = ys
+ while (!xc.isEmpty && !yc.isEmpty) {
+ if (f(xc.head, yc.head)) return true
+ xc = xc.tail
+ yc = yc.tail
+ }
+ false
+ }
+
+ /** Transposes a list of lists.
+ * pre: All element lists have the same length.
+ *
+ * @param xss the list of lists
+ * @return the transposed list of lists
+ */
+ @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
+ def transpose[A](xss: List[List[A]]): List[List[A]] = {
+ val buf = new ListBuffer[List[A]]
+ var yss = xss
+ while (!yss.head.isEmpty) {
+ buf += (yss map (_.head))
+ yss = (yss map (_.tail))
+ }
+ buf.toList
+ }
}
/** Only used for list serialization */
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 45cf088dd9..bbefd983fd 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -47,6 +47,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d)
/** The same map with a given default value.
+ * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`.
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index e891f8bec8..c92c0268b6 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -51,15 +51,35 @@ extends collection.AbstractSeq[Int]
{
override def par = new ParRange(this)
- // This member is designed to enforce conditions:
- // (step != 0) && (length <= Int.MaxValue),
- // but cannot be evaluated eagerly because we have a pattern where ranges
- // are constructed like: "x to y by z"
- // The "x to y" piece should not trigger an exception. So the calculation
- // is delayed, which means it will not fail fast for those cases where failing
- // was correct.
- private lazy val numRangeElements: Int = Range.count(start, end, step, isInclusive)
-
+ private def gap = end.toLong - start.toLong
+ private def isExact = gap % step == 0
+ private def hasStub = isInclusive || !isExact
+ private def longLength = gap / step + ( if (hasStub) 1 else 0 )
+
+ // Check cannot be evaluated eagerly because we have a pattern where
+ // ranges are constructed like: "x to y by z" The "x to y" piece
+ // should not trigger an exception. So the calculation is delayed,
+ // which means it will not fail fast for those cases where failing was
+ // correct.
+ override final val isEmpty = (
+ (start > end && step > 0)
+ || (start < end && step < 0)
+ || (start == end && !isInclusive)
+ )
+ final val numRangeElements: Int = {
+ if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
+ else if (isEmpty) 0
+ else {
+ val len = longLength
+ if (len > scala.Int.MaxValue) -1
+ else len.toInt
+ }
+ }
+ final val lastElement = start + (numRangeElements - 1) * step
+ final val terminalElement = start + numRangeElements * step
+
+ override def last = if (isEmpty) Nil.last else lastElement
+
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Create a new range with the `start` and `end` values of this range and
@@ -71,31 +91,49 @@ extends collection.AbstractSeq[Int]
def isInclusive = false
+ override def size = length
+ override def length = if (numRangeElements < 0) fail() else numRangeElements
+
+ private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
+ private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
+ private def validateMaxLength() {
+ if (numRangeElements < 0)
+ fail()
+ }
+
+ def validateRangeBoundaries(f: Int => Any): Boolean = {
+ validateMaxLength()
+
+ start != Int.MinValue || end != Int.MinValue || {
+ var count = 0
+ var num = start
+ while (count < numRangeElements) {
+ f(num)
+ count += 1
+ num += step
+ }
+ false
+ }
+ }
+
+ @inline final def apply(idx: Int): Int = {
+ validateMaxLength()
+ if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
+ else start + (step * idx)
+ }
+
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
- if (length > 0) {
- val last = this.last
+ if (validateRangeBoundaries(f)) {
var i = start
- while (i != last) {
+ val terminal = terminalElement
+ val step = this.step
+ while (i != terminal) {
f(i)
i += step
}
- f(i)
}
}
- override def length: Int = numRangeElements
- override lazy val last: Int =
- if (length == 0) Nil.last
- else locationAfterN(length - 1)
-
- final override def isEmpty = length == 0
-
- @inline
- final def apply(idx: Int): Int = {
- if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
- locationAfterN(idx)
- }
-
/** Creates a new range containing the first `n` elements of this range.
*
* $doesNotUseBuilders
@@ -104,8 +142,8 @@ extends collection.AbstractSeq[Int]
* @return a new range consisting of `n` first elements.
*/
final override def take(n: Int): Range = (
- if (n <= 0 || length == 0) newEmptyRange(start)
- else if (n >= length) this
+ if (n <= 0 || isEmpty) newEmptyRange(start)
+ else if (n >= numRangeElements) this
else new Range.Inclusive(start, locationAfterN(n - 1), step)
)
@@ -117,8 +155,8 @@ extends collection.AbstractSeq[Int]
* @return a new range consisting of all the elements of this range except `n` first elements.
*/
final override def drop(n: Int): Range = (
- if (n <= 0 || length == 0) this
- else if (n >= length) newEmptyRange(end)
+ if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements) newEmptyRange(end)
else copy(locationAfterN(n), end, step)
)
@@ -153,7 +191,7 @@ extends collection.AbstractSeq[Int]
var current = start
var counted = 0
- while (counted < length && p(current)) {
+ while (counted < numRangeElements && p(current)) {
counted += 1
current += step
}
@@ -161,7 +199,7 @@ extends collection.AbstractSeq[Int]
}
// Tests whether a number is within the endpoints, without testing
// whether it is a member of the sequence (i.e. when step > 1.)
- private def isWithinBoundaries(elem: Int) = (length > 0) && (
+ private def isWithinBoundaries(elem: Int) = !isEmpty && (
(step > 0 && start <= elem && elem <= last ) ||
(step < 0 && last <= elem && elem <= start)
)
@@ -190,21 +228,21 @@ extends collection.AbstractSeq[Int]
*
* $doesNotUseBuilders
*/
- final override def takeRight(n: Int): Range = drop(length - n)
+ final override def takeRight(n: Int): Range = drop(numRangeElements - n)
/** Creates a new range consisting of the initial `length - n` elements of the range.
*
* $doesNotUseBuilders
*/
- final override def dropRight(n: Int): Range = take(length - n)
+ final override def dropRight(n: Int): Range = take(numRangeElements - n)
/** Returns the reverse of this range.
*
* $doesNotUseBuilders
*/
final override def reverse: Range =
- if (length > 0) new Range.Inclusive(last, start, -step)
- else this
+ if (isEmpty) this
+ else new Range.Inclusive(last, start, -step)
/** Make range inclusive.
*/
@@ -215,10 +253,9 @@ extends collection.AbstractSeq[Int]
final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0)
final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
- val len = length
- if (len == 0) 0
- else if (len == 1) head
- else (len.toLong * (head + last) / 2).toInt
+ if (isEmpty) 0
+ else if (numRangeElements == 1) head
+ else (numRangeElements.toLong * (head + last) / 2).toInt
}
override def toIterable = this
@@ -228,7 +265,7 @@ extends collection.AbstractSeq[Int]
override def equals(other: Any) = other match {
case x: Range =>
(x canEqual this) && (length == x.length) && (
- (length == 0) || // all empty sequences are equal
+ isEmpty || // all empty sequences are equal
(start == x.start && last == x.last) // same length and same endpoints implies equality
)
case _ =>
@@ -239,7 +276,7 @@ extends collection.AbstractSeq[Int]
*/
override def toString() = {
- val endStr = if (length > Range.MAX_PRINT) ", ... )" else ")"
+ val endStr = if (numRangeElements > Range.MAX_PRINT) ", ... )" else ")"
take(Range.MAX_PRINT).mkString("Range(", ", ", endStr)
}
}
@@ -350,3 +387,4 @@ object Range {
// super.foreach(f)
}
}
+ \ No newline at end of file
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index acf26e59eb..1dc2fc27d5 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -152,7 +152,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
def prepend(elems: A*) { prependAll(elems) }
/** Prepends the elements contained in a traversable object to this buffer.
- * @param elems the collection containing the elements to prepend.
+ * @param xs the collection containing the elements to prepend.
*/
def prependAll(xs: TraversableOnce[A]) { xs ++=: this }
@@ -220,10 +220,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param xs the traversable object.
* @return a new collection consisting of all the elements of this collection and `xs`.
*/
- @migration(2, 8,
- "As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
- "Use ++= instead if you intend to add by side effect to an existing collection.\n"
- )
+ @migration("`++` creates a new buffer. Use `++=` to add an element from this buffer and return that buffer itself.", "2.8.0")
def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
@bridge
@@ -234,10 +231,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param elem the element to remove.
* @return a new collection consisting of all the elements of this collection except `elem`.
*/
- @migration(2, 8,
- "As of 2.8, - always creates a new collection, even on Buffers.\n"+
- "Use -= instead if you intend to remove by side effect from an existing collection.\n"
- )
+ @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def -(elem: A): This = clone() -= elem
/** Creates a new collection with all the elements of this collection except the two
@@ -249,10 +243,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @return a new collection consisting of all the elements of this collection except
* `elem1`, `elem2` and those in `elems`.
*/
- @migration(2, 8,
- "As of 2.8, - always creates a new collection, even on Buffers.\n"+
- "Use -= instead if you intend to remove by side effect from an existing collection.\n"
- )
+ @migration("`-` creates a new buffer. Use `-=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def -(elem1: A, elem2: A, elems: A*): This = clone() -= elem1 -= elem2 --= elems
/** Creates a new collection with all the elements of this collection except those
@@ -262,10 +253,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @return a new collection with all the elements of this collection except
* those in `xs`
*/
- @migration(2, 8,
- "As of 2.8, -- always creates a new collection, even on Buffers.\n"+
- "Use --= instead if you intend to remove by side effect from an existing collection.\n"
- )
+ @migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
@bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index 7ad2f9558f..dfb70beeda 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -91,7 +91,7 @@ trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]]
* current node, i.e. `this` node itself will still point "into" the list it
* was in.
*/
- @migration(2, 9, "Double linked list now removes the current node from the list.")
+ @migration("Double linked list now removes the current node from the list.", "2.9.0")
def remove(): Unit = if (nonEmpty) {
next.prev = prev
if (prev ne null) prev.next = next // because this could be the first node
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 0740d97e09..f3fb6738eb 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -24,7 +24,7 @@ package mutable
trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
import FlatHashTable._
- private final val tableDebug = false
+ private final def tableDebug = false
@transient private[collection] var _loadFactor = defaultLoadFactor
@@ -43,11 +43,19 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/** The array keeping track of number of elements in 32 element blocks.
*/
@transient protected var sizemap: Array[Int] = null
-
+
+ @transient var seedvalue: Int = tableSizeSeed
+
import HashTable.powerOfTwo
+
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+
private def initialCapacity = capacity(initialSize)
-
+
+ protected def randomSeed = seedGenerator.get.nextInt()
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
/**
* Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
@@ -57,23 +65,25 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
in.defaultReadObject
-
- _loadFactor = in.readInt
+
+ _loadFactor = in.readInt()
assert(_loadFactor > 0)
-
- val size = in.readInt
+
+ val size = in.readInt()
tableSize = 0
assert(size >= 0)
-
+
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
threshold = newThreshold(_loadFactor, table.size)
-
- val smDefined = in.readBoolean
+
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
if (smDefined) sizeMapInit(table.length) else sizemap = null
-
+
var index = 0
while (index < size) {
- val elem = in.readObject.asInstanceOf[A]
+ val elem = in.readObject().asInstanceOf[A]
f(elem)
addEntry(elem)
index += 1
@@ -89,6 +99,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
+ out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
iterator.foreach(out.writeObject)
}
@@ -125,6 +136,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
if (entry == elem) return false
h = (h + 1) % table.length
entry = table(h)
+ //Statistics.collisions += 1
}
table(h) = elem.asInstanceOf[AnyRef]
tableSize = tableSize + 1
@@ -185,6 +197,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table = new Array[AnyRef](table.length * 2)
tableSize = 0
nnSizeMapReset(table.length)
+ seedvalue = tableSizeSeed
threshold = newThreshold(_loadFactor, table.length)
var i = 0
while (i < oldtable.length) {
@@ -280,10 +293,24 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/* End of size map handling code */
protected final def index(hcode: Int) = {
+ // version 1 (no longer used - did not work with parallel hash tables)
// improve(hcode) & (table.length - 1)
- val improved = improve(hcode)
+
+ // version 2 (allows for parallel hash table construction)
+ val improved = improve(hcode, seedvalue)
val ones = table.length - 1
(improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
+
+ // version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables)
+ // val hc = improve(hcode)
+ // val bbp = blockbitpos
+ // val ones = table.length - 1
+ // val needed = Integer.bitCount(ones)
+ // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5)
+ // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc)
+ // val restmask = (1 << (needed - 5)) - 1
+ // val improved = blockbits | (rest & restmask)
+ // improved
}
protected def clearTable() {
@@ -298,6 +325,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table,
tableSize,
threshold,
+ seedvalue,
sizemap
)
@@ -307,6 +335,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table = c.table
tableSize = c.tableSize
threshold = c.threshold
+ seedvalue = c.seedvalue
sizemap = c.sizemap
}
if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
@@ -315,21 +344,30 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
-
private[collection] object FlatHashTable {
-
+
+ /** Creates a specific seed to improve hashcode of a hash table instance
+ * and ensure that iteration order vulnerabilities are not 'felt' in other
+ * hash tables.
+ *
+ * See SI-5293.
+ */
+ final def seedGenerator = new ThreadLocal[util.Random] {
+ override def initialValue = new util.Random
+ }
+
/** The load factor for the hash table; must be < 500 (0.5)
*/
- private[collection] def defaultLoadFactor: Int = 450
- private[collection] final def loadFactorDenum = 1000
+ def defaultLoadFactor: Int = 450
+ final def loadFactorDenum = 1000
/** The initial size of the hash table.
*/
- private[collection] def initialSize: Int = 16
+ def initialSize: Int = 32
- private[collection] def sizeForThreshold(size: Int, _loadFactor: Int) = (size.toLong * loadFactorDenum / _loadFactor).toInt
+ def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
- private[collection] def newThreshold(_loadFactor: Int, size: Int) = {
+ def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5")
(size.toLong * lf / loadFactorDenum ).toInt
@@ -340,6 +378,7 @@ private[collection] object FlatHashTable {
val table: Array[AnyRef],
val tableSize: Int,
val threshold: Int,
+ val seedvalue: Int,
val sizemap: Array[Int]
)
@@ -352,16 +391,24 @@ private[collection] object FlatHashTable {
if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.")
else elem.hashCode()
- protected final def improve(hcode: Int) = {
- // var h: Int = hcode + ~(hcode << 9)
- // h = h ^ (h >>> 14)
- // h = h + (h << 4)
- // h ^ (h >>> 10)
+ protected final def improve(hcode: Int, seed: Int) = {
+ //var h: Int = hcode + ~(hcode << 9)
+ //h = h ^ (h >>> 14)
+ //h = h + (h << 4)
+ //h ^ (h >>> 10)
+
var i = hcode * 0x9e3775cd
i = java.lang.Integer.reverseBytes(i)
- i * 0x9e3775cd
+ val improved = i * 0x9e3775cd
+
+ // for the remainder, see SI-5293
+ // to ensure that different bits are used for different hash tables, we have to rotate based on the seed
+ val rotation = seed % 32
+ val rotated = (improved >>> rotation) | (improved << (32 - rotation))
+ rotated
}
}
}
+
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 32fca1117c..3232179dbb 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -46,12 +46,12 @@ extends AbstractMap[A, B]
override def keysIterator: Iterator[A] = imap.keysIterator
- @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ @migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0")
override def keys: collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
- @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ @migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0")
override def values: collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index e4b1458e53..b08a4b7bc9 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -90,10 +90,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param kv the key/value mapping to be added
* @return a new map containing mappings of this map and the mapping `kv`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To add an element as a\n"+
- "side effect to an existing map and return that map itself, use +=."
- )
+ @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0")
def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = clone().asInstanceOf[Map[A, B1]] += kv
/** Creates a new map containing two or more key/value mappings and all the key/value
@@ -106,10 +103,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param elems the remaining elements to add.
* @return a new map containing mappings of this map and two or more specified mappings.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To add an element as a\n"+
- "side effect to an existing map and return that map itself, use +=."
- )
+ @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0")
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] =
clone().asInstanceOf[Map[A, B1]] += elem1 += elem2 ++= elems
@@ -121,10 +115,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param xs the traversable object.
* @return a new map containing mappings of this map and those provided by `xs`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To add the elements as a\n"+
- "side effect to an existing map and return that map itself, use ++=."
- )
+ @migration("`++` creates a new map. Use `++=` to add an element to this map and return that map itself.", "2.8.0")
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
clone().asInstanceOf[Map[A, B1]] ++= xs.seq
@@ -154,10 +145,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param key the key to be removed
* @return a new map with all the mappings of this map except that with a key `key`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To remove an element as a\n"+
- "side effect to an existing map and return that map itself, use -=."
- )
+ @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0")
override def -(key: A): This = clone() -= key
/** Removes all bindings from the map. After this operation has completed,
@@ -223,10 +211,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map containing all the mappings of this map except mappings
* with a key equal to `elem1`, `elem2` or any of `elems`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To remove an element as a\n"+
- "side effect to an existing map and return that map itself, use -=."
- )
+ @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0")
override def -(elem1: A, elem2: A, elems: A*): This =
clone() -= elem1 -= elem2 --= elems
@@ -237,10 +222,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @return a new map with all the key/value mappings of this map except mappings
* with a key equal to a key from `xs`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new map. To remove the elements as a\n"+
- "side effect to an existing map and return that map itself, use --=."
- )
+ @migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
@bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 23a68c1d3e..381cb09e18 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import annotation.{migration, bridge}
+import annotation.bridge
/** This class implements priority queues using a heap.
* To prioritize elements of type A there must be an implicit
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index c5eeb1ae7f..5e201d9959 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -141,10 +141,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @param elem the element to add.
* @return a new set consisting of elements of this set and `elem`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To add an element as a\n"+
- "side effect to an existing set and return that set itself, use +=."
- )
+ @migration("`+` creates a new set. Use `+=` to add an element to this set and return that set itself.", "2.8.0")
override def + (elem: A): This = clone() += elem
/** Creates a new set consisting of all the elements of this set and two or more
@@ -158,10 +155,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @return a new set consisting of all the elements of this set, `elem1`,
* `elem2` and those in `elems`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To add the elements as a\n"+
- "side effect to an existing set and return that set itself, use +=."
- )
+ @migration("`+` creates a new set. Use `+=` to add an element to this set and return that set itself.", "2.8.0")
override def + (elem1: A, elem2: A, elems: A*): This =
clone() += elem1 += elem2 ++= elems
@@ -173,10 +167,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @param xs the traversable object.
* @return a new set consisting of elements of this set and those in `xs`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To add the elements as a\n"+
- "side effect to an existing set and return that set itself, use ++=."
- )
+ @migration("`++` creates a new set. Use `++=` to add elements to this set and return that set itself.", "2.8.0")
override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
@bridge def ++(xs: TraversableOnce[A]): This = ++(xs: GenTraversableOnce[A])
@@ -186,10 +177,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @param elem the element to remove.
* @return a new set consisting of all the elements of this set except `elem`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To remove the element as a\n"+
- "side effect to an existing set and return that set itself, use -=."
- )
+ @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0")
override def -(elem: A): This = clone() -= elem
/** Creates a new set consisting of all the elements of this set except the two
@@ -201,10 +189,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @return a new set consisting of all the elements of this set except
* `elem1`, `elem2` and `elems`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
- "side effect to an existing set and return that set itself, use -=."
- )
+ @migration("`-` creates a new set. Use `-=` to remove an element from this set and return that set itself.", "2.8.0")
override def -(elem1: A, elem2: A, elems: A*): This =
clone() -= elem1 -= elem2 --= elems
@@ -215,10 +200,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @return a new set consisting of all the elements of this set except
* elements from `xs`.
*/
- @migration(2, 8,
- "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
- "side effect to an existing set and return that set itself, use --=."
- )
+ @migration("`--` creates a new set. Use `--=` to remove elements from this set and return that set itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
@bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index ffac3b78b7..8fad131009 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -156,17 +156,17 @@ extends AbstractSeq[A]
*
* @return an iterator over all stack elements.
*/
- @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
+ @migration("`iterator` traverses in FIFO order.", "2.8.0")
override def iterator: Iterator[A] = elems.iterator
/** Creates a list of all stack elements in LIFO order.
*
* @return the created list.
*/
- @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
+ @migration("`toList` traverses in FIFO order.", "2.8.0")
override def toList: List[A] = elems
- @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
+ @migration("`foreach` traverses in FIFO order.", "2.8.0")
override def foreach[U](f: A => U): Unit = super.foreach(f)
/** This method clones the stack.
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 603086d209..d9ad58f054 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -403,7 +403,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*
* @return the reversed StringBuilder
*/
- @migration(2, 8, "Since 2.8 reverse returns a new instance. Use 'reverseContents' to update in place.")
+ @migration("`reverse` returns a new instance. Use `reverseContents` to update in place and return that StringBuilder itself.", "2.8.0")
override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying) reverse)
override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying))
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 8b2c6faa41..6e3ae13ada 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -40,14 +40,14 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) }
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
- @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
override def values: collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
override def keySet: collection.Set[A] = synchronized { super.keySet }
- @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
override def keys: collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 2a10458457..350e64739f 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -88,7 +88,7 @@ self =>
/* accessors */
override def foreach[U](f: Int => U): Unit = {
- rangeleft.foreach(f)
+ rangeleft.foreach(f.asInstanceOf[Int => Unit])
ind = len
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 37065e32fc..31750b0b0d 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -190,7 +190,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
} else {
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
- val table = new HashTable[K, DefaultEntry[K, V]] {
+ object table extends HashTable[K, DefaultEntry[K, V]] {
def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
sizeMapInit(table.length)
}
@@ -201,8 +201,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
}
i += 1
}
- val c = table.hashTableContents
- new ParHashMap(c)
+ new ParHashMap(table.hashTableContents)
}
/* classes */
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 9dbc7dc6c4..7763cdf318 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -119,10 +119,11 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
import collection.parallel.tasksupport._
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
-
+ private var seedvalue = 27
+
def +=(elem: T) = {
sz += 1
- val hc = improve(elemHashCode(elem))
+ val hc = improve(elemHashCode(elem), seedvalue)
val pos = hc >>> nonmasklen
if (buckets(pos) eq null) {
// initialize bucket
@@ -140,7 +141,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
private def parPopulate: FlatHashTable.Contents[T] = {
// construct it in parallel
- val table = new AddingFlatHashTable(size, tableLoadFactor)
+ val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
val (inserted, leftovers) = executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
var leftinserts = 0
for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
@@ -153,6 +154,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// TODO parallelize by keeping separate size maps and merging them
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
+ seedvalue = ParHashSetCombiner.this.seedvalue
}
for {
buffer <- buckets;
@@ -168,13 +170,13 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
* it has to take and allocates the underlying hash table in advance.
* Elements can only be added to it. The final size has to be adjusted manually.
* It is internal to `ParHashSet` combiners.
- *
*/
- class AddingFlatHashTable(numelems: Int, lf: Int) extends FlatHashTable[T] {
+ class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] {
_loadFactor = lf
table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor)))
tableSize = 0
threshold = FlatHashTable.newThreshold(_loadFactor, table.length)
+ seedvalue = inseedvalue
sizeMapInit(table.length)
override def toString = "AFHT(%s)".format(table.length)
@@ -310,6 +312,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
}
+
private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 487f9c7b87..1a27df1c10 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -97,19 +97,28 @@ object Codec extends LowPriorityCodecImplicits {
new Codec(decoder.charset()) { override def decoder = _decoder }
}
- @migration(2, 9, "This method was previously misnamed `toUTF8`. Converts from Array[Byte] to Array[Char].")
- def fromUTF8(bytes: Array[Byte]): Array[Char] = {
- val bbuffer = java.nio.ByteBuffer wrap bytes
+ @migration("This method was previously misnamed `toUTF8`. Converts from Array[Byte] to Array[Char].", "2.9.0")
+ def fromUTF8(bytes: Array[Byte]): Array[Char] = fromUTF8(bytes, 0, bytes.length)
+ def fromUTF8(bytes: Array[Byte], offset: Int, len: Int): Array[Char] = {
+ val bbuffer = java.nio.ByteBuffer.wrap(bytes, offset, len)
val cbuffer = UTF8.charSet decode bbuffer
- val chars = new Array[Char](cbuffer.remaining())
+ val chars = new Array[Char](cbuffer.remaining())
cbuffer get chars
chars
}
- @migration(2, 9, "This method was previously misnamed `fromUTF8`. Converts from character sequence to Array[Byte].")
+ @migration("This method was previously misnamed `fromUTF8`. Converts from character sequence to Array[Byte].", "2.9.0")
def toUTF8(cs: CharSequence): Array[Byte] = {
- val cbuffer = java.nio.CharBuffer wrap cs
+ val cbuffer = java.nio.CharBuffer.wrap(cs, 0, cs.length)
+ val bbuffer = UTF8.charSet encode cbuffer
+ val bytes = new Array[Byte](bbuffer.remaining())
+ bbuffer get bytes
+
+ bytes
+ }
+ def toUTF8(chars: Array[Char], offset: Int, len: Int): Array[Byte] = {
+ val cbuffer = java.nio.CharBuffer.wrap(chars, offset, len)
val bbuffer = UTF8.charSet encode cbuffer
val bytes = new Array[Byte](bbuffer.remaining())
bbuffer get bytes
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 553fb08c18..497de92c80 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -13,7 +13,6 @@ import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
-import annotation.migration
/**
* @author Stephane Micheloud
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index b67146942f..53d618db63 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -58,11 +58,11 @@ trait Ordered[A] extends java.lang.Comparable[A] {
*
* Returns `x` where:
*
- * - `x < 0` when `this > that`
+ * - `x < 0` when `this < that`
*
* - `x == 0` when `this == that`
*
- * - `x < 0` when `this > that`
+ * - `x > 0` when `this > that`
*
*/
def compare(that: A): Int
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index b23165154c..8fc74a9d5d 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -10,7 +10,7 @@ package scala.math
import java.util.Comparator
-/** Ordering is trait whose instances each represent a strategy for sorting
+/** Ordering is a trait whose instances each represent a strategy for sorting
* instances of a type.
*
* Ordering's companion object defines many implicit objects to deal with
@@ -262,12 +262,52 @@ object Ordering extends LowPriorityOrderingImplicits {
implicit object Long extends LongOrdering
trait FloatOrdering extends Ordering[Float] {
+ outer =>
+
def compare(x: Float, y: Float) = java.lang.Float.compare(x, y)
+
+ override def lteq(x: Float, y: Float): Boolean = x <= y
+ override def gteq(x: Float, y: Float): Boolean = x >= y
+ override def lt(x: Float, y: Float): Boolean = x < y
+ override def gt(x: Float, y: Float): Boolean = x > y
+ override def equiv(x: Float, y: Float): Boolean = x == y
+ override def max(x: Float, y: Float): Float = math.max(x, y)
+ override def min(x: Float, y: Float): Float = math.min(x, y)
+
+ override def reverse: Ordering[Float] = new FloatOrdering {
+ override def reverse = outer
+ override def compare(x: Float, y: Float) = outer.compare(y, x)
+
+ override def lteq(x: Float, y: Float): Boolean = outer.lteq(y, x)
+ override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x)
+ override def lt(x: Float, y: Float): Boolean = outer.lt(y, x)
+ override def gt(x: Float, y: Float): Boolean = outer.gt(y, x)
+ }
}
implicit object Float extends FloatOrdering
trait DoubleOrdering extends Ordering[Double] {
+ outer =>
+
def compare(x: Double, y: Double) = java.lang.Double.compare(x, y)
+
+ override def lteq(x: Double, y: Double): Boolean = x <= y
+ override def gteq(x: Double, y: Double): Boolean = x >= y
+ override def lt(x: Double, y: Double): Boolean = x < y
+ override def gt(x: Double, y: Double): Boolean = x > y
+ override def equiv(x: Double, y: Double): Boolean = x == y
+ override def max(x: Double, y: Double): Double = math.max(x, y)
+ override def min(x: Double, y: Double): Double = math.min(x, y)
+
+ override def reverse: Ordering[Double] = new DoubleOrdering {
+ override def reverse = outer
+ override def compare(x: Double, y: Double) = outer.compare(y, x)
+
+ override def lteq(x: Double, y: Double): Boolean = outer.lteq(y, x)
+ override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x)
+ override def lt(x: Double, y: Double): Boolean = outer.lt(y, x)
+ override def gt(x: Double, y: Double): Boolean = outer.gt(y, x)
+ }
}
implicit object Double extends DoubleOrdering
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index acd28f04f5..466b57dea7 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -127,7 +127,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
def arrayManifest: ClassManifest[Array[T]] =
- ClassManifest.classType[Array[T]](arrayClass[T](erasure))
+ ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
def newArray(len: Int): Array[T] =
java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
@@ -220,7 +220,7 @@ object ClassManifest {
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
- case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
+ case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
}
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index df5f64cdf6..be08409636 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -44,7 +44,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
override def arrayManifest: Manifest[Array[T]] =
- Manifest.classType[Array[T]](arrayClass[T](erasure))
+ Manifest.classType[Array[T]](arrayClass[T](erasure), this)
override def canEqual(that: Any): Boolean = that match {
case _: Manifest[_] => true
@@ -60,7 +60,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
override def hashCode = this.erasure.##
}
-trait AnyValManifest[T] extends Manifest[T] with Equals {
+sealed abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
override def <:<(that: ClassManifest[_]): Boolean =
(that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal)
override def canEqual(other: Any) = other match {
@@ -68,7 +68,7 @@ trait AnyValManifest[T] extends Manifest[T] with Equals {
case _ => false
}
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
+ override val hashCode = System.identityHashCode(this)
}
/** The object `Manifest` defines factory methods for manifests.
@@ -76,127 +76,104 @@ trait AnyValManifest[T] extends Manifest[T] with Equals {
* in client code.
*/
object Manifest {
- private def ObjectClass = classOf[java.lang.Object]
+ def valueManifests: List[AnyValManifest[_]] =
+ List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
- val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte] {
+ val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
def erasure = java.lang.Byte.TYPE
- override def toString = "Byte"
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
private def readResolve(): Any = Manifest.Byte
}
- val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short] {
+ val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
def erasure = java.lang.Short.TYPE
- override def toString = "Short"
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
private def readResolve(): Any = Manifest.Short
}
- val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char] {
+ val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
def erasure = java.lang.Character.TYPE
- override def toString = "Char"
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
private def readResolve(): Any = Manifest.Char
}
- val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int] {
+ val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
def erasure = java.lang.Integer.TYPE
- override def toString = "Int"
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
private def readResolve(): Any = Manifest.Int
}
- val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long] {
+ val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
def erasure = java.lang.Long.TYPE
- override def toString = "Long"
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
private def readResolve(): Any = Manifest.Long
}
- val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float] {
+ val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
def erasure = java.lang.Float.TYPE
- override def toString = "Float"
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
private def readResolve(): Any = Manifest.Float
}
- val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double] {
+ val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
def erasure = java.lang.Double.TYPE
- override def toString = "Double"
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
private def readResolve(): Any = Manifest.Double
}
- val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean] {
+ val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
def erasure = java.lang.Boolean.TYPE
- override def toString = "Boolean"
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
private def readResolve(): Any = Manifest.Boolean
}
- val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit] {
+ val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
def erasure = java.lang.Void.TYPE
- override def toString = "Unit"
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
private def readResolve(): Any = Manifest.Unit
}
- val Any: Manifest[Any] = new ClassTypeManifest[scala.Any](None, ObjectClass, Nil) {
- override def toString = "Any"
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any]("Any") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Any
}
- val Object: Manifest[Object] = new ClassTypeManifest[java.lang.Object](None, ObjectClass, Nil) {
- override def toString = "Object"
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object]("Object") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Object
}
- val AnyVal: Manifest[AnyVal] = new ClassTypeManifest[scala.AnyVal](None, ObjectClass, Nil) {
- override def toString = "AnyVal"
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal]("AnyVal") {
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.AnyVal
}
- val Null: Manifest[Null] = new ClassTypeManifest[scala.Null](None, ObjectClass, Nil) {
- override def toString = "Null"
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null]("Null") {
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Null
}
- val Nothing: Manifest[Nothing] = new ClassTypeManifest[scala.Nothing](None, ObjectClass, Nil) {
- override def toString = "Nothing"
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing]("Nothing") {
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Nothing
}
@@ -231,6 +208,11 @@ object Manifest {
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+ private abstract class PhantomManifest[T](override val toString: String) extends ClassTypeManifest[T](None, classOf[java.lang.Object], Nil) {
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override val hashCode = System.identityHashCode(this)
+ }
+
/** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
diff --git a/src/library/scala/reflect/api/MacroContext.scala b/src/library/scala/reflect/api/MacroContext.scala
new file mode 100644
index 0000000000..e23357d26e
--- /dev/null
+++ b/src/library/scala/reflect/api/MacroContext.scala
@@ -0,0 +1,15 @@
+package scala.reflect
+package api
+
+trait MacroContext extends Universe {
+
+ /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+ */
+ def captureVariable(vble: Symbol): Unit
+
+ /** Mark given identifier as a reference to a captured variable itself
+ * suppressing dereferencing with the `elem` field.
+ */
+ def referenceCapturedVariable(id: Ident): Tree
+
+} \ No newline at end of file
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index 752319d9a4..2394925657 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -542,12 +542,18 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
case class Select(qualifier: Tree, name: Name)
extends RefTree
+ def Select(qualifier: Tree, name: String): Select =
+ Select(qualifier, newTermName(name))
+
def Select(qualifier: Tree, sym: Symbol): Select =
Select(qualifier, sym.name) setSymbol sym
/** Identifier <name> */
case class Ident(name: Name) extends RefTree { }
+ def Ident(name: String): Ident =
+ Ident(newTermName(name))
+
def Ident(sym: Symbol): Ident =
Ident(sym.name) setSymbol sym
@@ -625,6 +631,11 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe =>
def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
+ /** An empty deferred value definition corresponding to:
+ * val _: _
+ * This is used as a placeholder in the `self` parameter Template if there is
+ * no definition of a self value of self type.
+ */
def emptyValDef: ValDef
// ------ traversers, copiers, and transformers ---------------------------------------------
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index f48d99f5af..cbe778f09b 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -26,7 +26,7 @@ abstract class AbstractPartialFunction[-T1, +R]
private var fallBackField: PartialFunction[T1 @uncheckedVariance, R @uncheckedVariance] = _
def fallBack: PartialFunction[T1, R] = synchronized {
- if (fallBackField == null) fallBackField = PartialFunction.empty
+ if (fallBackField eq null) fallBackField = PartialFunction.empty
fallBackField
}
@@ -38,7 +38,7 @@ abstract class AbstractPartialFunction[-T1, +R]
override def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = {
val result = this.clone.asInstanceOf[AbstractPartialFunction[A1, B1]]
result.synchronized {
- result.fallBackField = this.fallBackField orElse that
+ result.fallBackField = if (this.fallBackField eq null) that else this.fallBackField orElse that
result
}
}
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index c726c56d0e..b19c8d086c 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -769,6 +769,24 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
+
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ if (arg instanceof java.lang.Boolean)
+ return true;
+ else
+ return isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
/** arg.toChar */
public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 998661895b..22de5544a8 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -142,6 +142,11 @@ private[scala] trait PropertiesTrait {
*/
def isWin = osName startsWith "Windows"
def isMac = javaVendor startsWith "Apple"
+
+ // This is looking for javac, tools.jar, etc.
+ // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
+ // and finally the system property based javaHome.
+ def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
def scalaCmd = if (isWin) "scala.bat" else "scala"
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 751539243b..4004a01ad9 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -89,14 +89,14 @@ trait Parsers {
sealed abstract class ParseResult[+T] {
/** Functional composition of ParseResults.
*
- * @param `f` the function to be lifted over this result
+ * @param f the function to be lifted over this result
* @return `f` applied to the result of this `ParseResult`, packaged up as a new `ParseResult`
*/
def map[U](f: T => U): ParseResult[U]
/** Partial functional composition of ParseResults.
*
- * @param `f` the partial function to be lifted over this result
+ * @param f the partial function to be lifted over this result
* @param error a function that takes the same argument as `f` and
* produces an error message to explain why `f` wasn't applicable
* (it is called when this is the case)
@@ -240,7 +240,7 @@ trait Parsers {
// no filter yet, dealing with zero is tricky!
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def append[U >: T](p0: => Parser[U]): Parser[U] = { lazy val p = p0 // lazy argument
Parser{ in => this(in) append p(in)}
}
@@ -259,7 +259,7 @@ trait Parsers {
* but easier to pattern match on) that contains the result of `p` and
* that of `q`. The resulting parser fails if either `p` or `q` fails.
*/
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { lazy val p = q // lazy argument
(for(a <- this; b <- p) yield new ~(a,b)).named("~")
}
@@ -272,7 +272,7 @@ trait Parsers {
* succeeds -- evaluated at most once, and only when necessary.
* @return a `Parser` that -- on success -- returns the result of `q`.
*/
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def ~> [U](q: => Parser[U]): Parser[U] = { lazy val p = q // lazy argument
(for(a <- this; b <- p) yield b).named("~>")
}
@@ -287,7 +287,7 @@ trait Parsers {
* @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
* @return a `Parser` that -- on success -- returns the result of `p`.
*/
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def <~ [U](q: => Parser[U]): Parser[T] = { lazy val p = q // lazy argument
(for(a <- this; b <- p) yield a).named("<~")
}
@@ -302,7 +302,7 @@ trait Parsers {
* `p ~! q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
* In case of failure, no back-tracking is performed (in an earlier parser produced by the `|` combinator).
*
- * @param q a parser that will be executed after `p` (this parser) succeeds
+ * @param p a parser that will be executed after `p` (this parser) succeeds
* @return a `Parser` that -- on success -- returns a `~` (like a Pair, but easier to pattern match on)
* that contains the result of `p` and that of `q`.
* The resulting parser fails if either `p` or `q` fails, this failure is fatal.
@@ -332,7 +332,7 @@ trait Parsers {
* @param q0 a parser that accepts if p consumes less characters. -- evaluated at most once, and only when necessary
* @return a `Parser` that returns the result of the parser consuming the most characters (out of `p` and `q`).
*/
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def ||| [U >: T](q0: => Parser[U]): Parser[U] = new Parser[U] {
lazy val q = q0 // lazy argument
def apply(in: Input) = {
@@ -367,7 +367,7 @@ trait Parsers {
* @param v The new result for the parser, evaluated at most once (if `p` succeeds), not evaluated at all if `p` fails.
* @return a parser that has the same behaviour as the current parser, but whose successful result is `v`
*/
- @migration(2, 9, "As of 2.9, the call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def ^^^ [U](v: => U): Parser[U] = new Parser[U] {
lazy val v0 = v // lazy argument
def apply(in: Input) = Parser.this(in) map (x => v0)
@@ -706,7 +706,7 @@ trait Parsers {
* @return A parser that returns a list of results produced by first applying `f` and then
* repeatedly `p` to the input (it only succeeds if `f` matches).
*/
- @migration(2, 9, "As of 2.9, the p0 call-by-name arguments is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.")
+ @migration("The `p0` call-by-name arguments is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
def rep1[T](first: => Parser[T], p0: => Parser[T]): Parser[List[T]] = Parser { in =>
lazy val p = p0 // lazy argument
val elems = new ListBuffer[T]
@@ -733,9 +733,9 @@ trait Parsers {
* `repN(n, p)` uses `p` exactly `n` time to parse the input
* (the result is a `List` of the `n` consecutive results of `p`).
*
- * @param p a `Parser` that is to be applied successively to the input
- * @param n the exact number of times `p` must succeed
- * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
+ * @param p a `Parser` that is to be applied successively to the input
+ * @param num the exact number of times `p` must succeed
+ * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
* (and that only succeeds if `p` matches exactly `n` times).
*/
def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index 127e6e0ab7..df52b34f87 100644
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -41,7 +41,7 @@ object Elem {
class Elem(
override val prefix: String,
val label: String,
- override val attributes: MetaData,
+ attributes1: MetaData,
override val scope: NamespaceBinding,
val child: Node*)
extends Node with Serializable
@@ -49,6 +49,8 @@ extends Node with Serializable
final override def doCollectNamespaces = true
final override def doTransform = true
+ override val attributes = MetaData.normalize(attributes1, scope)
+
if (prefix == "")
throw new IllegalArgumentException("prefix of zero length, use null instead")
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 98e863eb37..c516747bae 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -38,8 +38,8 @@ object MetaData {
def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
lazy val key = getUniversalKey(md, scope)
if (md eq Null) normalized_attribs
- else if (set(key)) iterate(md.next, normalized_attribs, set)
- else iterate(md.next, md copy normalized_attribs, set + key)
+ else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set)
+ else md copy iterate(md.next, normalized_attribs, set + key)
}
iterate(attribs, Null, Set())
}
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index 436dfcda43..b80d6a1c73 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -13,22 +13,25 @@ package scala.xml
*
* @param pre ...
* @param key ...
- * @param value the attribute value, which may not be null
+ * @param value the attribute value
* @param next ...
*/
class PrefixedAttribute(
val pre: String,
val key: String,
val value: Seq[Node],
- val next: MetaData)
+ val next1: MetaData)
extends Attribute
{
- if (value eq null)
- throw new UnsupportedOperationException("value is null")
+ val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Utility.parseAttributeValue(value), next) */
+ /** same as this(pre, key, Text(value), next), or no attribute if value is null */
def this(pre: String, key: String, value: String, next: MetaData) =
- this(pre, key, Text(value), next)
+ this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next)
+
+ /** same as this(pre, key, value.get, next), or no attribute if value is None */
+ def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) =
+ this(pre, key, value.orNull, next)
/** Returns a copy of this unprefixed attribute with the given
* next field.
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index c56fba1e6c..b6800d5ed1 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -22,7 +22,7 @@ extends Attribute
final val pre = null
val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Text(value), next) */
+ /** same as this(key, Text(value), next), or no attribute if value is null */
def this(key: String, value: String, next: MetaData) =
this(key, if (value ne null) Text(value) else null: NodeSeq, next)
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9b48f4e1bb..fc20b892b9 100644
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -61,7 +61,7 @@ object Utility extends AnyRef with parsing.TokenTests {
val key = md.key
val smaller = sort(md.filter { m => m.key < key })
val greater = sort(md.filter { m => m.key > key })
- smaller.append( Null ).append(md.copy ( greater ))
+ smaller.copy(md.copy ( greater ))
}
/** Return the node with its attribute list sorted alphabetically
diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala
index ddfb8cb0f2..43d097a137 100644
--- a/src/manual/scala/tools/docutil/EmitHtml.scala
+++ b/src/manual/scala/tools/docutil/EmitHtml.scala
@@ -201,164 +201,26 @@ object EmitHtml {
out println "</body>"
out println "</html>"
}
-/* */
-/*
- private def group(ns: Iterable[NodeSeq]): NodeSeq = {
- val zs = new NodeBuffer
- for (z <- ns) { zs &+ z }
- zs
- }
-
- def emitSection(section: Section, depth: int): NodeSeq = {
- def emitText(text: AbstractText): NodeSeq = text match {
- case seq:SeqText =>
- group(seq.components.toList.map(item => emitText(item)))
-
- case Text(text) =>
- scala.xml.Text(escape(text))
-
- case MDash =>
- scala.xml.Text("&#8212;")
-
- case NDash =>
- scala.xml.Text("&#8211;")
-
- case Bold(text) =>
- <b>{emitText(text)}</b>
-
- case Italic(text) =>
- <i>{emitText(text)}</i>
-
- case Emph(text) =>
- <em>{emitText(text)}</em>
-
- case Mono(text) =>
- <code>{emitText(text)}</code>
-
- case Quote(text) =>
- emitText("\"" & text & "\"")
-
- case DefinitionList(definitions @ _*) =>
- <ins><dl>
- {definitions.toList.map(d =>
- <dt>{emitText(d.term)}</dt>
- <dd>{emitText(d.description)}</dd>
- )}
- </dl></ins>
-
- case Link(label, url) =>
- <a href={url}>{emitText(label)}</a>
-
- case _ =>
- error("unknown text node " + text)
- }
-
- def emitParagraph(para: Paragraph): NodeSeq = para match {
- case TextParagraph(text) =>
- <p>{emitText(text)}</p>
- case BlockQuote(text) =>
- <blockquote>{emitText(text)}</blockquote>
-
- case CodeSample(text) =>
- <blockquote><pre>{escape(text)}</pre></blockquote>
-
- case lst:BulletList =>
- <ul>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ul>
-
- case lst:NumberedList =>
- <ol>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ol>
-
- case TitledPara(title, text) =>
- <p><strong>{escape(title)}</strong></p>
- {emitText(text)}
-
- case EmbeddedSection(sect) =>
- {emitSection(sect, depth + 1)}
-
- case _ =>
- error("unknown paragraph node " + para)
- }
-
- val name = section.title.replaceAll("\\p{Space}", "_").toLowerCase()
- <h3 id={name}>{section.title}</h3>.concat(
- group(section.paragraphs.toList.map(p => emitParagraph(p))))
- }
-
- private def emit3columns(col1: String, col2: String, col3: String): NodeSeq =
- <div style="float:left;">{col1}</div>
- <div style="float:right;">{col3}</div>
- <div style="text-align:center;">{col2}</div>
- <div style="clear:both;"></div>
-
- private def emitHeader(col1: String, col2: String, col3: String): NodeSeq =
- <div style="margin: 0 0 2em 0;">
- {emit3columns(col1, col2, col3)}
- </div>
-
- private def emitFooter(col1: String, col2: String, col3: String): NodeSeq = {
- scala.xml.Comment("footer")
- <div style="margin: 2em 0 0 0;">
- {emit3columns(col1, col2, col3)}
- </div>
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitHtml(classname)
+ case Array(classname, file, _*) => emitHtml(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
}
- def emitDocument(document: Document, addDocType: Boolean) = {
- val name = document.title + "(" + document.category.id + ")"
- val doc =
- <html xml:lang="en">
- <head>
- <title>{document.title}</title>
- <meta http-equiv="Content-Language" content="en"/>
- <meta http-equiv="Content-Type" content={"text/html; charset=" + document.encoding}/>
- <meta name="Author" content={document.author}/>
- <style type="text/css">
- {" blockquote, pre { margin:1em 4em 1em 4em; }\n" +
- " p { margin:1em 2em 1em 2em; text-align:justify; }\n"}
- </style>
- </head>
- <body>
- {emitHeader(name, "" + document.category, name)}
- {document.sections.map(s => emitSection(s, 2))}
- {emitFooter("version " + document.version, document.date, name)}
- </body>
- </html>
- out.println(doc)
-/*
- val w = new java.io.StringWriter
- val id = scala.xml.dtd.PublicID("PUBLIC", null)
- val dtd = null //scala.xml.dtd.DEFAULT(true, "")
- val doctype = scala.xml.dtd.DocType("html", id, null) //List(dtd))
- XML.write(w, doc, document.encoding, true/ *xmlDecl* /, doctype)
- out.println(w.toString())
-*/
- }
-*/
- def main(args: Array[String]) {
- if (args.length < 1) {
- System.err println "usage: EmitHtml <classname>"
- sys.exit(1)
- }
+ def emitHtml(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl loadClass args(0)
+ val clasz = cl loadClass classname
val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
} catch {
case ex: Exception =>
ex.printStackTrace()
- System.err println "Error in EmitHtml"
+ System.err println "Error in EmitManPage"
sys.exit(1)
}
}
-
- def emitHtml(classname: String, outStream: java.io.OutputStream) {
- out setOut outStream
- main(Array(classname))
- }
}
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index 4a66e2ed07..3e0b02a415 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -163,10 +163,17 @@ object EmitManPage {
doc.sections foreach (s => emitSection(s, 1))
}
- def main(args: Array[String]) {
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitManPage(classname)
+ case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
+ }
+
+ def emitManPage(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl loadClass args(0)
+ val clasz = cl loadClass classname
val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
@@ -177,9 +184,4 @@ object EmitManPage {
sys.exit(1)
}
}
-
- def emitManPage(classname: String, outStream: java.io.OutputStream) {
- out setOut outStream
- main(Array(classname))
- }
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index f4ebfb7e7d..68688ff949 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -75,7 +75,8 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
val logWriter = new FileWriter(log)
// check whether there is a ".flags" file
- val flagsFileName = "%s.flags" format (basename(log.getName) dropRight 4) // 4 is "-run" or similar
+ val logFile = basename(log.getName)
+ val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0)
val args = allOpts.toList
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 299296b01d..ae54e51761 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -3,21 +3,21 @@ package nest
import java.io.File
import scala.tools.nsc.io.{ Directory }
+import scala.util.Properties.setProp
-class SBTRunner extends DirectRunner {
-
+object SBTRunner extends DirectRunner {
+
val fileManager = new FileManager {
var JAVACMD: String = "java"
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
- val testRootPath: String = PathSettings.testRoot.path
- val testRootDir: Directory = PathSettings.testRoot
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
-
+
def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.HashMap[String,Int] = {
-
def convert(scalaM:scala.collection.immutable.Map[String,Int]):java.util.HashMap[String,Int] = {
val javaM = new java.util.HashMap[String,Int]()
for(elem <- scalaM) yield {javaM.put(elem._1,elem._2)}
@@ -25,11 +25,60 @@ class SBTRunner extends DirectRunner {
}
def failedOnlyIfRequired(files:List[File]):List[File]={
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
+ if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
+ convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ }
- convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ case class CommandLineOptions(classpath: Option[String] = None,
+ tests: Map[String, Array[File]] = Map(),
+ scalacOptions: Seq[String] = Seq(),
+ justFailedTests: Boolean = false)
+
+ def mainReflect(args: Array[String]): java.util.Map[String,Int] = {
+ setProp("partest.debug", "true")
+ setProperties()
+
+ val Argument = new scala.util.matching.Regex("-(.*)")
+ def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
+ case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
+ case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
+ case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
+ case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
+ case Seq() => data
+ case x => sys.error("Unknown command line options: " + x)
+ }
+ val config = parseArgs(args, CommandLineOptions())
+ fileManager.SCALAC_OPTS = config.scalacOptions
+ fileManager.CLASSPATH = config.classpath getOrElse error("No classpath set")
+ // Find scala library jar file...
+ val lib: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-library.*\\.jar")).headOption
+ fileManager.LATEST_LIB = lib getOrElse error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ // TODO - Do something useful here!!!
+ fileManager.JAVAC_CMD = "javac"
+ fileManager.failed = config.justFailedTests
+ // TODO - Make this a flag?
+ //fileManager.updateCheck = true
+ // Now run and report...
+ val runs = config.tests.filterNot(_._2.isEmpty)
+ // This next bit uses java maps...
+ import collection.JavaConverters._
+ (for {
+ (testType, files) <- runs
+ (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
+ } yield (path, result)).seq asJava
+ }
+ def main(args: Array[String]): Unit = {
+ import collection.JavaConverters._
+ val failures = for {
+ (path, result) <- mainReflect(args).asScala
+ if result == 1 || result == 2
+ val resultName = (if(result == 1) " [FAILED]" else " [TIMEOUT]")
+ } yield path + resultName
+ // Re-list all failures so we can go figure out what went wrong.
+ failures foreach System.err.println
+ if(!failures.isEmpty) sys.exit(1)
}
}
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 73220d1048..466ec53c79 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -72,7 +72,7 @@ class ByteArrayReader(content: Array[Byte]) {
/** read an UTF8 encoded string
*/
def nextUTF8(len: Int): String = {
- val cs = scala.io.Codec.fromUTF8(buf.slice(bp, bp + len))
+ val cs = scala.io.Codec.fromUTF8(buf, bp, len)
bp += len
new String(cs)
}
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index db9d6c5ed9..02b940ab16 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -9,7 +9,7 @@
package scala.tools.scalap
import java.io._
-
+import scala.reflect.NameTransformer
class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer) {
@@ -32,22 +32,22 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
}
def nameToClass(str: String): String = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.Any" else res
}
def nameToClass0(str: String) = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.AnyRef" else res
}
def nameToSimpleClass(str: String) =
- Names.decode(str.substring(str.lastIndexOf('/') + 1))
+ NameTransformer.decode(str.substring(str.lastIndexOf('/') + 1))
def nameToPackage(str: String) = {
val inx = str.lastIndexOf('/')
val name = if (inx == -1) str else str.substring(0, inx).replace('/', '.')
- Names.decode(name)
+ NameTransformer.decode(name)
}
def sigToType(str: String): String =
@@ -119,9 +119,9 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
def printField(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) {
print(flagsToStr(false, flags))
if ((flags & 0x0010) != 0)
- print("val " + Names.decode(getName(name)))
+ print("val " + NameTransformer.decode(getName(name)))
else
- print("final var " + Names.decode(getName(name)))
+ print("final var " + NameTransformer.decode(getName(name)))
print(": " + getType(tpe) + ";").newline
}
@@ -139,20 +139,20 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
if (getName(name) == "<init>") {
print("def this" + getType(tpe) + ";").newline
} else {
- print("def " + Names.decode(getName(name)))
+ print("def " + NameTransformer.decode(getName(name)))
print(getType(tpe) + ";").newline
}
case Some(str) =>
if (getName(name) == "<init>")
print("def this" + str + ";").newline
else
- print("def " + Names.decode(getName(name)) + str + ";").newline
+ print("def " + NameTransformer.decode(getName(name)) + str + ";").newline
}
case None =>
if (getName(name) == "<init>") {
print("def this" + getType(tpe) + ";").newline
} else {
- print("def " + Names.decode(getName(name)))
+ print("def " + NameTransformer.decode(getName(name)))
print(getType(tpe) + ";").newline
}
}
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 7254b00480..a8a9c65f63 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -8,6 +8,7 @@
package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
+import scala.reflect.NameTransformer
import scalax.rules.scalasig._
import tools.nsc.util.{ ClassPath, JavaClassPath }
import tools.util.PathResolver
@@ -96,7 +97,7 @@ class Main {
*/
def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
// find the classfile
- val encName = Names.encode(
+ val encName = NameTransformer.encode(
if (classname == "scala.AnyRef") "java.lang.Object"
else classname)
val cls = path.findClass(encName)
diff --git a/src/scalap/scala/tools/scalap/Names.scala b/src/scalap/scala/tools/scalap/Names.scala
deleted file mode 100644
index 1d66b31ce3..0000000000
--- a/src/scalap/scala/tools/scalap/Names.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* ___ ____ ___ __ ___ ___
-** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
-** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
-**
-*/
-
-
-package scala.tools.scalap
-
-
-object Names {
-
- val operatorName = new Array[String](128)
- operatorName('$') = "$"
- operatorName('~') = "$tilde"
- operatorName('=') = "$eq"
- operatorName('<') = "$less"
- operatorName('>') = "$greater"
- operatorName('!') = "$bang"
- operatorName('#') = "$hash"
- operatorName('%') = "$percent"
- operatorName('^') = "$up"
- operatorName('&') = "$amp"
- operatorName('|') = "$bar"
- operatorName('*') = "$times"
- operatorName('/') = "$div"
- operatorName('\\') = "$bslash"
- operatorName('+') = "$plus"
- operatorName('-') = "$minus"
- operatorName(':') = "$colon"
-
- /** Replace operator symbols by corresponding "$op_name" in names.
- */
- def encode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c < 128) {
- val nop = operatorName(c)
- if (nop == null)
- res.append(c)
- else
- res.append(nop)
- } else
- res.append(c)
- i = i + 1
- }
- res.toString()
- }
-
- /** Replace "$op_name" by corresponding operator symbols in names.
- */
- def decode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c == '$') {
- var j = len
- while (j > i) {
- val prefix = name.substring(i, j)
- val c = lookup(prefix)
- if (c != null) {
- i = j
- res.append(c)
- } else
- j = j - 1
- }
- } else {
- i = i + 1
- res.append(c)
- }
- }
- res.toString()
- }
-
- /** Looks up the array entry for the operator name.
- */
- def lookup(string: String): String = {
- var i = 0
- var res: String = null
- while (i < 128) {
- if (string.equals(operatorName(i))) {
- res = String.valueOf(i.asInstanceOf[Char])
- i = 128
- }
- i = i + 1
- }
- res
- }
-
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index d53d8e1fc1..84f28af7ce 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -65,8 +65,11 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
* stores and array of bytes for the decompiler
*/
def fromUTF8StringAndBytes = {
- val chunk: Array[Byte] = bytes drop pos take length
- StringBytesPair(io.Codec.fromUTF8(chunk).mkString, chunk)
+ val chunk: Array[Byte] = new Array[Byte](length)
+ System.arraycopy(bytes, pos, chunk, 0, length)
+ val str = new String(io.Codec.fromUTF8(bytes, pos, length))
+
+ StringBytesPair(str, chunk)
}
def byte(i : Int) = bytes(pos) & 0xFF
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index df78bad25e..aa454934c1 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -13,9 +13,8 @@ package scalasig
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
-
import scala.tools.scalap.scalax.util.StringUtil
-import reflect.NameTransformer
+import scala.reflect.NameTransformer
import java.lang.String
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
diff --git a/test/benchmarks/src/scala/collection/immutable/range-bench.scala b/test/benchmarks/src/scala/collection/immutable/range-bench.scala
new file mode 100644
index 0000000000..e167ff04e8
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/immutable/range-bench.scala
@@ -0,0 +1,61 @@
+package scala.collection.immutable
+package benchmarks
+
+object RangeTest {
+ // not inlined any more, needs investigation
+ //
+ // class XXS {
+ // private val array = Array.range(0, 100)
+ // def tst = { var sum = 0; for (i <- 0 until array.length) sum += array(i); sum }
+ // }
+
+ var x: Int = 0
+
+ def foreachSum(max: Int): Int = {
+ var sum = 0
+ 1 to max foreach (sum += _)
+ sum
+ }
+ def whileSum(max: Int) = {
+ var sum = 0
+ var num = 1
+ while (num <= max) {
+ sum += num
+ num += 1
+ }
+ sum
+ }
+
+ def show(max: Int, foreachNanos: Long, whileNanos: Long) {
+ val winner = if (foreachNanos < whileNanos) "foreachSum" else "whileSum"
+ val ratio = if (foreachNanos < whileNanos) foreachNanos.toDouble / whileNanos else whileNanos.toDouble / foreachNanos
+ println("1 to %d:, %12s wins, %.3f: foreach %.3f while %.3f".format(
+ max, winner, ratio,
+ foreachNanos.toDouble / 1000000L,
+ whileNanos.toDouble / 1000000L)
+ )
+ }
+
+ def run(max: Int) = {
+ val foreachFirst = util.Random.nextBoolean
+ val t1 = System.nanoTime
+ x = if (foreachFirst) foreachSum(max) else whileSum(max)
+ val t2 = System.nanoTime
+ x = if (foreachFirst) whileSum(max) else foreachSum(max)
+ val t3 = System.nanoTime
+
+ val foreachNanos = if (foreachFirst) t2 - t1 else t3 - t2
+ val whileNanos = if (foreachFirst) t3 - t2 else t2 - t1
+ show(max, foreachNanos, whileNanos)
+ }
+
+ def main(args: Array[String]): Unit = {
+ var max = if (args.isEmpty) 100 else args(0).toInt
+ while (max > 0) {
+ run(max)
+ run(max)
+ run(max)
+ max += (max / 7)
+ }
+ }
+}
diff --git a/test/files/presentation/properties.check b/test/disabled/properties.check
index a721d49e3a..a721d49e3a 100644
--- a/test/files/presentation/properties.check
+++ b/test/disabled/properties.check
diff --git a/test/files/presentation/properties/Runner.scala b/test/disabled/properties/Runner.scala
index 1ef3cf9025..1ef3cf9025 100644
--- a/test/files/presentation/properties/Runner.scala
+++ b/test/disabled/properties/Runner.scala
diff --git a/test/files/presentation/properties/src/properties.scala b/test/disabled/properties/src/properties.scala
index 35b6a92221..35b6a92221 100644
--- a/test/files/presentation/properties/src/properties.scala
+++ b/test/disabled/properties/src/properties.scala
diff --git a/test/files/continuations-neg/t2949.check b/test/files/continuations-neg/t2949.check
index dd9768807c..411aed1b5b 100644
--- a/test/files/continuations-neg/t2949.check
+++ b/test/files/continuations-neg/t2949.check
@@ -1,6 +1,6 @@
t2949.scala:13: error: type mismatch;
found : Int
- required: ? @scala.util.continuations.cpsParam[List[?],Any]
+ required: ? @scala.util.continuations.cpsParam[List[?],?]
x * y
^
one error found
diff --git a/test/files/jvm/mkLibNatives.bat b/test/files/jvm/mkLibNatives.bat
index e11b6ee21c..2f99f7aab5 100755
--- a/test/files/jvm/mkLibNatives.bat
+++ b/test/files/jvm/mkLibNatives.bat
@@ -67,4 +67,4 @@ goto end
:end
if "%OS%"=="Windows_NT" @endlocal
-
+exit /b %errorlevel%
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 8704bcc643..15708f0c3b 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -160,8 +160,8 @@ x = Map(C -> 3, B -> 2, A -> 1)
y = Map(C -> 3, A -> 1, B -> 2)
x equals y: true, y equals x: true
-x = Set(layers, title, buffers)
-y = Set(layers, title, buffers)
+x = Set(buffers, title, layers)
+y = Set(buffers, title, layers)
x equals y: true, y equals x: true
x = History()
@@ -279,8 +279,8 @@ x = ParHashMap(1 -> 2, 2 -> 4)
y = ParHashMap(1 -> 2, 2 -> 4)
x equals y: true, y equals x: true
-x = ParHashSet(2, 1, 3)
-y = ParHashSet(2, 1, 3)
+x = ParHashSet(1, 2, 3)
+y = ParHashSet(1, 2, 3)
x equals y: true, y equals x: true
x = ParRange(0, 1, 2, 3, 4)
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
index 75dc539137..9fbedc2ae6 100644
--- a/test/files/jvm/xml03syntax.check
+++ b/test/files/jvm/xml03syntax.check
@@ -23,4 +23,4 @@ true
4
node=<elem key="<b>hello</b>"></elem>, key=Some(<b>hello</b>)
-node=<elem ></elem>, key=None
+node=<elem></elem>, key=None
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index d45d16165f..0881205bb4 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -28,12 +28,6 @@ checksensible.scala:27: error: comparing values of types Int and Unit using `=='
checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
1 == "abc"
^
-checksensible.scala:32: error: String and Int are unrelated: they will most likely never compare equal
- "abc" == 1 // warns because the lub of String and Int is Any
- ^
-checksensible.scala:33: error: Some[Int] and Int are unrelated: they will most likely never compare equal
- Some(1) == 1 // as above
- ^
checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
new AnyRef == 1
^
@@ -100,4 +94,4 @@ checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and
checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
while ((c = in.read) != -1)
^
-34 errors found
+32 errors found
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
new file mode 100644
index 0000000000..d98422dacb
--- /dev/null
+++ b/test/files/neg/logImplicits.check
@@ -0,0 +1,19 @@
+logImplicits.scala:2: applied implicit conversion from xs.type to ?{val size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps[Byte]
+ def f(xs: Array[Byte]) = xs.size
+ ^
+logImplicits.scala:7: applied implicit conversion from String("abc") to ?{val map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps
+ def f = "abc" map (_ + 1)
+ ^
+logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
+ math.max(122, x: Int)
+ ^
+logImplicits.scala:19: applied implicit conversion from Int(1) to ?{val ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A]
+ def f = (1 -> 2) + "c"
+ ^
+logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{val +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd
+ def f = (1 -> 2) + "c"
+ ^
+logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined
+class Un {
+ ^
+one error found
diff --git a/test/files/neg/logImplicits.flags b/test/files/neg/logImplicits.flags
new file mode 100644
index 0000000000..97e5ae94ef
--- /dev/null
+++ b/test/files/neg/logImplicits.flags
@@ -0,0 +1 @@
+-Xlog-implicit-conversions \ No newline at end of file
diff --git a/test/files/neg/logImplicits.scala b/test/files/neg/logImplicits.scala
new file mode 100644
index 0000000000..fb5dd8a025
--- /dev/null
+++ b/test/files/neg/logImplicits.scala
@@ -0,0 +1,25 @@
+class A {
+ def f(xs: Array[Byte]) = xs.size
+ def g(xs: Array[Byte]) = xs.length
+}
+
+class B {
+ def f = "abc" map (_ + 1)
+}
+
+object C {
+ final val x = "abc"
+
+ implicit def convert(p: x.type): Int = 123
+
+ math.max(122, x: Int)
+}
+
+class D {
+ def f = (1 -> 2) + "c"
+}
+
+class Un {
+ // forcing post-typer failure, since we're only interested in the output from the above
+ def unimplemented: Int
+} \ No newline at end of file
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
index 97146e88f7..d7dfacf3db 100644
--- a/test/files/neg/migration28.check
+++ b/test/files/neg/migration28.check
@@ -1,6 +1,5 @@
-migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics:
-This scanRight definition has changed in 2.9.
-The previous behavior can be reproduced with scanRight.reverse.
+migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
List(1,2,3,4,5).scanRight(0)(_+_)
^
one error found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 03e44f745d..01bbe2de4e 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -10,8 +10,7 @@ names-defaults-neg.scala:5: error: type mismatch;
names-defaults-neg.scala:8: error: positional after named argument.
test1(b = "(*", 23)
^
-names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
test2(x = 1)
^
names-defaults-neg.scala:15: error: not found: value c
@@ -26,8 +25,7 @@ names-defaults-neg.scala:17: error: not found: value m
names-defaults-neg.scala:18: error: not found: value m
test7 { m = 1 } // no named arguments in argument block
^
-names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
test8(x = 1)
^
names-defaults-neg.scala:22: error: parameter specified twice: a
@@ -85,7 +83,7 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
- required: ?T[?T[List[?T[X forSome { type X }]]]]
+ required: ?T
Error occurred in an application involving default arguments.
test4()
^
@@ -118,8 +116,7 @@ names-defaults-neg.scala:93: error: parameter specified twice: b
names-defaults-neg.scala:98: error: unknown parameter name: m
f3818(y = 1, m = 1)
^
-names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
delay(var2 = 40)
^
names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1)
@@ -146,15 +143,13 @@ names-defaults-neg.scala:164: error: variable definition needs type because 'x'
names-defaults-neg.scala:167: error: variable definition needs type because 'x' is used as a named argument in its body.
def u6 { var x = u.f(x = "32") }
^
-names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
def u9 { var x: Int = u.f(x = 1) }
^
names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
class u18 { var x: Int = u.f(x = 1) }
^
one warning found
diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check
index dabf53f126..5238141c4e 100644
--- a/test/files/neg/t1960.check
+++ b/test/files/neg/t1960.check
@@ -1,4 +1,4 @@
-t1960.scala:5: error: parameter 'p' requires field but conflicts with p in 'TBase'
+t1960.scala:5: error: parameter 'p' requires field but conflicts with method p in trait TBase
class Aclass (p: Int) extends TBase { def g() { f(p) } }
^
one error found
diff --git a/test/files/neg/t5354.check b/test/files/neg/t5354.check
new file mode 100644
index 0000000000..e47cecb5fe
--- /dev/null
+++ b/test/files/neg/t5354.check
@@ -0,0 +1,7 @@
+t5354.scala:9: error: ambiguous implicit values:
+ both method x123 in package foo of type => foo.Bippy
+ and method z of type => foo.Bippy
+ match expected type foo.Bippy
+ implicitly[Bippy]
+ ^
+one error found
diff --git a/test/files/neg/t5354.scala b/test/files/neg/t5354.scala
new file mode 100644
index 0000000000..99b5650155
--- /dev/null
+++ b/test/files/neg/t5354.scala
@@ -0,0 +1,15 @@
+package object foo {
+ implicit def x123: Bippy = new Bippy("x")
+}
+package foo {
+ class Bippy(override val toString: String){ }
+ class Dingus {
+ def f1 = {
+ implicit def z: Bippy = new Bippy("z")
+ implicitly[Bippy]
+ }
+ }
+ object Test extends App {
+ println(new Dingus().f1)
+ }
+}
diff --git a/test/files/neg/t5357.check b/test/files/neg/t5357.check
new file mode 100644
index 0000000000..3385559071
--- /dev/null
+++ b/test/files/neg/t5357.check
@@ -0,0 +1,4 @@
+t5357.scala:5: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.)
+ case A: N => 1
+ ^
+one error found
diff --git a/test/files/neg/t5357.scala b/test/files/neg/t5357.scala
new file mode 100644
index 0000000000..369a5568a4
--- /dev/null
+++ b/test/files/neg/t5357.scala
@@ -0,0 +1,9 @@
+trait M
+
+case class N() extends M {
+ def mytest(x: M) = x match {
+ case A: N => 1
+ case _ => 0
+ }
+}
+
diff --git a/test/files/pos/t1459/App.scala b/test/files/pos/t1459/App.scala
index 651b285b17..36e5022e94 100755
--- a/test/files/pos/t1459/App.scala
+++ b/test/files/pos/t1459/App.scala
@@ -1,7 +1,7 @@
package foo
import base._
-object App extends Application {
+object App extends scala.App {
class Concrete extends AbstractBase {
override def doStuff(params:java.lang.String*): Unit = println("doStuff invoked")
}
diff --git a/test/files/pos/t4063.scala b/test/files/pos/t4063.scala
new file mode 100644
index 0000000000..5e19c42edc
--- /dev/null
+++ b/test/files/pos/t4063.scala
@@ -0,0 +1,39 @@
+trait Parallel
+trait Parallelizable[+ParRepr <: Parallel]
+
+trait PIterableLike[+T, +Repr <: Parallel] extends Parallel with Parallelizable[PIterableLike[T, Repr]]
+
+trait PMap[K, V] extends PIterableLike[(K, V), PMap[K, V]]
+trait PSet[T] extends PIterableLike[T, PSet[T]]
+
+trait CIterableLike[+T, +Repr]
+
+trait CSet[T] extends CIterableLike[T, CSet[T]] with Parallelizable[PSet[T]]
+
+trait CMap[K, V] extends CIterableLike[(K, V), CMap[K, V]] with Parallelizable[PMap[K, V]]
+
+object Test {
+ var x = 0
+
+ def main() {
+ val map: CMap[Int, CSet[Int]] = new CMap[Int, CSet[Int]] {}
+ val set: CSet[Int] = new CSet[Int] {}
+
+ // should infer type argument
+ //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel with Parallelizable[Parallel]]]] {
+ // or:
+ //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel]]] {
+ // or, maybe it could also infer existential types:
+ //map.synchronized[CIterableLike[Any, _] with Parallelizable[PIterableLike[Any, _]]] {
+
+ map.synchronized {
+ if (x == 0) {
+ map
+ } else {
+ set
+ }
+ }
+
+ }
+}
+
diff --git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala
new file mode 100644
index 0000000000..29c8d16e30
--- /dev/null
+++ b/test/files/pos/t4070.scala
@@ -0,0 +1,37 @@
+package a {
+ // method before classes
+ trait Foo {
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+
+ class Dingus[T]
+ class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]]
+ }
+}
+
+package b {
+ // classes before method
+ trait Foo {
+ class Dingus[T]
+ class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]]
+
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+ }
+}
+
+
+/*
+// With crash below the clasess:
+% scalac -Dscalac.debug.tvar ./a.scala
+[ create] ?_$1 ( In Foo#crash )
+[ setInst] tv[Int] ( In Foo#crash, _$1=tv[Int] )
+[ create] tv[Int] ( In Foo#crash )
+[ clone] tv[Int] ( Foo#crash )
+
+// With crash above the classes:
+% scalac -Dscalac.debug.tvar ./a.scala
+[ create] ?tv ( In Foo#crash )
+./a.scala:2: error: Invalid type application in TypeVar: List(), List(Int)
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+ ^
+one error found
+*/
diff --git a/test/files/pos/t4070b.scala b/test/files/pos/t4070b.scala
new file mode 100644
index 0000000000..36d03de80c
--- /dev/null
+++ b/test/files/pos/t4070b.scala
@@ -0,0 +1,35 @@
+package a {
+ abstract class DeliteOp[B]
+ abstract class DeliteCollection[A]
+ abstract class Exp[T] { def Type: T }
+
+ trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] {
+ val in: Exp[C[A]]
+ val func: Exp[B]
+ val alloc: Exp[C[B]]
+ }
+
+ object Test {
+ def f(x: DeliteOp[_]) = x match {
+ case map: DeliteOpMap[_,_,_] => map.alloc.Type
+ }
+ }
+}
+
+package b {
+ object Test {
+ def f(x: DeliteOp[_]) = x match {
+ case map: DeliteOpMap[_,_,_] => map.alloc.Type
+ }
+ }
+
+ abstract class DeliteOp[B]
+ abstract class DeliteCollection[A]
+ abstract class Exp[T] { def Type: T }
+
+ trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] {
+ val in: Exp[C[A]]
+ val func: Exp[B]
+ val alloc: Exp[C[B]]
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t4273.scala b/test/files/pos/t4273.scala
new file mode 100644
index 0000000000..9a942e8325
--- /dev/null
+++ b/test/files/pos/t4273.scala
@@ -0,0 +1,8 @@
+class A {
+ implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.Ops(x)
+
+ class Bippy
+ implicit val bippyOrdering = new Ordering[Bippy] { def compare(x: Bippy, y: Bippy) = util.Random.nextInt }
+
+ (new Bippy) < (new Bippy)
+} \ No newline at end of file
diff --git a/test/files/pos/t5020.scala b/test/files/pos/t5020.scala
new file mode 100644
index 0000000000..06f7723f9f
--- /dev/null
+++ b/test/files/pos/t5020.scala
@@ -0,0 +1,19 @@
+package a {
+ sealed trait GenericList[U, M[_ <: U]] {
+ type Transformed[N[MMA <: U]] <: GenericList[U, N]
+ }
+
+ trait GenericCons[U, M[_ <: U], T <: GenericList[U, M]] extends GenericList[U, M] {
+ type Transformed[N[MMB <: U]] = GenericCons[U, N, GenericList[U, M]#Transformed[N]]
+ }
+}
+
+package b {
+ sealed trait GenericList[L, M[_ >: L]] {
+ type Transformed[N[MMA >: L]] <: GenericList[L, N]
+ }
+
+ trait GenericCons[L, M[_ >: L], T <: GenericList[L, M]] extends GenericList[L, M] {
+ type Transformed[N[MMB >: L]] = GenericCons[L, N, T#Transformed[N]]
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t5119.scala b/test/files/pos/t5119.scala
new file mode 100644
index 0000000000..4a67244e50
--- /dev/null
+++ b/test/files/pos/t5119.scala
@@ -0,0 +1,13 @@
+import collection.mutable
+
+object Test {
+ class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) {
+ def mapSeparate[VL[_], VR[_]](f: V[_] => ({type l[T] = Either[VL[T], VR[T]]})#l[_] ) = {
+ backing.view.map { case (k,v) => f(v) match {
+ case Left(l) => Left((k, l))
+ case Right(r) => Right((k, r))
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/pos/t5175.flags b/test/files/pos/t5175.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t5175.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t5175.scala b/test/files/pos/t5175.scala
new file mode 100644
index 0000000000..e15cc3affd
--- /dev/null
+++ b/test/files/pos/t5175.scala
@@ -0,0 +1,9 @@
+object Test {
+ def ==(p: Phase): Int = 0
+
+ def foo {
+ ==(new Phase())
+ }
+}
+
+class Phase
diff --git a/test/files/pos/t5317.scala b/test/files/pos/t5317.scala
new file mode 100644
index 0000000000..8c9c9d8222
--- /dev/null
+++ b/test/files/pos/t5317.scala
@@ -0,0 +1,12 @@
+object Test {
+ trait S { type T; val x: AnyRef }
+ trait A extends S { type T <: A; val x: A = null }
+ trait B extends S { type T <: B; val x: B = null }
+
+ val a = new A{}
+ val b = new B{}
+ val y = if (true) a else b
+
+ // lub of y should allow for this
+ println(y.x.x)
+}
diff --git a/test/files/pos/t5359.scala b/test/files/pos/t5359.scala
new file mode 100644
index 0000000000..c22b2b1c76
--- /dev/null
+++ b/test/files/pos/t5359.scala
@@ -0,0 +1,17 @@
+// /scala/trac/5359/a.scala
+// Thu Jan 5 13:31:05 PST 2012
+
+object test {
+ trait Step[F[_]] {
+ // crash: typeConstructor inapplicable for <none>
+ this match {
+ case S1() =>
+ }
+ }
+ case class S1[F[_]]() extends Step[F]
+
+ // okay
+ (null: Step[Option]) match {
+ case S1() =>
+ }
+}
diff --git a/test/files/pos/virtpatmat_alts_subst.flags b/test/files/pos/virtpatmat_alts_subst.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/pos/virtpatmat_alts_subst.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/pos/virtpatmat_alts_subst.scala b/test/files/pos/virtpatmat_alts_subst.scala
new file mode 100644
index 0000000000..e27c52f9c7
--- /dev/null
+++ b/test/files/pos/virtpatmat_alts_subst.scala
@@ -0,0 +1,6 @@
+case class Foo(s: String) {
+ def appliedType(tycon: Any) =
+ tycon match {
+ case Foo(sym @ ("NothingClass" | "AnyClass")) => println(sym)
+ }
+}
diff --git a/test/files/pos/virtpatmat_binding_opt.flags b/test/files/pos/virtpatmat_binding_opt.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/pos/virtpatmat_binding_opt.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala
new file mode 100644
index 0000000000..962e3d7dbe
--- /dev/null
+++ b/test/files/pos/virtpatmat_binding_opt.scala
@@ -0,0 +1,11 @@
+class Test {
+ def combine = this match {
+ case that if that eq this => this // just return this
+ case that: Test2 =>
+ println(that)
+ this
+ case _ => error("meh")
+ }
+}
+
+class Test2 extends Test \ No newline at end of file
diff --git a/test/files/run/array-existential-bound.check b/test/files/run/array-existential-bound.check
new file mode 100644
index 0000000000..f5cca843e3
--- /dev/null
+++ b/test/files/run/array-existential-bound.check
@@ -0,0 +1,4 @@
+2
+1000
+1000
+26
diff --git a/test/files/run/array-existential-bound.scala b/test/files/run/array-existential-bound.scala
new file mode 100644
index 0000000000..bc442d39f7
--- /dev/null
+++ b/test/files/run/array-existential-bound.scala
@@ -0,0 +1,17 @@
+trait Fooz[Q <: Array[_]] {
+ def f0(x: Q) = x.length
+}
+
+object Test extends Fooz[Array[Int]] {
+ val f1 = new Fooz[Array[String]] { }
+ val f2 = new Fooz[Array[Int]] { }
+ val f3 = new Fooz[Array[Any]] { }
+ val f4 = new Fooz[Array[_]] { }
+
+ def main(args: Array[String]): Unit = {
+ println(f1.f0(Array[String]("a", "b")))
+ println(f2.f0(1 to 1000 toArray))
+ println(f3.f0((1 to 1000).toArray[Any]))
+ println(f4.f0('a' to 'z' toArray))
+ }
+}
diff --git a/test/files/run/mixin-bridge-methods.scala b/test/files/run/mixin-bridge-methods.scala
new file mode 100644
index 0000000000..e0340ebb12
--- /dev/null
+++ b/test/files/run/mixin-bridge-methods.scala
@@ -0,0 +1,14 @@
+trait Foo {
+ def getFoo() = "foo"
+}
+
+class Sub extends Foo {
+ def getBar() = "bar"
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val ms = classOf[Sub].getDeclaredMethods
+ assert(ms forall (x => !x.isBridge), ms mkString " ")
+ }
+}
diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala
index ab873bca89..9dc6071c7b 100644
--- a/test/files/run/origins.scala
+++ b/test/files/run/origins.scala
@@ -1,4 +1,4 @@
-import scala.tools.nsc.util.Origins
+import scala.reflect.internal.util.Origins
package goxbox {
object Socks {
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 38e7532133..1e7b6f0cd8 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -2,15 +2,31 @@ Type in expressions to have them evaluated.
Type :help for more information.
scala> :power
-** Power User mode enabled - BEEP BOOP SPIZ **
+** Power User mode enabled - BEEP WHIR GYVE **
** :phase has been set to 'typer'. **
** scala.tools.nsc._ has been imported **
-** global._ and definitions._ also imported **
-** Try :help, vals.<tab>, power.<tab> **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
scala> // guarding against "error: reference to global is ambiguous"
scala> global.emptyValDef // "it is imported twice in the same scope by ..."
res0: $r.global.emptyValDef.type = private val _ = _
+scala> val tp = ArrayClass[scala.util.Random] // magic with manifests
+tp: $r.global.Type = Array[scala.util.Random]
+
+scala> tp.memberType(Array_apply) // evidence
+res1: $r.global.Type = (i: Int)scala.util.Random
+
+scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+m: $r.treedsl.global.Match =
+10 match {
+ case 5 => false
+ case _ => true
+}
+
+scala> typed(m).tpe // typed is in scope
+res2: $r.treedsl.global.Type = Boolean
+
scala>
diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala
index 9f70ac4b68..27da3df106 100644
--- a/test/files/run/repl-power.scala
+++ b/test/files/run/repl-power.scala
@@ -5,6 +5,10 @@ object Test extends ReplTest {
:power
// guarding against "error: reference to global is ambiguous"
global.emptyValDef // "it is imported twice in the same scope by ..."
+val tp = ArrayClass[scala.util.Random] // magic with manifests
+tp.memberType(Array_apply) // evidence
+val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+typed(m).tpe // typed is in scope
""".trim
}
diff --git a/test/files/run/t3758.scala b/test/files/run/t3758.scala
new file mode 100644
index 0000000000..18750b0a9c
--- /dev/null
+++ b/test/files/run/t3758.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(classManifest[Array[String]].typeArguments contains classManifest[String])
+ assert(classManifest[Array[Int]].typeArguments contains classManifest[Int])
+ assert(classManifest[Array[Float]].typeArguments contains classManifest[Float])
+ assert(manifest[Array[String]].typeArguments contains manifest[String])
+ assert(manifest[Array[Int]].typeArguments contains manifest[Int])
+ assert(manifest[Array[Float]].typeArguments contains manifest[Float])
+ }
+}
diff --git a/test/files/run/t4024.scala b/test/files/run/t4024.scala
index ef768beb99..7c62a3fc6e 100644
--- a/test/files/run/t4024.scala
+++ b/test/files/run/t4024.scala
@@ -5,5 +5,16 @@ object Test extends App {
val m = x.getClass.getMethod("toString")
assert(m.invoke(x, (Nil: List[AnyRef]): _*) == "abc")
+
+ Test2.main(Array())
}
+
+object Test2 {
+ def main(args: Array[String]): Unit = {
+ val x = "abc"
+ val m = x.getClass.getMethod("toString")
+ m.invoke(x, Nil: _*)
+ m.invoke(x, Seq(): _*)
+ }
+}
diff --git a/test/files/run/t4871.check b/test/files/run/t4871.check
new file mode 100644
index 0000000000..a60526a0f3
--- /dev/null
+++ b/test/files/run/t4871.check
@@ -0,0 +1,2 @@
+class Test$C
+class Test$D
diff --git a/test/files/run/t4871.scala b/test/files/run/t4871.scala
new file mode 100644
index 0000000000..70d8b7145c
--- /dev/null
+++ b/test/files/run/t4871.scala
@@ -0,0 +1,12 @@
+object Test {
+ class C
+ class D
+
+ def main(args: Array[String]): Unit = {
+ val z: Class[C] = classOf
+ val z2: Class[D] = classOf[D]
+
+ println(z)
+ println(z2)
+ }
+}
diff --git a/test/files/run/t5053.check b/test/files/run/t5053.check
new file mode 100644
index 0000000000..5ec39bbdeb
--- /dev/null
+++ b/test/files/run/t5053.check
@@ -0,0 +1,6 @@
+true
+true
+true
+true
+true
+true
diff --git a/test/files/run/t5053.scala b/test/files/run/t5053.scala
new file mode 100644
index 0000000000..e46dad5ac6
--- /dev/null
+++ b/test/files/run/t5053.scala
@@ -0,0 +1,20 @@
+object Test extends App {
+ {
+ val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
+ val (l, m, r) = Seq((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
+ }
+ {
+ val (left, right) = Iterable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
+ val (l, m, r) = Iterable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
+ }
+ {
+ val (left, right) = Traversable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
+ val (l, m, r) = Traversable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
+ }
+}
diff --git a/test/files/run/t5239.check b/test/files/run/t5239.check
deleted file mode 100644
index db5778f95b..0000000000
--- a/test/files/run/t5239.check
+++ /dev/null
@@ -1,13 +0,0 @@
-result = 2{Int(2)}
-[[syntax trees at end of typer]]// Scala source: NoSourceFile
-package <empty> {
- final object __wrapper$1 extends Object {
- def this(): object __wrapper$1 = {
- __wrapper$1.super.this();
- ()
- };
- <static> def wrapper(): Int = 2
- }
-}
-
-evaluated = 2
diff --git a/test/pending/run/t5266_1.check b/test/files/run/t5266_1.check
index 3feac16a0b..3feac16a0b 100644
--- a/test/pending/run/t5266_1.check
+++ b/test/files/run/t5266_1.check
diff --git a/test/files/run/t5239.scala b/test/files/run/t5266_1.scala
index 1f404196ba..18e288e685 100644
--- a/test/files/run/t5239.scala
+++ b/test/files/run/t5266_1.scala
@@ -4,17 +4,13 @@ import reflect.runtime.Mirror.ToolBox
object Test extends App {
val code = scala.reflect.Code.lift{
- 2
+ def x = 2
+ println(x)
};
- val settings = new Settings
- settings.Xprint.value = List("typer")
-
- val reporter = new ConsoleReporter(settings)
+ val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
val ttree = toolbox.typeCheck(code.tree)
- println("result = " + toolbox.showAttributed(ttree))
-
val evaluated = toolbox.runExpr(ttree)
println("evaluated = " + evaluated)
-}
+} \ No newline at end of file
diff --git a/test/pending/run/t5266_2.check b/test/files/run/t5266_2.check
index 3feac16a0b..3feac16a0b 100644
--- a/test/pending/run/t5266_2.check
+++ b/test/files/run/t5266_2.check
diff --git a/test/pending/run/t5266_2.scala b/test/files/run/t5266_2.scala
index cd841da021..eb319583f8 100644
--- a/test/pending/run/t5266_2.scala
+++ b/test/files/run/t5266_2.scala
@@ -9,7 +9,7 @@ object Test extends App {
println(y)
};
- val reporter = new ConsoleReporter(settings)
+ val reporter = new ConsoleReporter(new Settings)
val toolbox = new ToolBox(reporter)
val ttree = toolbox.typeCheck(code.tree)
val evaluated = toolbox.runExpr(ttree)
diff --git a/test/files/run/t5293.scala b/test/files/run/t5293.scala
new file mode 100644
index 0000000000..de1efaec4a
--- /dev/null
+++ b/test/files/run/t5293.scala
@@ -0,0 +1,83 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+ def bench(label: String)(body: => Unit): Long = {
+ val start = System.nanoTime
+
+ 0.until(10).foreach(_ => body)
+
+ val end = System.nanoTime
+
+ //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+ end - start
+ }
+
+ def benchJava(values: java.util.Collection[Int]) = {
+ bench("Java Set") {
+ val set = new java.util.HashSet[Int]
+
+ set.addAll(values)
+ }
+ }
+
+ def benchScala(values: Iterable[Int]) = {
+ bench("Scala Set") {
+ val set = new scala.collection.mutable.HashSet[Int]
+
+ set ++= values
+ }
+ }
+
+ def benchScalaSorted(values: Iterable[Int]) = {
+ bench("Scala Set sorted") {
+ val set = new scala.collection.mutable.HashSet[Int]
+
+ set ++= values.toArray.sorted
+ }
+ }
+
+ def benchScalaPar(values: Iterable[Int]) = {
+ bench("Scala ParSet") {
+ val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x }
+
+ set ++= values
+ }
+ }
+
+ val values = 0 until 50000
+ val set = scala.collection.mutable.HashSet.empty[Int]
+
+ set ++= values
+
+ // warmup
+ for (x <- 0 until 5) {
+ benchJava(set.asJava)
+ benchScala(set)
+ benchScalaPar(set)
+ benchJava(set.asJava)
+ benchScala(set)
+ benchScalaPar(set)
+ }
+
+ val javaset = benchJava(set.asJava)
+ val scalaset = benchScala(set)
+ val scalaparset = benchScalaPar(set)
+
+ assert(scalaset < (javaset * 4))
+ assert(scalaparset < (javaset * 4))
+}
+
+
+
+
+
+
+
+
diff --git a/test/files/run/t5300.scala b/test/files/run/t5300.scala
new file mode 100644
index 0000000000..073b29604a
--- /dev/null
+++ b/test/files/run/t5300.scala
@@ -0,0 +1,7 @@
+object Test {
+ val pf: PartialFunction[Any, Unit] = { case _ => () }
+
+ def main(args: Array[String]): Unit = {
+ pf orElse pf
+ }
+}
diff --git a/test/files/run/t5356.check b/test/files/run/t5356.check
new file mode 100644
index 0000000000..21c4aef07b
--- /dev/null
+++ b/test/files/run/t5356.check
@@ -0,0 +1,6 @@
+1 scala.runtime.RichInt
+1 scala.runtime.RichInt
+1 scala.math.BigInt
+1 scala.runtime.RichDouble
+1 scala.runtime.RichFloat
+1
diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala
new file mode 100644
index 0000000000..f7696c6088
--- /dev/null
+++ b/test/files/run/t5356.scala
@@ -0,0 +1,12 @@
+object Test {
+ def f(x: { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
+
+ def main(args: Array[String]): Unit = {
+ f(1)
+ f(1.toInt)
+ f(BigInt(1))
+ f(1d)
+ f(1f)
+ println((1: { def toInt: Int }).toInt)
+ }
+}
diff --git a/test/files/run/treePrint.scala b/test/files/run/treePrint.scala
index 745c2150c2..e0332a705f 100644
--- a/test/files/run/treePrint.scala
+++ b/test/files/run/treePrint.scala
@@ -35,7 +35,7 @@ object Test {
settings.Ycompacttrees.value = true
val intp = new IMain(settings, new PrintWriter(new NullOutputStream))
- val power = Power(intp)
+ val power = new Power(intp, new ReplVals { })
intp.interpret("""def initialize = "Have to interpret something or we get errors." """)
power trees code foreach println
}
diff --git a/test/files/run/type-currying.check b/test/files/run/type-currying.check
new file mode 100644
index 0000000000..e5db238ca5
--- /dev/null
+++ b/test/files/run/type-currying.check
@@ -0,0 +1,27 @@
+Map(abc -> 55)
+(a,0)
+(b,1)
+(c,2)
+(d,3)
+(e,4)
+(f,5)
+(g,6)
+(h,7)
+(i,8)
+(j,9)
+(k,10)
+(l,11)
+(m,12)
+(n,13)
+(o,14)
+(p,15)
+(q,16)
+(r,17)
+(s,18)
+(t,19)
+(u,20)
+(v,21)
+(w,22)
+(x,23)
+(y,24)
+(z,25)
diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala
new file mode 100644
index 0000000000..f9764c64f0
--- /dev/null
+++ b/test/files/run/type-currying.scala
@@ -0,0 +1,58 @@
+import scala.collection.{ mutable, immutable, generic }
+import generic.CanBuildFrom
+
+object Partial {
+ type KnownContainer[CC[K, V] <: collection.Map[K, V]] = {
+ def values[V] : KnownValues[CC, V]
+ def apply[K] : KnownKeys[CC, K]
+ }
+ type KnownKeys[CC[K, V] <: collection.Map[K, V], K] = {
+ def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V]
+ }
+ type KnownValues[CC[K, V] <: collection.Map[K, V], V] = {
+ def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V]
+ }
+
+ def apply[CC[K, V] <: collection.Map[K, V]] : KnownContainer[CC] = new {
+ def values[V] : KnownValues[CC, V] = new {
+ def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result
+ }
+ def apply[K] = new {
+ def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result
+ }
+ }
+}
+
+object Test {
+ val m = Partial[immutable.TreeMap]
+ val m1 = m[String]
+ val m2 = m[Int][Int]
+
+ val mutableBippy = Partial[mutable.HashMap][String][Int]
+ mutableBippy("abc") = 55
+
+ val immutableBippy = Partial[immutable.HashMap].values[Int]
+ def make[T](xs: T*) = immutableBippy[T] ++ xs.zipWithIndex
+
+ val n0 = Partial[immutable.HashMap][String][Int] ++ Seq(("a", 1))
+ val n1 = Partial.apply[immutable.HashMap].apply[String].apply[Int] ++ Seq(("a", 1))
+
+ def main(args: Array[String]): Unit = {
+ println(mutableBippy)
+ make('a' to 'z': _*).toList.sorted foreach println
+ assert(n0 == n1)
+ }
+}
+
+class A {
+ object Foo {
+ def apply[T] = Bar
+ }
+ object Bar {
+ def apply() = Foo
+ }
+
+ def f() = Foo
+ def g = f()[Int]()[String]()
+ def h = Foo[Foo.type]()[Foo.type]()
+}
diff --git a/test/files/run/virtpatmat_literal.scala b/test/files/run/virtpatmat_literal.scala
index cb72b1d2a5..5bd6b30791 100644
--- a/test/files/run/virtpatmat_literal.scala
+++ b/test/files/run/virtpatmat_literal.scala
@@ -1,8 +1,9 @@
object Test extends App {
+ val a = 1
1 match {
case 2 => println("FAILED")
case 1 => println("OK")
- case 1 => println("FAILED")
+ case `a` => println("FAILED")
}
val one = 1
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala
new file mode 100644
index 0000000000..119e3050ea
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ virtMatch()
+ def virtMatch() = {
+ List(1, 3, 4, 7) match {
+ case 1 :: 3 :: 4 :: 5 :: x => println("nope")
+ case 1 :: 3 :: 4 :: 6 :: x => println("nope")
+ case 1 :: 3 :: 4 :: 7 :: x => println(1)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapplyprod.check b/test/files/run/virtpatmat_unapplyprod.check
new file mode 100644
index 0000000000..2660ff8f96
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.check
@@ -0,0 +1,4 @@
+(2,3)
+(2,3)
+(2,3)
+List(true, false, true)
diff --git a/test/files/run/virtpatmat_unapplyprod.flags b/test/files/run/virtpatmat_unapplyprod.flags
new file mode 100644
index 0000000000..9769db9257
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.flags
@@ -0,0 +1 @@
+ -Yvirtpatmat -Xexperimental
diff --git a/test/files/run/virtpatmat_unapplyprod.scala b/test/files/run/virtpatmat_unapplyprod.scala
new file mode 100644
index 0000000000..441e5e3968
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.scala
@@ -0,0 +1,23 @@
+object Test extends App {
+ case class Foo(x: Int, y: String)
+
+ Foo(2, "3") match {
+ case Foo(x, y) => println((x, y))
+ }
+
+ case class FooSeq(x: Int, y: String, z: Boolean*)
+
+ FooSeq(2, "3") match {
+ case FooSeq(x, y) => println((x, y))
+ }
+
+ FooSeq(2, "3", true, false, true) match {
+ case FooSeq(x, y) => println("nope")
+ case FooSeq(x, y, true, false, true) => println((x, y))
+ }
+
+ FooSeq(1, "a", true, false, true) match {
+ case FooSeq(1, "a") => println("nope")
+ case FooSeq(1, "a", x@_* ) => println(x.toList)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala
new file mode 100644
index 0000000000..8b261acc94
--- /dev/null
+++ b/test/files/run/xml-attribute.scala
@@ -0,0 +1,33 @@
+import xml.Node
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val noAttr = <t/>
+ val attrNull = <t a={ null: String }/>
+ val attrNone = <t a={ None: Option[Seq[Node]] }/>
+ val preAttrNull = <t p:a={ null: String }/>
+ val preAttrNone = <t p:a={ None: Option[Seq[Node]] }/>
+ assert(noAttr == attrNull)
+ assert(noAttr == attrNone)
+ assert(noAttr == preAttrNull)
+ assert(noAttr == preAttrNone)
+
+ val noAttrStr = "<t></t>"
+ assert(noAttr.toString() == noAttrStr)
+ assert(attrNull.toString() == noAttrStr)
+ assert(attrNone.toString() == noAttrStr)
+ assert(preAttrNull.toString() == noAttrStr)
+ assert(preAttrNone.toString() == noAttrStr)
+
+ val xml1 = <t b="1" d="2"/>
+ val xml2 = <t a={ null: String } p:a={ null: String } b="1" c={ null: String } d="2"/>
+ val xml3 = <t b="1" c={ null: String } d="2" a={ null: String } p:a={ null: String }/>
+ assert(xml1 == xml2)
+ assert(xml1 == xml3)
+
+ val xml1Str = "<t d=\"2\" b=\"1\"></t>"
+ assert(xml1.toString() == xml1Str)
+ assert(xml2.toString() == xml1Str)
+ assert(xml3.toString() == xml1Str)
+ }
+}
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index a7e50877a7..0145d3321f 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -8,7 +8,7 @@ import org.scalacheck.Test.{Params, check}
import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
-object CheckEither extends Properties("Either") {
+object Test extends Properties("Either") {
implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
@@ -186,9 +186,3 @@ object CheckEither extends Properties("Either") {
STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this)
}
}
-
-object Test {
- def main(args: Array[String]): Unit = {
- CheckEither.runTests()
- }
-}
diff --git a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala
new file mode 100644
index 0000000000..2094a46e37
--- /dev/null
+++ b/test/files/scalacheck/nan-ordering.scala
@@ -0,0 +1,130 @@
+import org.scalacheck._
+import Gen._
+import Prop._
+
+object Test extends Properties("NaN-Ordering") {
+
+ val specFloats: Gen[Float] = oneOf(
+ Float.MaxValue,
+ Float.MinPositiveValue,
+ Float.MinValue,
+ Float.NaN,
+ Float.NegativeInfinity,
+ Float.PositiveInfinity,
+ -0.0f,
+ +0.0f
+ )
+
+ property("Float min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numFloat = implicitly[Numeric[Float]]
+
+ property("Float lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lt(d1, d2) == d1 < d2 }
+
+ property("Float lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lteq(d1, d2) == d1 <= d2 }
+
+ property("Float gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gt(d1, d2) == d1 > d2 }
+
+ property("Float gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gteq(d1, d2) == d1 >= d2 }
+
+ property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) }
+
+ property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = numFloat.reverse.min(d1, d2)
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = numFloat.reverse.max(d1, d2)
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Float reverse.lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Float reverse.lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Float reverse.gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Float reverse.gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Float reverse.equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.equiv(d1, d2) == (d1 == d2) }
+
+
+ val specDoubles: Gen[Double] = oneOf(
+ Double.MaxValue,
+ Double.MinPositiveValue,
+ Double.MinValue,
+ Double.NaN,
+ Double.NegativeInfinity,
+ Double.PositiveInfinity,
+ -0.0,
+ +0.0
+ )
+
+ // ticket #5104
+ property("Double min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numDouble = implicitly[Numeric[Double]]
+
+ property("Double lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lt(d1, d2) == d1 < d2 }
+
+ property("Double lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lteq(d1, d2) == d1 <= d2 }
+
+ property("Double gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gt(d1, d2) == d1 > d2 }
+
+ property("Double gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gteq(d1, d2) == d1 >= d2 }
+
+ property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) }
+
+ property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = numDouble.reverse.min(d1, d2)
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = numDouble.reverse.max(d1, d2)
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Double reverse.lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Double reverse.lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Double reverse.gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Double reverse.gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Double reverse.equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.equiv(d1, d2) == (d1 == d2) }
+}
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 56295f204c..72979115be 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -12,10 +12,16 @@ class Counter(r: Range) {
if (cnt % 500000000L == 0L) {
println("Working: %s %d %d" format (str, cnt, x))
}
- if (cnt > (Int.MaxValue.toLong + 1) * 2)
- error("Count exceeds maximum possible for an Int Range")
- if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x)))
- error("Range wrapped: %d %s" format (x, last.toString))
+ if (cnt > (Int.MaxValue.toLong + 1) * 2) {
+ val msg = "Count exceeds maximum possible for an Int Range: %s" format str
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
+ if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x))) {
+ val msg = "Range %s wrapped: %d %s" format (str, x, last.toString)
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
last = Some(x)
}
}
@@ -23,29 +29,40 @@ class Counter(r: Range) {
abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
def myGen: Gen[Range]
- val genRange = for {
- start <- arbitrary[Int]
- end <- arbitrary[Int]
- step <- Gen.choose(1, (start - end).abs + 1)
- } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
-
- val genReasonableSizeRange = for {
- start <- choose(-Int.MinValue, Int.MaxValue)
- end <- choose(-Int.MinValue, Int.MaxValue)
+ def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange)
+
+ def genArbitraryRange = for {
+ start <- choose(Int.MinValue, Int.MaxValue)
+ end <- choose(Int.MinValue, Int.MaxValue)
step <- choose(-Int.MaxValue, Int.MaxValue)
} yield Range(start, end, if (step == 0) 100 else step)
- val genSmallRange = for {
+ def genBoundaryRange = for {
+ boundary <- oneOf(Int.MinValue, -1, 0, 1, Int.MaxValue)
+ isStart <- arbitrary[Boolean]
+ size <- choose(1, 100)
+ step <- choose(1, 101)
+ } yield {
+ val signum = if (boundary == 0) 1 else boundary.signum
+ if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum)
+ else Range(boundary - size * boundary.signum, boundary, step * signum)
+ }
+
+
+ def genSmallRange = for {
start <- choose(-100, 100)
end <- choose(-100, 100)
step <- choose(1, 1)
} yield if (start < end) Range(start, end, step) else Range(start, end, -step)
- val genRangeByOne = for {
- start <- arbitrary[Int]
- end <- arbitrary[Int]
- if (end.toLong - start.toLong).abs <= 10000000L
- } yield if (start < end) Range(start, end) else Range(end, start)
+ def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne)
+
+ def genRangeOpenByOne = for {
+ r <- oneOf(genSmallRange, genBoundaryRange)
+ if (r.end.toLong - r.start.toLong).abs <= 10000000L
+ } yield if (r.start < r.end) Range(r.start, r.end) else Range(r.end, r.start)
+
+ def genRangeClosedByOne = for (r <- genRangeOpenByOne) yield r.start to r.end
def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
@@ -71,7 +88,8 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
- property("foreach.step") = forAll(myGen) { r =>
+ property("foreach.step") = forAllNoShrink(myGen) { r =>
+// println("foreach.step "+str(r))
var allValid = true
val cnt = new Counter(r)
// println("--------------------")
@@ -84,6 +102,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("foreach.inside.range") = forAll(myGen) { r =>
+// println("foreach.inside.range "+str(r))
var allValid = true
var last: Option[Int] = None
val cnt = new Counter(r)
@@ -94,6 +113,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("foreach.visited.size") = forAll(myGen) { r =>
+// println("foreach.visited.size "+str(r))
var visited = 0L
val cnt = new Counter(r)
r foreach { x => cnt(x)
@@ -108,14 +128,17 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("length "+str(r))
(r.length == expectedSize(r)) :| str(r)
}
property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("isEmpty "+str(r))
(r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
}
property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
+// println("contains "+str(r))
// println("----------------")
// println(str(r))
// println(x)
@@ -126,11 +149,13 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("take "+str(r))
val t = r take x
(t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
}
property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("init "+str(r))
(r.size == 0) || {
val t = r.init
(t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
@@ -138,6 +163,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("takeWhile "+str(r))
val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
if (r.size == 0) {
(t.size == 0) :| str(r)+" / "+str(t)+": "+x
@@ -148,6 +174,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
}
property("reverse.toSet.equal") = forAll(myGen) { r =>
+// println("reverse.toSet.equal "+str(r))
val reversed = r.reverse
val aresame = r.toSet == reversed.toSet
if (!aresame) {
@@ -157,7 +184,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
println(r.toSet)
println(reversed.toSet)
}
- aresame
+ aresame :| str(r)
}
}
@@ -178,11 +205,11 @@ object InclusiveRangeTest extends RangeTest("inclusive") {
}
object ByOneRangeTest extends RangeTest("byOne") {
- override def myGen = genSmallRange
+ override def myGen = genRangeByOne
}
object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
- override def myGen = for (r <- genSmallRange) yield r.inclusive
+ override def myGen = for (r <- genRangeByOne) yield r.inclusive
}
object SmallValuesRange extends RangeTest("smallValues") {
@@ -207,9 +234,11 @@ object TooLargeRange extends Properties("Too Large Range") {
object Test extends Properties("Range") {
import org.scalacheck.{ Test => STest }
- List(NormalRangeTest, InclusiveRangeTest, ByOneRangeTest, InclusiveByOneRangeTest, TooLargeRange) foreach { ps =>
- STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), ps)
- }
+ include(NormalRangeTest)
+ include(InclusiveRangeTest)
+ include(ByOneRangeTest)
+ include(InclusiveByOneRangeTest)
+ include(TooLargeRange)
}
/* Mini-benchmark
diff --git a/test/partest.bat b/test/partest.bat
index 0b3f5fbf33..4c97a53122 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -101,3 +101,4 @@ goto end
:end
if "%OS%"=="Windows_NT" @endlocal
+exit /b %errorlevel%
diff --git a/test/pending/run/reify_closure1.check b/test/pending/run/reify_closure1.check
new file mode 100644
index 0000000000..b2f7f08c17
--- /dev/null
+++ b/test/pending/run/reify_closure1.check
@@ -0,0 +1,2 @@
+10
+10
diff --git a/test/pending/run/reify_closure1.scala b/test/pending/run/reify_closure1.scala
new file mode 100644
index 0000000000..825a38dc1d
--- /dev/null
+++ b/test/pending/run/reify_closure1.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo[T](ys: List[T]): Int => Int = {
+ val fun: reflect.Code[Int => Int] = x => {
+ x
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(List(1, 2, 3))(10))
+ println(foo(List(1, 2, 3, 4))(10))
+}
diff --git a/test/pending/run/reify_closure2a.check b/test/pending/run/reify_closure2a.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure2a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure2a.scala b/test/pending/run/reify_closure2a.scala
new file mode 100644
index 0000000000..b88bec005d
--- /dev/null
+++ b/test/pending/run/reify_closure2a.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure2b.check b/test/pending/run/reify_closure2b.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure2b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala
new file mode 100644
index 0000000000..e9fb40bede
--- /dev/null
+++ b/test/pending/run/reify_closure2b.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y
+ }
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure3a.check b/test/pending/run/reify_closure3a.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure3a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure3a.scala b/test/pending/run/reify_closure3a.scala
new file mode 100644
index 0000000000..6414fa58a3
--- /dev/null
+++ b/test/pending/run/reify_closure3a.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ def y1 = y
+
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y1
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure3b.check b/test/pending/run/reify_closure3b.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure3b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala
new file mode 100644
index 0000000000..5c4f3c81b9
--- /dev/null
+++ b/test/pending/run/reify_closure3b.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ def y1 = y
+
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y1
+ }
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure4a.check b/test/pending/run/reify_closure4a.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure4a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure4a.scala b/test/pending/run/reify_closure4a.scala
new file mode 100644
index 0000000000..99e9d82706
--- /dev/null
+++ b/test/pending/run/reify_closure4a.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ val y1 = y
+
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y1
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure4b.check b/test/pending/run/reify_closure4b.check
new file mode 100644
index 0000000000..c1f3abd7e6
--- /dev/null
+++ b/test/pending/run/reify_closure4b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala
new file mode 100644
index 0000000000..24dfa9fe17
--- /dev/null
+++ b/test/pending/run/reify_closure4b.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ val y1 = y
+
+ val fun: reflect.Code[Int => Int] = x => {
+ x + y1
+ }
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(new Foo(y).fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
diff --git a/test/pending/run/reify_closure5a.check b/test/pending/run/reify_closure5a.check
new file mode 100644
index 0000000000..df9e19c591
--- /dev/null
+++ b/test/pending/run/reify_closure5a.check
@@ -0,0 +1,2 @@
+13
+14
diff --git a/test/pending/run/reify_closure5a.scala b/test/pending/run/reify_closure5a.scala
new file mode 100644
index 0000000000..0ac53d5479
--- /dev/null
+++ b/test/pending/run/reify_closure5a.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo[T](ys: List[T]): Int => Int = {
+ val fun: reflect.Code[Int => Int] = x => {
+ x + ys.length
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(List(1, 2, 3))(10))
+ println(foo(List(1, 2, 3, 4))(10))
+}
diff --git a/test/pending/run/reify_closure5b.check b/test/pending/run/reify_closure5b.check
new file mode 100644
index 0000000000..df9e19c591
--- /dev/null
+++ b/test/pending/run/reify_closure5b.check
@@ -0,0 +1,2 @@
+13
+14
diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala
new file mode 100644
index 0000000000..02eb771f0c
--- /dev/null
+++ b/test/pending/run/reify_closure5b.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ def foo[T](ys: List[T]): Int => Int = {
+ class Foo[T](ys: List[T]) {
+ val fun: reflect.Code[Int => Int] = x => {
+ x + ys.length
+ }
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(new Foo(ys).fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(List(1, 2, 3))(10))
+ println(foo(List(1, 2, 3, 4))(10))
+}
diff --git a/test/pending/run/reify_closure6.check b/test/pending/run/reify_closure6.check
new file mode 100644
index 0000000000..3526d04b0e
--- /dev/null
+++ b/test/pending/run/reify_closure6.check
@@ -0,0 +1,3 @@
+first invocation = 15
+second invocation = 18
+q after second invocation = 2
diff --git a/test/pending/run/reify_closure6.scala b/test/pending/run/reify_closure6.scala
new file mode 100644
index 0000000000..909071aa44
--- /dev/null
+++ b/test/pending/run/reify_closure6.scala
@@ -0,0 +1,26 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ var q = 0
+ def foo[T](ys: List[T]): Int => Int = {
+ val z = 1
+ var y = 0
+ val fun: reflect.Code[Int => Int] = x => {
+ y += 1
+ q += 1
+ x + ys.length * z + q + y
+ }
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(fun.tree)
+ val dyn = toolbox.runExpr(ttree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println("first invocation = " + foo(List(1, 2, 3))(10))
+ println("second invocation = " + foo(List(1, 2, 3, 4))(10))
+ println("q after second invocation = " + q)
+}
diff --git a/test/pending/run/t5266_1.scala b/test/pending/run/t5266_1.scala
deleted file mode 100644
index 06a81a04ea..0000000000
--- a/test/pending/run/t5266_1.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.Settings
-import reflect.runtime.Mirror.ToolBox
-
-object Test extends App {
- val code = scala.reflect.Code.lift{
- def x = 2
- println(x)
- };
-
- val settings = new Settings
- settings.debug.value = true
- settings.Xshowtrees.value = true
- settings.Xprint.value = List("typer")
- settings.printtypes.value = true
- settings.Ytyperdebug.value = true
-
- val reporter = new ConsoleReporter(settings)
- val toolbox = new ToolBox(reporter)
- val ttree = toolbox.typeCheck(code.tree)
- val evaluated = toolbox.runExpr(ttree)
- println("evaluated = " + evaluated)
-} \ No newline at end of file
diff --git a/test/pending/run/t5334_1.scala b/test/pending/run/t5334_1.scala
new file mode 100644
index 0000000000..c1eba89c2b
--- /dev/null
+++ b/test/pending/run/t5334_1.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C
+ new C
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(code.tree)
+ toolbox.runExpr(ttree)
+}
diff --git a/test/pending/run/t5334_2.scala b/test/pending/run/t5334_2.scala
new file mode 100644
index 0000000000..361b8c85f2
--- /dev/null
+++ b/test/pending/run/t5334_2.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
+import reflect.runtime.Mirror.ToolBox
+
+object Test extends App {
+ val code = scala.reflect.Code.lift{
+ class C
+ List((new C, new C))
+ };
+
+ val reporter = new ConsoleReporter(new Settings)
+ val toolbox = new ToolBox(reporter)
+ val ttree = toolbox.typeCheck(code.tree)
+ toolbox.runExpr(ttree)
+}
diff --git a/test/scaladoc/resources/SI_5054.scala b/test/scaladoc/resources/SI_5054.scala
deleted file mode 100644
index 17167303e4..0000000000
--- a/test/scaladoc/resources/SI_5054.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-class SI_5054 {
-
- /**
- * A simple comment
- *
- * @param lost a lost parameter
- * @usecase def test(): Int
- */
- def test(implicit lost: Int): Int = lost
-} \ No newline at end of file
diff --git a/test/scaladoc/resources/SI_5054_q1.scala b/test/scaladoc/resources/SI_5054_q1.scala
new file mode 100644
index 0000000000..02d9be8dd0
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q1.scala
@@ -0,0 +1,9 @@
+class SI_5054_q1 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q2.scala b/test/scaladoc/resources/SI_5054_q2.scala
new file mode 100644
index 0000000000..c873731e5b
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q2.scala
@@ -0,0 +1,9 @@
+class SI_5054_q2 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ final def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q3.scala b/test/scaladoc/resources/SI_5054_q3.scala
new file mode 100644
index 0000000000..be5d22ffdc
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q3.scala
@@ -0,0 +1,9 @@
+class SI_5054_q3 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ implicit def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q4.scala b/test/scaladoc/resources/SI_5054_q4.scala
new file mode 100644
index 0000000000..4e5e4865f1
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q4.scala
@@ -0,0 +1,9 @@
+abstract class SI_5054_q4 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5054_q5.scala b/test/scaladoc/resources/SI_5054_q5.scala
new file mode 100644
index 0000000000..05ba7488eb
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q5.scala
@@ -0,0 +1,9 @@
+trait SI_5054_q5 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q6.scala b/test/scaladoc/resources/SI_5054_q6.scala
new file mode 100644
index 0000000000..607be654a5
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q6.scala
@@ -0,0 +1,9 @@
+trait SI_5054_q6 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5054_q7.scala b/test/scaladoc/resources/SI_5054_q7.scala
new file mode 100644
index 0000000000..1bd120e30c
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q7.scala
@@ -0,0 +1,22 @@
+trait SI_5054_q7 {
+ /**
+ * The full definition, either used with an implicit value or with an explicit one.
+ *
+ * Some more explanation on implicits...
+ *
+ * @param lost a lost parameter
+ * @return some integer
+ * @usecase def test1(): Int
+ *
+ * This takes the implicit value in scope.
+ *
+ * Example: `test1()`
+ *
+ * @usecase def test2(explicit: Int): Int
+ *
+ * This takes the explicit value passed.
+ *
+ * Example: `test2(3)`
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5287.scala b/test/scaladoc/resources/SI_5287.scala
new file mode 100644
index 0000000000..141ab15325
--- /dev/null
+++ b/test/scaladoc/resources/SI_5287.scala
@@ -0,0 +1,17 @@
+trait SI_5287_A {
+ def method(implicit a: Int): Int = a
+}
+
+trait SI_5287_B extends SI_5287_A {
+ override def method(implicit a: Int): Int = a + 1
+}
+
+trait SI_5287 extends SI_5287_B{
+ /**
+ * Some explanation
+ *
+ * @usecase def method(): Int
+ * The usecase explanation
+ */
+ override def method(implicit a: Int): Int = a + 3
+} \ No newline at end of file
diff --git a/test/scaladoc/scala/html.flags b/test/scaladoc/scala/html.flags
new file mode 100644
index 0000000000..b2264ec4f4
--- /dev/null
+++ b/test/scaladoc/scala/html.flags
@@ -0,0 +1 @@
+-encoding UTF-8 \ No newline at end of file
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.flags b/test/scaladoc/scala/html/HtmlFactoryTest.flags
new file mode 100644
index 0000000000..b2264ec4f4
--- /dev/null
+++ b/test/scaladoc/scala/html/HtmlFactoryTest.flags
@@ -0,0 +1 @@
+-encoding UTF-8 \ No newline at end of file
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala
index c8dad4cf48..5b17affbf0 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala
@@ -21,6 +21,9 @@ object XMLUtil {
}
object Test extends Properties("HtmlFactory") {
+
+ final val RESOURCES = "test/scaladoc/resources/"
+
import scala.tools.nsc.doc.{DocFactory, Settings}
import scala.tools.nsc.doc.model.IndexModelFactory
import scala.tools.nsc.doc.html.HtmlFactory
@@ -47,7 +50,7 @@ object Test extends Properties("HtmlFactory") {
def createTemplates(basename: String) = {
val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
- createFactory.makeUniverse(List("test/scaladoc/resources/"+basename)) match {
+ createFactory.makeUniverse(List(RESOURCES+basename)) match {
case Some(universe) => {
val index = IndexModelFactory.makeIndex(universe)
(new HtmlFactory(universe, index)).writeTemplates((page) => {
@@ -61,7 +64,7 @@ object Test extends Properties("HtmlFactory") {
}
def createReferenceIndex(basename: String) = {
- createFactory.makeUniverse(List("test/scaladoc/resources/"+basename)) match {
+ createFactory.makeUniverse(List(RESOURCES+basename)) match {
case Some(universe) => {
val index = IndexModelFactory.makeIndex(universe)
val pages = index.firstLetterIndex.map({
@@ -81,6 +84,52 @@ object Test extends Properties("HtmlFactory") {
val html = scala.stripSuffix(".scala") + ".html"
createTemplates(scala)(html)
}
+
+ /**
+ * See checkTextOnly(scalaFile: String, checks: List[String])
+ */
+ def checkText1(scalaFile: String, check: String, debug: Boolean = true): Boolean = checkText(scalaFile, List(check), debug)
+
+ /**
+ * This tests the text without the markup - ex:
+ *
+ * <h4 class="signature">
+ * <span class="modifier_kind">
+ * <span class="modifier">implicit</span>
+ * <span class="kind">def</span>
+ * </span>
+ * <span class="symbol">
+ * <span class="name">test</span><span class="params">()</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span>
+ * </span>
+ * </h4>
+ *
+ * becomes:
+ *
+ * implicit def test(): Int
+ *
+ * and is required to contain the text in the given checks
+ *
+ * NOTE: Comparison is done ignoring all whitespace
+ */
+ def checkText(scalaFile: String, checks: List[String], debug: Boolean = true): Boolean = {
+ val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
+ val htmlText = createTemplates(scalaFile)(htmlFile).text.replace('→',' ').replaceAll("\\s+","")
+ var result = true
+
+ for (check <- checks) {
+ val checkText = check.replace('→',' ').replaceAll("\\s+","")
+ val checkValue = htmlText.contains(checkText)
+ if (debug && (!checkValue)) {
+ Console.err.println("Check failed: ")
+ Console.err.println("HTML: " + htmlText)
+ Console.err.println("Check: " + checkText)
+ }
+ result &&= checkValue
+ }
+
+ result
+ }
+
def shortComments(root: scala.xml.Node) =
XMLUtil.stripGroup(root).descendant.flatMap {
@@ -377,20 +426,43 @@ object Test extends Properties("HtmlFactory") {
createTemplate("SI_4898.scala")
true
}
+
+ property("Use cases should override their original members") =
+ checkText1("SI_5054_q1.scala", """def test(): Int""") &&
+ !checkText1("SI_5054_q1.scala", """def test(implicit lost: Int): Int""")
+
- property("Use cases should override their original members - valid until signature is added to html") = {
- createTemplate("SI_5054.scala") match {
- case node: scala.xml.Node =>
- node.toString.contains("A simple comment") &&
- ! node.toString.contains("a lost parameter")
- case _ => false
- }
- }
-
+ property("Use cases should keep their flags - final should not be lost") =
+ checkText1("SI_5054_q2.scala", """final def test(): Int""")
+
+ property("Use cases should keep their flags - implicit should not be lost") =
+ checkText1("SI_5054_q3.scala", """implicit def test(): Int""")
+
+ property("Use cases should keep their flags - real abstract should not be lost") =
+ checkText1("SI_5054_q4.scala", """abstract def test(): Int""")
+
+ property("Use cases should keep their flags - traits should not be affected") =
+ checkText1("SI_5054_q5.scala", """def test(): Int""")
+
+ property("Use cases should keep their flags - traits should not be affected") =
+ checkText1("SI_5054_q6.scala", """abstract def test(): Int""")
+
+ property("Use case individual signature test") =
+ checkText("SI_5054_q7.scala", List(
+ """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""",
+ """abstract def test1(): Int [use case] This takes the implicit value in scope."""))
+
+ property("Display correct \"Definition classes\"") =
+ checkText1("SI_5287.scala",
+ """def method(): Int
+ [use case] The usecase explanation
+ [use case] The usecase explanation
+ Definition Classes SI_5287 SI_5287_B SI_5287_A""", debug=true)
+ // explanation appears twice, as small comment and full comment
{
val files = createTemplates("basic.scala")
- println(files)
+ //println(files)
property("class") = files.get("com/example/p1/Clazz.html") match {
case Some(node: scala.xml.Node) => {
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 1f4733ff3b..79a37dd7df 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -111,7 +111,7 @@ pushJarFiles() {
local user=$2
local password=$3
# TODO - ignore target/ and build/
- local jarFiles=$(find ${basedir} -name "*.jar")
+ local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar")"
for jar in $jarFiles; do
local valid=$(isJarFileValid $jar)
if [[ "$valid" != "OK" ]]; then
@@ -141,7 +141,7 @@ pullJarFile() {
# Argument 1 - The directory to search for *.desired.sha1 files that need to be retrieved.
pullJarFiles() {
local basedir=$1
- local desiredFiles=$(find ${basedir} -name "*${desired_ext}")
+ local desiredFiles="$(find ${basedir}/lib -name *${desired_ext}) $(find ${basedir}/test/files -name *${desired_ext})"
for sha in $desiredFiles; do
jar=${sha%$desired_ext}
local valid=$(isJarFileValid $jar)
diff --git a/tools/epfl-build b/tools/epfl-build
new file mode 100755
index 0000000000..dd66307de3
--- /dev/null
+++ b/tools/epfl-build
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# builds nightlies
+
+[[ $# -gt 0 ]] || {
+ cat <<EOM
+Usage: $0 <version> [opt opt ...]
+
+ Everything after the version is supplied to scalac and partest.
+ Example: $0 -Xcheckinit -Ycheck:all
+
+Environment variables:
+ extra_ant_targets Additional ant targets to run after nightly
+
+EOM
+ exit 0
+}
+
+# version isn't actually used at present.
+scalaVersion="$1" && shift
+scalaArgs="-Dscalac.args=\"$@\" -Dpartest.scalac_opts=\"$@\""
+
+ant all.clean && ./pull-binary-libs.sh
+
+ant $scalaArgs build-opt &&
+ant $scalaArgs nightly &&
+for target in $extra_ant_targets; do ant $target ; done
+# [[ -n "$BUILD_DOCSCOMP" ]] && ant docscomp
diff --git a/tools/epfl-publish b/tools/epfl-publish
new file mode 100755
index 0000000000..e9cd97b3d2
--- /dev/null
+++ b/tools/epfl-publish
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+#
+# publishes nightly build if $publish_to is set in environment.
+# alternate maven settings.xml file given in $maven_settings.
+#
+
+[[ $# -eq 1 ]] || {
+ cat <<EOM
+Usage: $0 <scala version>
+
+Environment variables:
+ publish_to rsync destination
+EOM
+ exit 0
+}
+version="$1"
+
+[[ -d dists/archives ]] || {
+ echo "Can't find build, has it completed? No directory at dists/archives"
+ exit 1
+}
+
+# should not be hardcoded
+# adds -Dsettings.file= if fixed path is present
+mavenSettingsOption () {
+ hardcoded_path="/home/linuxsoft/apps/hudson-maven-settings/settings.xml"
+
+ # environment variable
+ if [[ -n $maven_settings ]]; then
+ echo -Dsettings.file="$maven_settings"
+ elif [[ -f $hardcoded_path ]]; then
+ echo -Dsettings.file="$hardcoded_path"
+ fi
+}
+
+mavenSettings=${maven_settings:-findMavenSettings}
+
+if [[ -z $publish_to ]]; then
+ echo "Nothing to publish."
+else
+ echo "Publishing nightly build to $publish_to"
+ # Archive Scala nightly distribution
+ rsync -az dists/archives/ "$publish_to/distributions"
+ # don't publish docs in 2.8.x
+ [[ $version == "2.8.x" ]] || rsync -az build/scaladoc/ "$publish_to/docs"
+ # sbaz
+ [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz"
+ # Deploy the maven artifacts on scala-tools.org
+ ( cd dists/maven/latest && ant deploy.snapshot $(mavenSettingsOption) )
+fi
diff --git a/tools/get-scala-revision b/tools/get-scala-revision
index e8597844cc..b27b6ddc82 100755
--- a/tools/get-scala-revision
+++ b/tools/get-scala-revision
@@ -1,19 +1,24 @@
-#!/bin/sh
+#!/usr/bin/env bash
#
# Usage: get-scala-revision [dir]
# Figures out current scala revision of a git clone.
#
# If no dir is given, current working dir is used.
-[ -n "$1" ] && cd "$1"
+# not like releases come out so often that we are duty-bound
+# to recalculate this every time.
+# git merge-base v2.8.2 v2.9.1 master
+devbase="df13e31bbb"
-# dev should be a tag at the merge-base of master and the
-# most recent release.
-if [ -z $(git tag -l dev) ]; then
- # no dev tag available - this will generate dev-g<sha>
- echo "dev-g$(git describe HEAD --abbrev=7 --always)"
-else
- # dev tag exists - this generates dev-NNNN-g<sha>
- # where NNNN is the number of commits since dev.
- git describe HEAD --abbrev=7 --match dev
-fi
+# reimplementing git describe hopefully in a way which works
+# without any particular tags, branches, or recent versions of git.
+# this is supposed to generate
+# dev-NNNN-g<sha>
+# where NNNN is the number of commits since devbase, which
+# is the merge-base of the most recent release and master.
+# Presently hardcoded to reduce uncertainty, v2.8.2/v2.9.1/master.
+commits=$(git --no-pager log --pretty=oneline $devbase..HEAD | wc -l)
+sha=$(git rev-list -n 1 HEAD)
+datestr=$(date "+%Y-%m-%d")
+
+printf "rdev-%s-%s-g%s\n" $commits $datestr ${sha:0:7}